Mirror of https://github.com/flibusta-apps/batch_downloader.git

Commit: Add pre-commit config
.pre-commit-config.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
+repos:
+  - repo: https://github.com/doublify/pre-commit-rust
+    rev: v1.0
+    hooks:
+      - id: fmt
+      - id: cargo-check
+      - id: clippy
src/config.rs
@@ -1,6 +1,5 @@
 use once_cell::sync::Lazy;
 
-
 fn get_env(env: &'static str) -> String {
     std::env::var(env).unwrap_or_else(|_| panic!("Cannot get the {} env variable", env))
 }
@@ -21,7 +20,7 @@ pub struct Config {
     pub cache_api_key: String,
     pub cache_url: String,
 
-    pub sentry_dsn: String
+    pub sentry_dsn: String,
 }
 
 impl Config {
@@ -42,11 +41,9 @@ impl Config {
             cache_api_key: get_env("CACHE_API_KEY"),
             cache_url: get_env("CACHE_URL"),
 
-            sentry_dsn: get_env("SENTRY_DSN")
+            sentry_dsn: get_env("SENTRY_DSN"),
         }
     }
 }
 
-pub static CONFIG: Lazy<Config> = Lazy::new(|| {
-    Config::load()
-});
+pub static CONFIG: Lazy<Config> = Lazy::new(Config::load);
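The last hunk above swaps a closure for a plain function pointer. A minimal standalone sketch (hypothetical names, not from this repo) of why the two forms are interchangeable: `Lazy::new` accepts any `FnOnce() -> T`, and clippy's `redundant_closure` lint prefers the function pointer.

use once_cell::sync::Lazy;

struct Settings {
    port: u16,
}

impl Settings {
    fn load() -> Settings {
        Settings { port: 8080 }
    }
}

// Both statics are initialized lazily on first access; the two initializers are equivalent.
static WITH_CLOSURE: Lazy<Settings> = Lazy::new(|| Settings::load());
static WITH_FN_POINTER: Lazy<Settings> = Lazy::new(Settings::load);

fn main() {
    println!("{} {}", WITH_CLOSURE.port, WITH_FN_POINTER.port);
}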
src/main.rs (22 lines changed)
@@ -1,15 +1,14 @@
-pub mod views;
 pub mod config;
 pub mod services;
 pub mod structures;
+pub mod views;
 
+use sentry::{integrations::debug_images::DebugImagesIntegration, types::Dsn, ClientOptions};
 use std::{net::SocketAddr, str::FromStr};
-use sentry::{ClientOptions, types::Dsn, integrations::debug_images::DebugImagesIntegration};
-use tokio_cron_scheduler::{JobScheduler, Job};
+use tokio_cron_scheduler::{Job, JobScheduler};
 use tracing::info;
 
-use crate::{views::get_router, services::files_cleaner::clean_files};
+use crate::{services::files_cleaner::clean_files, views::get_router};
 
 async fn start_app() {
     tracing_subscriber::fmt()
@@ -29,11 +28,11 @@ async fn start_app() {
     info!("Webserver shutdown...");
 }
 
 async fn start_job_scheduler() {
     let job_scheduler = JobScheduler::new().await.unwrap();
 
-    let clean_files_job = match Job::new_async("0 */5 * * * *", |_uuid, _l| Box::pin(async {
+    let clean_files_job = match Job::new_async("0 */5 * * * *", |_uuid, _l| {
+        Box::pin(async {
             match clean_files(config::CONFIG.minio_bucket.clone()).await {
                 Ok(_) => info!("Archive files cleaned!"),
                 Err(err) => info!("Clean archive files err: {:?}", err),
@@ -43,7 +42,8 @@ async fn start_job_scheduler() {
                 Ok(_) => info!("Share files cleaned!"),
                 Err(err) => info!("Clean share files err: {:?}", err),
             };
-    })) {
+        })
+    }) {
         Ok(v) => v,
         Err(err) => panic!("{:?}", err),
     };
@@ -57,7 +57,6 @@ async fn start_job_scheduler() {
     };
 }
 
-
 #[tokio::main]
 async fn main() {
     let options = ClientOptions {
@@ -69,8 +68,5 @@ async fn main() {
 
     let _guard = sentry::init(options);
 
-    tokio::join![
-        start_app(),
-        start_job_scheduler()
-    ];
+    tokio::join![start_app(), start_job_scheduler()];
 }
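In the final hunk the two top-level futures are joined on one line. A minimal sketch (hypothetical task names, not from this repo) of what `tokio::join!` does here: both futures are polled concurrently on the same task, and the macro returns once both have completed.

use std::time::Duration;

async fn serve() {
    tokio::time::sleep(Duration::from_millis(50)).await;
    println!("web server finished");
}

async fn schedule_jobs() {
    tokio::time::sleep(Duration::from_millis(50)).await;
    println!("job scheduler finished");
}

#[tokio::main]
async fn main() {
    // Both futures make progress concurrently; the total wait is roughly 50 ms, not 100 ms.
    tokio::join!(serve(), schedule_jobs());
}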
src/services/downloader.rs
@@ -2,14 +2,13 @@ use std::fmt;
 
 use base64::{engine::general_purpose, Engine};
 use reqwest::StatusCode;
-use tempfile::SpooledTempFile;
 use smartstring::alias::String as SmartString;
+use tempfile::SpooledTempFile;
 
 use crate::config;
 
 use super::utils::response_to_tempfile;
 
 #[derive(Debug, Clone)]
 struct DownloadError {
     status_code: StatusCode,
@@ -23,7 +22,6 @@ impl fmt::Display for DownloadError {
 
 impl std::error::Error for DownloadError {}
 
-
 pub async fn download(
     book_id: u64,
     file_type: SmartString,
@@ -60,4 +58,3 @@ pub async fn download(
 
     Ok((output_file.0, filename))
 }
-
src/services/files_cleaner.rs
@@ -1,20 +1,22 @@
-use chrono::{DateTime, Utc, Duration};
+use chrono::{DateTime, Duration, Utc};
 use minio_rsc::{client::ListObjectsArgs, datatype::Object};
 
 use super::minio::get_minio;
 
 pub async fn clean_files(bucket: String) -> Result<(), Box<dyn std::error::Error>> {
     let minio_client = get_minio();
 
-    let objects = minio_client.list_objects(
-        &bucket,
-        ListObjectsArgs::default()
-    ).await?;
+    let objects = minio_client
+        .list_objects(&bucket, ListObjectsArgs::default())
+        .await?;
 
     let delete_before = Utc::now() - Duration::hours(3);
-    for Object { key, last_modified, .. } in objects.contents {
-        let last_modified_date: DateTime<Utc> = DateTime::parse_from_rfc3339(&last_modified)?.into();
+    for Object {
+        key, last_modified, ..
+    } in objects.contents
+    {
+        let last_modified_date: DateTime<Utc> =
+            DateTime::parse_from_rfc3339(&last_modified)?.into();
 
         if last_modified_date <= delete_before {
             let _ = minio_client.remove_object(&bucket, key).await;
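The cleaner above keeps only objects newer than a three-hour cutoff. A minimal standalone sketch (hypothetical helper, not from this repo) of the same chrono comparison, detached from the MinIO client:

use chrono::{DateTime, Duration, Utc};

// Returns true when an RFC 3339 timestamp falls at or before the three-hour retention window.
fn is_stale(last_modified: &str, now: DateTime<Utc>) -> Result<bool, chrono::ParseError> {
    let delete_before = now - Duration::hours(3);
    let last_modified_date: DateTime<Utc> = DateTime::parse_from_rfc3339(last_modified)?.into();
    Ok(last_modified_date <= delete_before)
}

fn main() {
    let now = Utc::now();
    let four_hours_old = (now - Duration::hours(4)).to_rfc3339();
    let one_hour_old = (now - Duration::hours(1)).to_rfc3339();
    println!("{:?}", is_stale(&four_hours_old, now)); // Ok(true)
    println!("{:?}", is_stale(&one_hour_old, now));   // Ok(false)
}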
src/services/library_client.rs
@@ -5,18 +5,17 @@ use tracing::log;
 
 use crate::config;
 
 const PAGE_SIZE: &str = "50";
 
-fn get_allowed_langs_params(allowed_langs: SmallVec<[SmartString; 3]>) -> Vec<(&'static str, SmartString)> {
+fn get_allowed_langs_params(
+    allowed_langs: SmallVec<[SmartString; 3]>,
+) -> Vec<(&'static str, SmartString)> {
     allowed_langs
         .into_iter()
         .map(|lang| ("allowed_langs", lang))
         .collect()
 }
 
 async fn _make_request<T>(
     url: &str,
     params: Vec<(&str, SmartString)>,
@@ -37,18 +36,16 @@ where
         Err(err) => {
             log::error!("Failed serialization: url={:?} err={:?}", url, err);
             Err(Box::new(err))
-        },
+        }
     }
 }
 
 #[derive(Deserialize, Debug, Clone)]
 pub struct Book {
     pub id: u64,
-    pub available_types: SmallVec<[String; 4]>
+    pub available_types: SmallVec<[String; 4]>,
 }
 
 #[derive(Deserialize, Debug, Clone)]
 pub struct Page<T> {
     pub items: Vec<T>,
@@ -60,23 +57,20 @@ pub struct Page<T> {
     pub pages: u32,
 }
 
 #[derive(Deserialize, Debug, Clone)]
 pub struct Sequence {
     pub id: u32,
-    pub name: String
+    pub name: String,
 }
 
 #[derive(Deserialize, Debug, Clone)]
 pub struct Author {
     pub id: u32,
     pub first_name: String,
     pub last_name: String,
-    pub middle_name: Option<String>
+    pub middle_name: Option<String>,
 }
 
 pub async fn get_author_books(
     id: u32,
     page: u32,
src/services/minio.rs
@@ -2,12 +2,11 @@ use minio_rsc::{provider::StaticProvider, Minio};
 
 use crate::config;
 
 pub fn get_minio() -> Minio {
     let provider = StaticProvider::new(
         &config::CONFIG.minio_access_key,
         &config::CONFIG.minio_secret_key,
-        None
+        None,
     );
 
     Minio::builder()
src/services/mod.rs
@@ -1,6 +1,6 @@
-pub mod task_creator;
-pub mod library_client;
-pub mod utils;
 pub mod downloader;
-pub mod minio;
 pub mod files_cleaner;
+pub mod library_client;
+pub mod minio;
+pub mod task_creator;
+pub mod utils;
src/services/task_creator.rs
@@ -7,16 +7,27 @@ use tempfile::SpooledTempFile;
 use tracing::log;
 use zip::write::FileOptions;
 
-use crate::{structures::{CreateTask, Task, ObjectType}, config, views::TASK_RESULTS, services::{downloader::download, utils::{get_stream, get_filename}, minio::get_minio}};
-
-use super::{library_client::{Book, get_sequence_books, get_author_books, get_translator_books, Page}, utils::get_key};
+use crate::{
+    config,
+    services::{
+        downloader::download,
+        minio::get_minio,
+        utils::{get_filename, get_stream},
+    },
+    structures::{CreateTask, ObjectType, Task},
+    views::TASK_RESULTS,
+};
+
+use super::{
+    library_client::{get_author_books, get_sequence_books, get_translator_books, Book, Page},
+    utils::get_key,
+};
 
 pub async fn get_books<Fut>(
     object_id: u32,
     allowed_langs: SmallVec<[SmartString; 3]>,
     books_getter: fn(id: u32, page: u32, allowed_langs: SmallVec<[SmartString; 3]>) -> Fut,
-    file_format: SmartString
+    file_format: SmartString,
 ) -> Result<Vec<Book>, Box<dyn std::error::Error + Send + Sync>>
 where
     Fut: std::future::Future<Output = Result<Page<Book>, Box<dyn std::error::Error + Send + Sync>>>,
@@ -41,17 +52,17 @@ where
         result.extend(page.items);
 
         current_page += 1;
-    };
+    }
 
     let result = result
         .iter()
-        .filter(|book| book.available_types.contains(&file_format.to_string())).cloned()
+        .filter(|book| book.available_types.contains(&file_format.to_string()))
+        .cloned()
         .collect();
 
     Ok(result)
 }
 
 pub async fn set_task_error(key: String, error_message: String) {
     let task = Task {
         id: key.clone(),
@@ -60,13 +71,12 @@ pub async fn set_task_error(key: String, error_message: String) {
         error_message: Some(error_message),
         result_filename: None,
         result_link: None,
-        content_size: None
+        content_size: None,
     };
 
     TASK_RESULTS.insert(key, task.clone()).await;
 }
 
 pub async fn set_progress_description(key: String, description: String) {
     let task = Task {
         id: key.clone(),
@@ -75,15 +85,16 @@ pub async fn set_progress_description(key: String, description: String) {
         error_message: None,
         result_filename: None,
         result_link: None,
-        content_size: None
+        content_size: None,
     };
 
     TASK_RESULTS.insert(key, task.clone()).await;
 }
 
-pub async fn upload_to_minio(archive: SpooledTempFile, filename: String) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
+pub async fn upload_to_minio(
+    archive: SpooledTempFile,
+    filename: String,
+) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
     let minio = get_minio();
 
     let is_bucket_exist = match minio.bucket_exists(&config::CONFIG.minio_bucket).await {
@@ -97,29 +108,36 @@ pub async fn upload_to_minio(archive: SpooledTempFile, filename: String) -> Resu
 
     let data_stream = get_stream(Box::new(archive));
 
-    if let Err(err) = minio.put_object_stream(
-        &config::CONFIG.minio_bucket,
-        filename.clone(),
-        Box::pin(data_stream),
-        None
-    ).await {
+    if let Err(err) = minio
+        .put_object_stream(
+            &config::CONFIG.minio_bucket,
+            filename.clone(),
+            Box::pin(data_stream),
+            None,
+        )
+        .await
+    {
         return Err(Box::new(err));
     }
 
-    let link = match minio.presigned_get_object(
-        PresignedArgs::new(&config::CONFIG.minio_bucket, filename)
-    ).await {
+    let link = match minio
+        .presigned_get_object(PresignedArgs::new(&config::CONFIG.minio_bucket, filename))
+        .await
+    {
         Ok(v) => v,
         Err(err) => {
            return Err(Box::new(err));
-        },
+        }
     };
 
     Ok(link)
 }
 
-pub async fn create_archive(key: String, books: Vec<Book>, file_format: SmartString) -> Result<(SpooledTempFile, u64), Box<dyn std::error::Error + Send + Sync>> {
+pub async fn create_archive(
+    key: String,
+    books: Vec<Book>,
+    file_format: SmartString,
+) -> Result<(SpooledTempFile, u64), Box<dyn std::error::Error + Send + Sync>> {
     let output_file = tempfile::spooled_tempfile(5 * 1024 * 1024);
     let mut archive = zip::ZipWriter::new(output_file);
@@ -147,7 +165,11 @@ pub async fn create_archive(key: String, books: Vec<Book>, file_format: SmartStr
             Err(err) => return Err(Box::new(err)),
         };
 
-        set_progress_description(key.clone(), format!("Загрузка книг: {}/{}", index + 1, books_count)).await;
+        set_progress_description(
+            key.clone(),
+            format!("Загрузка книг: {}/{}", index + 1, books_count),
+        )
+        .await;
     }
 
     let mut archive_result = match archive.finish() {
@@ -160,12 +182,35 @@ pub async fn create_archive(key: String, books: Vec<Book>, file_format: SmartStr
     Ok((archive_result, bytes_count))
 }
 
 pub async fn create_archive_task(key: String, data: CreateTask) {
     let books = match data.object_type {
-        ObjectType::Sequence => get_books(data.object_id, data.allowed_langs, get_sequence_books, data.file_format.clone()).await,
-        ObjectType::Author => get_books(data.object_id, data.allowed_langs, get_author_books, data.file_format.clone()).await,
-        ObjectType::Translator => get_books(data.object_id, data.allowed_langs, get_translator_books, data.file_format.clone()).await,
+        ObjectType::Sequence => {
+            get_books(
+                data.object_id,
+                data.allowed_langs,
+                get_sequence_books,
+                data.file_format.clone(),
+            )
+            .await
+        }
+        ObjectType::Author => {
+            get_books(
+                data.object_id,
+                data.allowed_langs,
+                get_author_books,
+                data.file_format.clone(),
+            )
+            .await
+        }
+        ObjectType::Translator => {
+            get_books(
+                data.object_id,
+                data.allowed_langs,
+                get_translator_books,
+                data.file_format.clone(),
+            )
+            .await
+        }
     };
 
     set_progress_description(key.clone(), "Получение списка книг...".to_string()).await;
@@ -176,7 +221,7 @@ pub async fn create_archive_task(key: String, data: CreateTask) {
             set_task_error(key.clone(), "Failed getting books!".to_string()).await;
             log::error!("{}", err);
             return;
-        },
+        }
     };
 
     if books.is_empty() {
@@ -184,24 +229,26 @@ pub async fn create_archive_task(key: String, data: CreateTask) {
         return;
     }
 
-    let final_filename = match get_filename(data.object_type, data.object_id, data.file_format.clone()).await {
+    let final_filename =
+        match get_filename(data.object_type, data.object_id, data.file_format.clone()).await {
             Ok(v) => v,
             Err(err) => {
                 set_task_error(key.clone(), "Can't get archive name!".to_string()).await;
                 log::error!("{}", err);
                 return;
-            },
+            }
         };
 
     set_progress_description(key.clone(), "Сборка архива...".to_string()).await;
 
-    let (archive_result, content_size) = match create_archive(key.clone(), books, data.file_format).await {
+    let (archive_result, content_size) =
+        match create_archive(key.clone(), books, data.file_format).await {
            Ok(v) => v,
            Err(err) => {
                set_task_error(key.clone(), "Failed downloading books!".to_string()).await;
                log::error!("{}", err);
                return;
-           },
+           }
        };
 
     set_progress_description(key.clone(), "Загрузка архива...".to_string()).await;
@@ -212,7 +259,7 @@ pub async fn create_archive_task(key: String, data: CreateTask) {
             set_task_error(key.clone(), "Failed uploading archive!".to_string()).await;
             log::error!("{}", err);
             return;
-        },
+        }
     };
 
     let task = Task {
@@ -222,16 +269,13 @@ pub async fn create_archive_task(key: String, data: CreateTask) {
         error_message: None,
         result_filename: Some(final_filename),
         result_link: Some(link),
-        content_size: Some(content_size)
+        content_size: Some(content_size),
     };
 
     TASK_RESULTS.insert(key.clone(), task.clone()).await;
 }
 
-pub async fn create_task(
-    data: CreateTask
-) -> Task {
+pub async fn create_task(data: CreateTask) -> Task {
     let key = get_key(data.clone());
 
     let task = Task {
@@ -241,7 +285,7 @@ pub async fn create_task(
         error_message: None,
         result_filename: None,
         result_link: None,
-        content_size: None
+        content_size: None,
     };
 
     TASK_RESULTS.insert(key.clone(), task.clone()).await;
src/services/utils.rs
@@ -1,21 +1,18 @@
+use async_stream::stream;
+use bytes::{Buf, Bytes};
 use minio_rsc::error::Error;
 use reqwest::Response;
-use tempfile::SpooledTempFile;
-use bytes::{Buf, Bytes};
-use async_stream::stream;
-use translit::{gost779b_ru, Transliterator, CharsMapping};
 use smartstring::alias::String as SmartString;
+use tempfile::SpooledTempFile;
+use translit::{gost779b_ru, CharsMapping, Transliterator};
 
-use std::io::{Seek, SeekFrom, Write, Read};
+use std::io::{Read, Seek, SeekFrom, Write};
 
 use crate::structures::{CreateTask, ObjectType};
 
-use super::library_client::{get_sequence, get_author};
+use super::library_client::{get_author, get_sequence};
 
-pub fn get_key(
-    input_data: CreateTask
-) -> String {
+pub fn get_key(input_data: CreateTask) -> String {
     let mut data = input_data.clone();
     data.allowed_langs.sort();
 
@@ -24,7 +21,6 @@ pub fn get_key(
     format!("{:x}", md5::compute(data_string))
 }
 
-
 pub async fn response_to_tempfile(res: &mut Response) -> Option<(SpooledTempFile, usize)> {
     let mut tmp_file = tempfile::spooled_tempfile(5 * 1024 * 1024);
 
@@ -58,8 +54,9 @@ pub async fn response_to_tempfile(res: &mut Response) -> Option<(SpooledTempFile
     Some((tmp_file, data_size))
 }
 
-pub fn get_stream(mut temp_file: Box<dyn Read + Send>) -> impl futures_core::Stream<Item = Result<Bytes, Error>> {
+pub fn get_stream(
+    mut temp_file: Box<dyn Read + Send>,
+) -> impl futures_core::Stream<Item = Result<Bytes, Error>> {
     stream! {
         let mut buf = [0; 2048];
 
@@ -73,29 +70,30 @@ pub fn get_stream(mut temp_file: Box<dyn Read + Send>) -> impl futures_core::Str
         }
     }
 }
 
-pub async fn get_filename(object_type: ObjectType, object_id: u32, file_format: SmartString) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
+pub async fn get_filename(
+    object_type: ObjectType,
+    object_id: u32,
+    file_format: SmartString,
+) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
     let result_filename = match object_type {
-        ObjectType::Sequence => {
-            match get_sequence(object_id).await {
-                Ok(v) => v.name,
-                Err(err) => {
-                    return Err(err);
-                },
-            }
-        },
-        ObjectType::Author | ObjectType::Translator => {
-            match get_author(object_id).await {
-                Ok(v) => {
-                    vec![v.first_name, v.last_name, v.middle_name.unwrap_or("".to_string())]
-                        .into_iter()
-                        .filter(|v| !v.is_empty())
-                        .collect::<Vec<String>>()
-                        .join("_")
-                },
-                Err(err) => {
-                    return Err(err);
-                },
-            }
-        },
+        ObjectType::Sequence => match get_sequence(object_id).await {
+            Ok(v) => v.name,
+            Err(err) => {
+                return Err(err);
+            }
+        },
+        ObjectType::Author | ObjectType::Translator => match get_author(object_id).await {
+            Ok(v) => vec![
+                v.first_name,
+                v.last_name,
+                v.middle_name.unwrap_or("".to_string()),
+            ]
+            .into_iter()
+            .filter(|v| !v.is_empty())
+            .collect::<Vec<String>>()
+            .join("_"),
+            Err(err) => {
+                return Err(err);
+            }
+        },
     };
@@ -132,7 +130,8 @@ pub async fn get_filename(object_type: ObjectType, object_id: u32, file_format:
         ("[", ""),
         ("]", ""),
         ("\"", ""),
-    ].to_vec();
+    ]
+    .to_vec();
 
     let replace_transliterator = Transliterator::new(replace_char_map);
     let normal_filename = replace_transliterator.convert(&filename_without_type, false);
@@ -140,12 +139,20 @@ pub async fn get_filename(object_type: ObjectType, object_id: u32, file_format:
     let normal_filename = normal_filename.replace(|c: char| !c.is_ascii(), "");
 
     let right_part = format!(".{file_format}.zip");
-    let normal_filename_slice = std::cmp::min(64 - right_part.len() - 1, normal_filename.len() - 1);
+    let normal_filename_slice =
+        std::cmp::min(64 - right_part.len() - 1, normal_filename.len() - 1);
 
     let left_part = if normal_filename_slice == normal_filename.len() - 1 {
         &normal_filename
     } else {
-        normal_filename.get(..normal_filename_slice).unwrap_or_else(|| panic!("Can't slice left part: {:?} {:?}", normal_filename, normal_filename_slice))
+        normal_filename
+            .get(..normal_filename_slice)
+            .unwrap_or_else(|| {
+                panic!(
+                    "Can't slice left part: {:?} {:?}",
+                    normal_filename, normal_filename_slice
+                )
+            })
     };
 
     format!("{left_part}{right_part}")
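The get_key helper above derives a deterministic task key from the request payload. A minimal sketch (hypothetical payload string, not from this repo) of the same md5 hex-digest idea used to deduplicate tasks:

fn key_for(payload: &str) -> String {
    // md5::compute returns a 16-byte digest; {:x} formats it as 32 lowercase hex characters.
    format!("{:x}", md5::compute(payload))
}

fn main() {
    // Identical payloads map to the same key, so a repeated request reuses the cached task.
    let a = key_for("sequence:42:fb2:ru");
    let b = key_for("sequence:42:fb2:ru");
    assert_eq!(a, b);
    println!("{a}");
}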
src/structures.rs
@@ -2,14 +2,13 @@ use serde::{Deserialize, Serialize};
 use smallvec::SmallVec;
 use smartstring::alias::String as SmartString;
 
-
 #[derive(Serialize, Clone, PartialEq)]
 #[serde(rename_all = "snake_case")]
 pub enum TaskStatus {
     InProgress,
     Archiving,
     Complete,
-    Failed
+    Failed,
 }
 
 #[derive(Serialize, Deserialize, Clone, Debug)]
@@ -17,15 +16,15 @@ pub enum TaskStatus {
 pub enum ObjectType {
     Sequence,
     Author,
-    Translator
+    Translator,
 }
 
 #[derive(Serialize, Deserialize, Clone)]
-pub struct CreateTask{
+pub struct CreateTask {
     pub object_id: u32,
     pub object_type: ObjectType,
     pub file_format: SmartString,
-    pub allowed_langs: SmallVec<[SmartString; 3]>
+    pub allowed_langs: SmallVec<[SmartString; 3]>,
 }
 
 #[derive(Serialize, Clone)]
@@ -36,5 +35,5 @@ pub struct Task {
     pub error_message: Option<String>,
     pub result_filename: Option<String>,
     pub result_link: Option<String>,
-    pub content_size: Option<u64>
+    pub content_size: Option<u64>,
 }
src/views.rs (37 lines changed)
@@ -1,15 +1,25 @@
 use std::time::Duration;
 
-use axum::{Router, routing::{get, post}, middleware::{self, Next}, http::{Request, StatusCode, self}, response::{Response, IntoResponse}, extract::Path, Json};
+use axum::{
+    extract::Path,
+    http::{self, Request, StatusCode},
+    middleware::{self, Next},
+    response::{IntoResponse, Response},
+    routing::{get, post},
+    Json, Router,
+};
 use axum_prometheus::PrometheusMetricLayer;
 use moka::future::Cache;
 use once_cell::sync::Lazy;
-use tower_http::trace::{TraceLayer, self};
+use tower_http::trace::{self, TraceLayer};
 use tracing::Level;
 
-use crate::{config::CONFIG, structures::{Task, CreateTask, TaskStatus}, services::{task_creator::create_task, utils::get_key}};
+use crate::{
+    config::CONFIG,
+    services::{task_creator::create_task, utils::get_key},
+    structures::{CreateTask, Task, TaskStatus},
+};
 
 pub static TASK_RESULTS: Lazy<Cache<String, Task>> = Lazy::new(|| {
     Cache::builder()
@@ -18,10 +28,7 @@ pub static TASK_RESULTS: Lazy<Cache<String, Task>> = Lazy::new(|| {
         .build()
 });
 
-async fn create_archive_task(
-    Json(data): Json<CreateTask>
-) -> impl IntoResponse {
+async fn create_archive_task(Json(data): Json<CreateTask>) -> impl IntoResponse {
     let key = get_key(data.clone());
 
     let result = match TASK_RESULTS.get(&key).await {
@@ -31,24 +38,20 @@ async fn create_archive_task(
             } else {
                 result
             }
-        },
+        }
         None => create_task(data).await,
     };
 
     Json::<Task>(result).into_response()
 }
 
-async fn check_archive_task_status(
-    Path(task_id): Path<String>
-) -> impl IntoResponse {
+async fn check_archive_task_status(Path(task_id): Path<String>) -> impl IntoResponse {
     match TASK_RESULTS.get(&task_id).await {
         Some(result) => Json::<Task>(result).into_response(),
         None => StatusCode::NOT_FOUND.into_response(),
     }
 }
 
 async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode> {
     let auth_header = req
         .headers()
@@ -68,13 +71,15 @@ async fn auth<B>(req: Request<B>, next: Next<B>) -> Result<Response, StatusCode>
     Ok(next.run(req).await)
 }
 
 pub async fn get_router() -> Router {
     let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair();
 
     let app_router = Router::new()
         .route("/api/", post(create_archive_task))
-        .route("/api/check_archive/:task_id", get(check_archive_task_status))
+        .route(
+            "/api/check_archive/:task_id",
+            get(check_archive_task_status),
+        )
         .layer(middleware::from_fn(auth))
         .layer(prometheus_layer);
 