mirror of https://github.com/flibusta-apps/library_updater.git
Update tasks
File diff suppressed because it is too large
src/app/services/updaters/utils/__init__.py (new file, empty)
src/app/services/updaters/utils/cmd.py (new file, 11 lines)
@@ -0,0 +1,11 @@
import asyncio
from typing import Optional


async def run_cmd(cmd: str) -> tuple[bytes, bytes, Optional[int]]:
    proc = await asyncio.create_subprocess_shell(
        cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
    )

    stdout, stderr = await proc.communicate()
    return stdout, stderr, proc.returncode
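For context: run_cmd is a thin wrapper around asyncio.create_subprocess_shell that runs a shell command, captures stdout and stderr, and returns both together with the exit code. A minimal usage sketch; the command string and the standalone script around it are illustrative assumptions, not part of the commit:

import asyncio

from app.services.updaters.utils.cmd import run_cmd


async def main() -> None:
    # Any shell command string works; "wget --version" is just an example.
    stdout, stderr, return_code = await run_cmd("wget --version")
    if return_code != 0:
        print("command failed:", stderr.decode())
    else:
        print(stdout.decode())


asyncio.run(main())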
src/app/services/updaters/utils/tasks.py (new file, 29 lines)
@@ -0,0 +1,29 @@
from typing import Optional

from arq.connections import ArqRedis
from arq.jobs import Job, JobStatus


async def is_jobs_complete(
    arq_pool: ArqRedis, job_ids: list[str], prefix: Optional[str] = None
) -> Optional[bool]:
    job_statuses = set()
    for job_id in job_ids:
        _job_id = f"{prefix}_{job_id}" if prefix else job_id
        status = await Job(
            _job_id, arq_pool, arq_pool.default_queue_name, arq_pool.job_deserializer
        ).status()
        job_statuses.add(status.value)

    if JobStatus.not_found.value in job_statuses:
        return False

    for status in (
        JobStatus.deferred.value,
        JobStatus.in_progress.value,
        JobStatus.queued.value,
    ):
        if status in job_statuses:
            return False

    return True
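is_jobs_complete looks up every job id on the arq pool (optionally namespaced with a prefix) and returns False while any of them is missing, queued, deferred, or still in progress; it returns True only once all of them have finished. Note that the declared return type is Optional[bool] even though the function only ever returns a bool. A sketch of how a caller might poll it; the job ids, the prefix, and the sleep interval are illustrative, and it assumes get_arq_pool takes no arguments, as in the worker startup further below:

import asyncio

from app.services.updaters.utils.tasks import is_jobs_complete
from core.arq_pool import get_arq_pool


async def wait_for_jobs(job_ids: list[str]) -> None:
    arq_pool = await get_arq_pool()

    # Poll until no job is left in the not_found / queued / deferred / in_progress states.
    while not await is_jobs_complete(arq_pool, job_ids, prefix="fl_update"):
        await asyncio.sleep(5)


asyncio.run(wait_for_jobs(["job-1", "job-2"]))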
src/app/services/updaters/utils/text.py (new file, 28 lines)
@@ -0,0 +1,28 @@
import re


def remove_wrong_ch(s: str) -> str:
    return s.replace(";", "").replace("\n", " ").replace("ё", "е")


def remove_dots(s: str) -> str:
    return s.replace(".", "")


tags_regexp = re.compile(r"<.*?>")


def fix_annotation_text(text: str) -> str:
    replace_map = {
        " ": "",
        "[b]": "",
        "[/b]": "",
        "[hr]": "",
    }

    t = tags_regexp.sub("", text)

    for key in replace_map:
        t = t.replace(key, replace_map[key])

    return t
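These helpers normalize book metadata strings: remove_wrong_ch drops semicolons, turns newlines into spaces, and folds "ё" into "е"; remove_dots strips periods; fix_annotation_text first removes HTML tags with the <.*?> regexp and then deletes a few literal BBCode-style markers listed in replace_map. A few illustrative calls with invented inputs:

from app.services.updaters.utils.text import fix_annotation_text, remove_dots, remove_wrong_ch

print(remove_wrong_ch("Тёмная ночь;\nчасть первая"))       # -> "Темная ночь часть первая"
print(remove_dots("А. С. Пушкин"))                         # -> "А С Пушкин"
print(fix_annotation_text("<p>[b]Аннотация[/b][hr]</p>"))  # -> "Аннотация"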
@@ -1,6 +1,8 @@
 from arq.connections import ArqRedis
+from arq.cron import cron
 
-from app.services.updaters.fl_updater import run_fl_update, run_fl_update2
+from app.services.updaters.fl_updater import __tasks__ as fl_tasks
+from app.services.updaters.fl_updater import run_fl_update
 from core.arq_pool import get_redis_settings, get_arq_pool
 
 
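The import change suggests that fl_updater now exposes its worker coroutines through a __tasks__ list instead of the worker naming them one by one. That module is not part of this diff; the following is a purely hypothetical sketch of the shape __tasks__ might have, only to show what functions = [*fl_tasks] below unpacks:

# Hypothetical excerpt of app/services/updaters/fl_updater.py (assumption; the
# real module is not shown in this diff).


async def run_fl_update(ctx: dict) -> None:
    ...  # the actual update logic lives in the repository


# Every coroutine the arq worker should register; WorkerSettings unpacks this
# list via functions = [*fl_tasks].
__tasks__ = [run_fl_update]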
@@ -8,10 +10,19 @@ async def startup(ctx):
     ctx["arq_pool"] = await get_arq_pool()
 
 
+async def shutdown(ctx):
+    arq_pool: ArqRedis = ctx["arq_pool"]
+
+    arq_pool.close()
+    await arq_pool.wait_closed()
+
+
 class WorkerSettings:
-    functions = [run_fl_update, run_fl_update2]
+    functions = [*fl_tasks]
     on_startup = startup
+    on_shutdown = shutdown
     redis_settings = get_redis_settings()
-    max_jobs = 1
-    job_timeout = 45 * 60
+    max_jobs = 2
+    max_tries = 10
+    job_timeout = 5 * 60
+    cron_jobs = [cron(run_fl_update, hour={5}, minute=0)]
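The two hunks above belong to the arq worker settings module (its filename is not visible in this extract). The commit adds a shutdown hook that closes the Redis pool, registers whatever fl_updater exports as tasks, raises max_jobs from 1 to 2, allows up to 10 retries, shortens the per-job timeout from 45 to 5 minutes, and schedules run_fl_update daily at 05:00 via arq's cron. A sketch of how such a worker is normally started and how a job could be enqueued by name; the module path app.worker and the argument-free call are assumptions:

# Start the worker process with the arq CLI (module path is a guess for this repo):
#   arq app.worker.WorkerSettings

import asyncio

from core.arq_pool import get_arq_pool


async def enqueue_update() -> None:
    arq_pool = await get_arq_pool()
    # Enqueue by function name; the worker matches it against WorkerSettings.functions.
    # This assumes run_fl_update needs no arguments beyond the worker context.
    job = await arq_pool.enqueue_job("run_fl_update")
    print("queued job id:", job.job_id if job else None)


asyncio.run(enqueue_update())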