feat(deployment): add cron support

This commit is contained in:
2025-11-11 13:52:17 +01:00
parent f58083870f
commit e708f7b18b
4 changed files with 40 additions and 13 deletions

View File

@@ -1,3 +1,4 @@
import json
import logging import logging
import os import os
import sys import sys
@@ -65,7 +66,6 @@ fastApi.include_router(auth_router)
fastApi.include_router(categories_router) fastApi.include_router(categories_router)
fastApi.include_router(transactions_router) fastApi.include_router(transactions_router)
for h in list(logging.root.handlers): for h in list(logging.root.handlers):
logging.root.removeHandler(h) logging.root.removeHandler(h)
@@ -78,7 +78,6 @@ _log_handler.setFormatter(_formatter)
logging.root.setLevel(logging.INFO) logging.root.setLevel(logging.INFO)
logging.root.addHandler(_log_handler) logging.root.addHandler(_log_handler)
for _name in ("uvicorn", "uvicorn.error", "uvicorn.access"): for _name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
_logger = logging.getLogger(_name) _logger = logging.getLogger(_name)
_logger.handlers = [_log_handler] _logger.handlers = [_log_handler]
@@ -161,16 +160,9 @@ async def authenticated_route(user: User = Depends(current_active_verified_user)
return {"message": f"Hello {user.email}!"} return {"message": f"Hello {user.email}!"}
@fastApi.get("/debug/scrape/csas/all", tags=["debug"]) @fastApi.get("/_cron", include_in_schema=False)
async def debug_scrape_csas_all(): async def handle_cron(request: Request):
logging.info("[Debug] Queueing CSAS scrape for all users via HTTP endpoint (Celery)") logging.info("[Cron] Triggering scheduled tasks via HTTP endpoint")
logging.info(json.dumps(request.headers))
task = load_all_transactions.delay() task = load_all_transactions.delay()
return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)} return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
@fastApi.post("/debug/scrape/csas/{user_id}", tags=["debug"])
async def debug_scrape_csas_user(user_id: str, user: User = Depends(current_active_verified_user)):
logging.info("[Debug] Queueing CSAS scrape for single user via HTTP endpoint (Celery) | user_id=%s", user_id)
task = load_transactions.delay(user_id)
return {"status": "queued", "action": "csas_scrape_single", "user_id": user_id,
"task_id": getattr(task, 'id', None)}

View File

@@ -0,0 +1,25 @@
{{- if .Values.cron.enabled }}
# Periodically curls the app service's cron endpoint so the FastAPI app can
# queue its scheduled Celery tasks. Rendered only when cron.enabled is true.
# NOTE: `{{ - if }}` (space before the dash) is a Go-template parse error;
# the whitespace-trim marker must be written `{{-`.
apiVersion: batch/v1
kind: CronJob
metadata:
  name: cronjob
spec:
  schedule: {{ .Values.cron.schedule | quote }}
  # Forbid / Allow / Replace -- controls overlapping runs (see values.yaml).
  concurrencyPolicy: {{ .Values.cron.concurrencyPolicy | quote }}
  jobTemplate:
    spec:
      template:
        spec:
          containers:
            - name: cronjob
              image: curlimages/curl:latest
              imagePullPolicy: IfNotPresent
              args:
                - -sS
                # Exit non-zero on HTTP >= 400 so the Job is marked failed
                # (and retried via restartPolicy) instead of silently
                # succeeding on a server error.
                - --fail
                - -o
                - /dev/null
                - -w
                - "%{http_code}"
                # `| quote` dropped here: the printf result is already inside
                # a double-quoted YAML scalar, so quoting it again rendered a
                # URL with embedded literal `"` characters.
                - "{{ printf "%s://%s.%s.svc.cluster.local" .Values.cron.scheme .Values.app.name .Release.Namespace }}{{ .Values.cron.endpoint }}"
          restartPolicy: OnFailure
{{- end }}

View File

@@ -5,3 +5,6 @@ app:
worker: worker:
replicas: 3 replicas: 3
# Turn the CronJob on for this environment (it is disabled in the chart defaults).
cron:
  enabled: true

View File

@@ -35,6 +35,13 @@ worker:
# Queue name for Celery worker and for CRD Queue # Queue name for Celery worker and for CRD Queue
mailQueueName: "mail_queue" mailQueueName: "mail_queue"
# Configuration for the Kubernetes CronJob that curls the app's cron endpoint.
cron:
  # Disabled by default; enabled per environment in the override values files.
  enabled: false
  schedule: "*/5 * * * *" # every 5 minutes
  # URL scheme used to reach the in-cluster service.
  scheme: "http"
  # HTTP path the CronJob requests on the app service.
  endpoint: "/_cron"
  # Skip a new run if the previous invocation is still in flight.
  concurrencyPolicy: "Forbid"
service: service:
port: 80 port: 80