feat(infrastructure): use celery worker

Committed: 2025-10-02 14:59:44 +02:00
parent 9a436d3c70
commit 145565b542
3 changed files with 6 additions and 64 deletions

View File

@@ -1,8 +1,6 @@
import os
from celery import Celery
# Reuse existing RabbitMQ configuration env vars with vhost and optional TLS support
# If RABBITMQ_URL is provided, it takes precedence. Otherwise compose from parts.
if os.getenv("RABBITMQ_URL"):
RABBITMQ_URL = os.getenv("RABBITMQ_URL") # type: ignore
else:
@@ -27,26 +25,19 @@ else:
if vhost in ("/", "") and not RABBITMQ_URL.endswith("//"):
RABBITMQ_URL += "/"
# Default queue name to keep parity with the previous worker
DEFAULT_QUEUE = os.getenv("MAIL_QUEUE", "mail_queue")
# Use RPC backend by default to avoid coupling to Redis
CELERY_BACKEND = os.getenv("CELERY_BACKEND", "rpc://")
celery_app = Celery(
"app",
broker=RABBITMQ_URL,
backend=CELERY_BACKEND,
include=[
"app.workers.celery_tasks",
],
# backend=CELERY_BACKEND,
)
celery_app.autodiscover_tasks(["app.workers"], related_name="celery_tasks") # discover app.workers.celery_tasks
# Ensure this Celery app becomes the default for producers (e.g., FastAPI process)
# so that @shared_task.delay(...) uses the same broker/credentials as the worker.
celery_app.set_default()
# Basic, safe defaults — single prefetch helps fairness, similar to the old worker
celery_app.conf.update(
task_default_queue=DEFAULT_QUEUE,
task_acks_late=True,
@@ -54,8 +45,6 @@ celery_app.conf.update(
task_serializer="json",
result_serializer="json",
accept_content=["json"],
broker_heartbeat=0, # let kombu handle heartbeats robustly in some envs
)
# Expose a shortcut for Celery CLI discovery: `celery -A app.celery_app worker ...`
__all__ = ["celery_app"]