Files
orion/app/core/celery_config.py
Samir Boulahtit 2792414395 feat: add Celery/Redis task queue with feature flag support
Migrate background tasks from FastAPI BackgroundTasks to Celery with Redis
for persistent task queuing, retries, and scheduled jobs.

Key changes:
- Add Celery configuration with Redis broker/backend
- Create task dispatcher with USE_CELERY feature flag for gradual rollout
- Add Celery task wrappers for all background operations:
  - Marketplace imports
  - Letzshop historical imports
  - Product exports
  - Code quality scans
  - Test runs
  - Subscription scheduled tasks (via Celery Beat)
- Add celery_task_id column to job tables for Flower integration
- Add Flower dashboard link to admin background tasks page
- Update docker-compose.yml with worker, beat, and flower services
- Add Makefile targets: celery-worker, celery-beat, celery-dev, flower

When USE_CELERY=false (default), system falls back to FastAPI BackgroundTasks
for development without Redis dependency.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-11 17:35:16 +01:00

125 lines
4.7 KiB
Python

# app/core/celery_config.py
"""
Celery configuration for Wizamart background task processing.
This module configures Celery with Redis as the broker and result backend.
It includes:
- Task routing to separate queues (default, long_running, scheduled)
- Celery Beat schedule for periodic tasks
- Task retry policies
"""
import os
from celery import Celery
from celery.schedules import crontab
# Redis connection string; override via the REDIS_URL env var (defaults to
# local Redis, database 0).
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")

# Modules the worker imports at startup so their @celery_app.task
# definitions are registered.
_TASK_MODULES = [
    "app.tasks.celery_tasks.marketplace",
    "app.tasks.celery_tasks.letzshop",
    "app.tasks.celery_tasks.subscription",
    "app.tasks.celery_tasks.export",
    "app.tasks.celery_tasks.code_quality",
    "app.tasks.celery_tasks.test_runner",
]

# Celery application — Redis doubles as message broker and result backend.
celery_app = Celery(
    "wizamart",
    broker=REDIS_URL,
    backend=REDIS_URL,
    include=_TASK_MODULES,
)
# =============================================================================
# CELERY CONFIGURATION
# =============================================================================
celery_app.conf.update(
    # Serialization: JSON only, so payloads are language-agnostic and the
    # worker never unpickles untrusted broker messages.
    task_serializer="json",
    accept_content=["json"],
    result_serializer="json",
    # Timezone: the beat schedule below is expressed in local Luxembourg
    # time; enable_utc keeps Celery's internal timestamps in UTC.
    timezone="Europe/Luxembourg",
    enable_utc=True,
    # Task behavior
    task_track_started=True,
    task_time_limit=30 * 60,       # 30-minute hard kill
    task_soft_time_limit=25 * 60,  # SoftTimeLimitExceeded raised at 25 minutes
    # Worker settings
    worker_prefetch_multiplier=1,  # no prefetching: fair scheduling for long tasks
    # Concurrency is now env-tunable; defaults to the previous hard-coded 4.
    worker_concurrency=int(os.getenv("CELERY_WORKER_CONCURRENCY", "4")),
    # Result backend
    result_expires=86400,  # drop results from Redis after 24 hours
    # NOTE(review): "task_default_retry_delay" and "task_max_retries" are NOT
    # recognized Celery settings — Celery configures retry policy per task via
    # the decorator arguments default_retry_delay= and max_retries= (or inside
    # self.retry()). These two keys are stored on conf but have no effect on
    # retries. Kept so any code reading celery_app.conf for them still works;
    # verify each task wrapper sets its own retry policy.
    task_default_retry_delay=60,
    task_max_retries=3,
    # Task events (required for Flower monitoring)
    worker_send_task_events=True,
    task_send_sent_event=True,
)
# =============================================================================
# TASK ROUTING - Route tasks to appropriate queues
# =============================================================================
# Each task module maps to one queue: slow imports and code-quality/test runs
# go to "long_running", fast exports to "default", and periodic subscription
# jobs to "scheduled".
_QUEUE_BY_MODULE = {
    "marketplace": "long_running",
    "letzshop": "long_running",
    "export": "default",
    "subscription": "scheduled",
    "code_quality": "long_running",
    "test_runner": "long_running",
}
celery_app.conf.task_routes = {
    f"app.tasks.celery_tasks.{module}.*": {"queue": queue}
    for module, queue in _QUEUE_BY_MODULE.items()
}
# =============================================================================
# CELERY BEAT SCHEDULE - Periodic tasks
# =============================================================================
_SUBSCRIPTION_MODULE = "app.tasks.celery_tasks.subscription"


def _scheduled(task_name: str, schedule: crontab) -> dict:
    """Build a beat entry for a subscription task, pinned to the 'scheduled' queue."""
    return {
        "task": f"{_SUBSCRIPTION_MODULE}.{task_name}",
        "schedule": schedule,
        "options": {"queue": "scheduled"},
    }


celery_app.conf.beat_schedule = {
    # Reset usage counters shortly after midnight, once the new period starts.
    "reset-period-counters-daily": _scheduled(
        "reset_period_counters", crontab(hour=0, minute=5)
    ),
    # Notify users whose trials are about to expire (01:00 daily).
    "check-trial-expirations-daily": _scheduled(
        "check_trial_expirations", crontab(hour=1, minute=0)
    ),
    # Reconcile local subscription status with Stripe every hour at :30.
    "sync-stripe-status-hourly": _scheduled(
        "sync_stripe_status", crontab(minute=30)
    ),
    # Weekly cleanup of stale/orphaned subscriptions (Sunday 03:00).
    "cleanup-stale-subscriptions-weekly": _scheduled(
        "cleanup_stale_subscriptions", crontab(hour=3, minute=0, day_of_week=0)
    ),
    # Capture the midnight capacity snapshot for analytics.
    "capture-capacity-snapshot-daily": _scheduled(
        "capture_capacity_snapshot", crontab(hour=0, minute=0)
    ),
}
# =============================================================================
# QUEUE CONFIGURATION
# =============================================================================
# Declare the three worker queues; each uses a same-named exchange and
# routing key. Tasks without an explicit route land in "default".
celery_app.conf.task_queues = {
    queue_name: {"exchange": queue_name, "routing_key": queue_name}
    for queue_name in ("default", "long_running", "scheduled")
}
celery_app.conf.task_default_queue = "default"