feat: add Celery/Redis task queue with feature flag support
Migrate background tasks from FastAPI BackgroundTasks to Celery with Redis
for persistent task queuing, retries, and scheduled jobs.

Key changes:
- Add Celery configuration with Redis broker/backend
- Create task dispatcher with USE_CELERY feature flag for gradual rollout
- Add Celery task wrappers for all background operations:
  - Marketplace imports
  - Letzshop historical imports
  - Product exports
  - Code quality scans
  - Test runs
  - Subscription scheduled tasks (via Celery Beat)
- Add celery_task_id column to job tables for Flower integration
- Add Flower dashboard link to admin background tasks page
- Update docker-compose.yml with worker, beat, and flower services
- Add Makefile targets: celery-worker, celery-beat, celery-dev, flower

When USE_CELERY=false (default), the system falls back to FastAPI
BackgroundTasks for development without a Redis dependency.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
124
app/core/celery_config.py
Normal file
124
app/core/celery_config.py
Normal file
@@ -0,0 +1,124 @@
|
||||
# app/core/celery_config.py
"""
Celery configuration for Wizamart background task processing.

This module configures Celery with Redis as the broker and result backend.
It includes:
- Task routing to separate queues (default, long_running, scheduled)
- Celery Beat schedule for periodic tasks
- Task retry policies
"""

import os

from celery import Celery
from celery.schedules import crontab

# Broker/result-backend URL; falls back to a local Redis instance when the
# REDIS_URL environment variable is unset.
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")

# Task modules eagerly imported by every worker so their tasks register.
_TASK_MODULES = [
    "app.tasks.celery_tasks.marketplace",
    "app.tasks.celery_tasks.letzshop",
    "app.tasks.celery_tasks.subscription",
    "app.tasks.celery_tasks.export",
    "app.tasks.celery_tasks.code_quality",
    "app.tasks.celery_tasks.test_runner",
]

# The single Celery application instance shared by workers, beat, and the API.
celery_app = Celery(
    "wizamart",
    broker=REDIS_URL,
    backend=REDIS_URL,
    include=_TASK_MODULES,
)

# ---------------------------------------------------------------------------
# Core Celery settings
# ---------------------------------------------------------------------------
celery_app.conf.update(
    # Only JSON payloads cross the broker -- no pickle.
    task_serializer="json",
    accept_content=["json"],
    result_serializer="json",
    # Local timezone for crontab schedules; timestamps stored as UTC.
    timezone="Europe/Luxembourg",
    enable_utc=True,
    # Report STARTED state so dashboards can distinguish queued vs running.
    task_track_started=True,
    task_time_limit=30 * 60,  # 30 minutes hard limit
    task_soft_time_limit=25 * 60,  # 25 minutes soft limit
    # One task reserved at a time: long-running jobs should not hoard work.
    worker_prefetch_multiplier=1,
    worker_concurrency=4,  # number of concurrent worker processes
    # Keep task results in Redis for 24 hours.
    result_expires=86400,
    # NOTE(review): `task_default_retry_delay` and `task_max_retries` do not
    # appear to be recognized global Celery settings (retry policy is normally
    # a per-task attribute) -- confirm against the Celery configuration docs.
    task_default_retry_delay=60,  # 1 minute between retries
    task_max_retries=3,
    # Emit task events so Flower can monitor workers.
    worker_send_task_events=True,
    task_send_sent_event=True,
)

# ---------------------------------------------------------------------------
# Task routing -- send each task family to its dedicated queue
# ---------------------------------------------------------------------------
celery_app.conf.task_routes = {
    # Long-running import tasks
    "app.tasks.celery_tasks.marketplace.*": {"queue": "long_running"},
    "app.tasks.celery_tasks.letzshop.*": {"queue": "long_running"},
    # Fast export tasks
    "app.tasks.celery_tasks.export.*": {"queue": "default"},
    # Scheduled subscription tasks
    "app.tasks.celery_tasks.subscription.*": {"queue": "scheduled"},
    # Code quality and test tasks (can be long)
    "app.tasks.celery_tasks.code_quality.*": {"queue": "long_running"},
    "app.tasks.celery_tasks.test_runner.*": {"queue": "long_running"},
}

# ---------------------------------------------------------------------------
# Celery Beat schedule -- periodic subscription maintenance
# ---------------------------------------------------------------------------
celery_app.conf.beat_schedule = {
    # Reset usage counters at start of each period
    "reset-period-counters-daily": {
        "task": "app.tasks.celery_tasks.subscription.reset_period_counters",
        "schedule": crontab(hour=0, minute=5),  # 00:05 daily
        "options": {"queue": "scheduled"},
    },
    # Check for expiring trials and send notifications
    "check-trial-expirations-daily": {
        "task": "app.tasks.celery_tasks.subscription.check_trial_expirations",
        "schedule": crontab(hour=1, minute=0),  # 01:00 daily
        "options": {"queue": "scheduled"},
    },
    # Sync subscription status with Stripe
    "sync-stripe-status-hourly": {
        "task": "app.tasks.celery_tasks.subscription.sync_stripe_status",
        "schedule": crontab(minute=30),  # every hour at :30
        "options": {"queue": "scheduled"},
    },
    # Clean up stale/orphaned subscriptions
    "cleanup-stale-subscriptions-weekly": {
        "task": "app.tasks.celery_tasks.subscription.cleanup_stale_subscriptions",
        "schedule": crontab(hour=3, minute=0, day_of_week=0),  # Sunday 03:00
        "options": {"queue": "scheduled"},
    },
    # Capture daily capacity snapshot for analytics
    "capture-capacity-snapshot-daily": {
        "task": "app.tasks.celery_tasks.subscription.capture_capacity_snapshot",
        "schedule": crontab(hour=0, minute=0),  # midnight daily
        "options": {"queue": "scheduled"},
    },
}

# ---------------------------------------------------------------------------
# Queue declarations
# ---------------------------------------------------------------------------
celery_app.conf.task_queues = {
    "default": {"exchange": "default", "routing_key": "default"},
    "long_running": {"exchange": "long_running", "routing_key": "long_running"},
    "scheduled": {"exchange": "scheduled", "routing_key": "scheduled"},
}

# Tasks with no explicit route land on the default queue.
celery_app.conf.task_default_queue = "default"
|
||||
@@ -45,9 +45,9 @@ class Settings(BaseSettings):
|
||||
"""
|
||||
|
||||
# =============================================================================
|
||||
# DATABASE
|
||||
# DATABASE (PostgreSQL only)
|
||||
# =============================================================================
|
||||
database_url: str = "sqlite:///./wizamart.db"
|
||||
database_url: str = "postgresql://wizamart_user:secure_password@localhost:5432/wizamart_db"
|
||||
|
||||
# =============================================================================
|
||||
# ADMIN INITIALIZATION (for init_production.py)
|
||||
@@ -174,6 +174,19 @@ class Settings(BaseSettings):
|
||||
seed_products_per_vendor: int = 20 # Products per vendor
|
||||
seed_orders_per_vendor: int = 10 # Orders per vendor
|
||||
|
||||
# =============================================================================
|
||||
# CELERY / REDIS TASK QUEUE
|
||||
# =============================================================================
|
||||
# Redis URL for Celery broker and result backend
|
||||
redis_url: str = "redis://localhost:6379/0"
|
||||
|
||||
# Feature flag: enable Celery for background tasks (False = use FastAPI BackgroundTasks)
|
||||
use_celery: bool = False
|
||||
|
||||
# Flower monitoring dashboard
|
||||
flower_url: str = "http://localhost:5555"
|
||||
flower_password: str = "changeme" # CHANGE IN PRODUCTION!
|
||||
|
||||
model_config = {"env_file": ".env"}
|
||||
|
||||
|
||||
@@ -238,6 +251,27 @@ def is_staging_environment() -> bool:
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def validate_database_url() -> None:
    """Ensure the configured database URL points at PostgreSQL.

    SQLite gets a dedicated, actionable error message; any other
    non-PostgreSQL scheme gets a generic rejection.

    Raises:
        ValueError: If database URL is not PostgreSQL
    """
    url = settings.database_url

    # SQLite is the most common misconfiguration -- give detailed guidance.
    if url.startswith("sqlite"):
        raise ValueError(
            "SQLite is not supported. Please use PostgreSQL.\n"
            "Set DATABASE_URL environment variable to a PostgreSQL connection string.\n"
            "Example: postgresql://user:password@localhost:5432/dbname\n"
            "For local development, run: docker-compose up -d db"
        )

    # Anything else that is not PostgreSQL is rejected with its scheme named.
    if not url.startswith("postgresql"):
        raise ValueError(
            f"Unsupported database: {url.split(':')[0]}\n"
            "Only PostgreSQL is supported."
        )
|
||||
|
||||
|
||||
def validate_production_settings() -> list[str]:
|
||||
"""
|
||||
Validate settings for production environment.
|
||||
@@ -304,6 +338,7 @@ __all__ = [
|
||||
"is_development_environment",
|
||||
"is_staging_environment",
|
||||
# Validation
|
||||
"validate_database_url",
|
||||
"validate_production_settings",
|
||||
"print_environment_info",
|
||||
]
|
||||
|
||||
Reference in New Issue
Block a user