feat: add Celery/Redis task queue with feature flag support

Migrate background tasks from FastAPI BackgroundTasks to Celery with Redis
for persistent task queuing, retries, and scheduled jobs.

Key changes:
- Add Celery configuration with Redis broker/backend
- Create task dispatcher with USE_CELERY feature flag for gradual rollout
- Add Celery task wrappers for all background operations:
  - Marketplace imports
  - Letzshop historical imports
  - Product exports
  - Code quality scans
  - Test runs
  - Subscription scheduled tasks (via Celery Beat)
- Add celery_task_id column to job tables for Flower integration
- Add Flower dashboard link to admin background tasks page
- Update docker-compose.yml with worker, beat, and flower services
- Add Makefile targets: celery-worker, celery-beat, celery-dev, flower

When USE_CELERY=false (default), system falls back to FastAPI BackgroundTasks
for development without Redis dependency.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-11 17:35:16 +01:00
parent 879ac0caea
commit 2792414395
30 changed files with 2218 additions and 79 deletions

View File

@@ -0,0 +1,55 @@
"""add celery_task_id to job tables
Revision ID: 09d84a46530f
Revises: y3d4e5f6g7h8
Create Date: 2026-01-11 16:44:59.070110
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '09d84a46530f'
down_revision: Union[str, None] = 'y3d4e5f6g7h8'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add an indexed, nullable celery_task_id column to every job table.

    The column stays nullable so existing rows (created before the Celery
    migration) remain valid; the index supports lookups by task id from the
    Flower integration.
    """
    # One entry per job-tracking model:
    # MarketplaceImportJob, LetzshopHistoricalImportJob, ArchitectureScan, TestRun.
    job_tables = (
        'marketplace_import_jobs',
        'letzshop_historical_import_jobs',
        'architecture_scans',
        'test_runs',
    )
    for table in job_tables:
        op.add_column(
            table,
            sa.Column('celery_task_id', sa.String(length=255), nullable=True),
        )
        op.create_index(
            op.f(f'ix_{table}_celery_task_id'),
            table,
            ['celery_task_id'],
            unique=False,
        )
def downgrade() -> None:
    """Drop the celery_task_id column (and its index) from every job table.

    Tables are processed in the reverse order of upgrade(); for each table
    the index is dropped before the column it covers.
    """
    for table in (
        'test_runs',
        'architecture_scans',
        'letzshop_historical_import_jobs',
        'marketplace_import_jobs',
    ):
        op.drop_index(op.f(f'ix_{table}_celery_task_id'), table_name=table)
        op.drop_column(table, 'celery_task_id')