# app/tasks/background_tasks.py
"""Background tasks for marketplace imports."""

import logging
from datetime import UTC, datetime

from app.core.database import SessionLocal
from app.utils.csv_processor import CSVProcessor
from models.database.marketplace_import_job import MarketplaceImportJob
from models.database.vendor import Vendor

logger = logging.getLogger(__name__)


async def process_marketplace_import(
    job_id: int,
    url: str,
    marketplace: str,
    vendor_id: int,
    batch_size: int = 1000,
    language: str = "en",
) -> None:
    """Background task to process marketplace CSV import.

    Loads the import job and vendor, marks the job as processing, runs the
    CSV through ``CSVProcessor``, and writes the outcome (counts, status,
    error message) back onto the job row. Failures are caught and persisted
    on the job record rather than raised to the caller.

    Args:
        job_id: ID of the MarketplaceImportJob record.
        url: URL to the CSV file.
        marketplace: Name of the marketplace (e.g., 'Letzshop').
        vendor_id: ID of the vendor.
        batch_size: Number of rows to process per batch.
        language: Language code for translations (default: 'en').
    """
    db = SessionLocal()
    job = None
    try:
        # Get the import job; nothing to update if it does not exist.
        job = (
            db.query(MarketplaceImportJob)
            .filter(MarketplaceImportJob.id == job_id)
            .first()
        )
        if not job:
            logger.error(f"Import job {job_id} not found")
            return

        # Get vendor information; a missing vendor fails the job record.
        vendor = db.query(Vendor).filter(Vendor.id == vendor_id).first()
        if not vendor:
            logger.error(f"Vendor {vendor_id} not found for import job {job_id}")
            job.status = "failed"
            job.error_message = f"Vendor {vendor_id} not found"
            job.completed_at = datetime.now(UTC)
            db.commit()
            return

        # Mark the job as running before the (potentially long) CSV work,
        # so observers can see it is in progress.
        job.status = "processing"
        job.started_at = datetime.now(UTC)
        db.commit()

        logger.info(
            f"Processing import: Job {job_id}, Marketplace: {marketplace}, "
            f"Vendor: {vendor.name} ({vendor.vendor_code}), Language: {language}"
        )

        # Instantiate the processor only once the job and vendor are known
        # to exist (avoids needless construction on the early-return paths).
        csv_processor = CSVProcessor()

        # Process CSV with vendor name and language
        result = await csv_processor.process_marketplace_csv_from_url(
            url=url,
            marketplace=marketplace,
            vendor_name=vendor.name,  # Pass vendor name to CSV processor
            batch_size=batch_size,
            db=db,
            language=language,  # Pass language for translations
            import_job_id=job_id,  # Pass job ID for error tracking
        )

        # Record results; a non-zero error count downgrades the status.
        errors = result.get("errors", 0)
        job.status = "completed_with_errors" if errors > 0 else "completed"
        job.completed_at = datetime.now(UTC)
        job.imported_count = result["imported"]
        job.updated_count = result["updated"]
        job.error_count = errors
        job.total_processed = result["total_processed"]
        if errors > 0:
            job.error_message = f"{errors} rows had errors"
        db.commit()

        logger.info(
            f"Import job {job_id} completed: "
            f"imported={result['imported']}, updated={result['updated']}, "
            f"errors={errors}"
        )

    except Exception as e:
        # logger.exception records the traceback automatically.
        logger.exception(f"Import job {job_id} failed: {e}")
        if job is not None:
            # Best-effort: persist the failure on the job record.
            try:
                job.status = "failed"
                job.error_message = str(e)
                job.completed_at = datetime.now(UTC)
                db.commit()
            except Exception as commit_error:
                logger.error(f"Failed to update job status: {commit_error}")
                db.rollback()
    finally:
        # Always release the session; a close failure must not propagate.
        try:
            db.close()
        except Exception as close_error:
            logger.error(f"Error closing database session: {close_error}")