from datetime import datetime
import logging

from sqlalchemy.orm import Session
from app.core.database import SessionLocal
from models.database_models import MarketplaceImportJob
from utils.csv_processor import CSVProcessor

logger = logging.getLogger(__name__)


async def process_marketplace_import(
    job_id: int,
    url: str,
    marketplace: str,
    shop_name: str,
    batch_size: int = 1000,
):
    """Background task to process a marketplace CSV import.

    Loads the MarketplaceImportJob row identified by *job_id*, marks it
    "processing", streams the CSV at *url* through CSVProcessor, and records
    the outcome (counts, timestamps, status) back onto the job row.

    Args:
        job_id: Primary key of the MarketplaceImportJob to update.
        url: Location of the CSV file to import.
        marketplace: Marketplace identifier passed through to the processor.
        shop_name: Shop name passed through to the processor.
        batch_size: Rows per batch handed to the processor (default 1000).

    Returns:
        None. All results are persisted on the job row. Exceptions are caught
        and recorded as a "failed" status rather than propagated.
    """
    db = SessionLocal()
    csv_processor = CSVProcessor()
    # Bind `job` before the try block so the except handler can reference it
    # safely even when db.query(...) itself raises (original code hit
    # UnboundLocalError in that path, masking the real failure).
    job = None
    try:
        job = (
            db.query(MarketplaceImportJob)
            .filter(MarketplaceImportJob.id == job_id)
            .first()
        )
        if not job:
            logger.error("Import job %s not found", job_id)
            return

        # Mark the job as started so concurrent observers see progress.
        job.status = "processing"
        # NOTE(review): datetime.utcnow() is deprecated in Python 3.12+;
        # kept for now because switching to timezone-aware datetimes could
        # break comparisons against naive DB columns — confirm schema first.
        job.started_at = datetime.utcnow()
        db.commit()

        logger.info(
            "Processing import: Job %s, Marketplace: %s", job_id, marketplace
        )

        # Process CSV — the processor handles download, parsing, and batching.
        result = await csv_processor.process_marketplace_csv_from_url(
            url, marketplace, shop_name, batch_size, db
        )

        # Record outcome counts on the job row.
        job.status = "completed"
        job.completed_at = datetime.utcnow()
        job.imported_count = result["imported"]
        job.updated_count = result["updated"]
        job.error_count = result.get("errors", 0)
        job.total_processed = result["total_processed"]

        # Partial failures downgrade the status but are not fatal.
        if result.get("errors", 0) > 0:
            job.status = "completed_with_errors"
            job.error_message = f"{result['errors']} rows had errors"

        db.commit()
        logger.info("Import job %s completed successfully", job_id)

    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("Import job %s failed: %s", job_id, e)
        # The session may be in a failed/dirty state after the exception;
        # roll back before attempting to write the failure status.
        db.rollback()
        if job is not None:
            job.status = "failed"
            job.completed_at = datetime.utcnow()
            job.error_message = str(e)
            db.commit()
    finally:
        db.close()