- Replace black, isort, and flake8 with Ruff (all-in-one linter and formatter)
- Add comprehensive pyproject.toml configuration
- Simplify Makefile code quality targets
- Configure exclusions for venv/.venv in pyproject.toml
- Auto-fix 1,359 linting issues across codebase

Benefits:
- Much faster builds (Ruff is written in Rust)
- Single tool replaces multiple tools
- More comprehensive rule set (UP, B, C4, SIM, PIE, RET, Q)
- All configuration centralized in pyproject.toml
- Better import sorting and formatting consistency

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
101 lines
3.2 KiB
Python
101 lines
3.2 KiB
Python
# app/tasks/background_tasks.py
|
|
import logging
|
|
from datetime import UTC, datetime
|
|
|
|
from app.core.database import SessionLocal
|
|
from app.utils.csv_processor import CSVProcessor
|
|
from models.database.marketplace_import_job import MarketplaceImportJob
|
|
from models.database.vendor import Vendor
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
async def process_marketplace_import(
    job_id: int,
    url: str,
    marketplace: str,
    vendor_id: int,  # FIXED: Changed from vendor_name to vendor_id
    batch_size: int = 1000,
):
    """Background task to process marketplace CSV import.

    Loads the MarketplaceImportJob row, validates the vendor, delegates the
    CSV download/parse to CSVProcessor, and records the outcome (row counts,
    status, timestamps, error message) back on the job row.

    Args:
        job_id: Primary key of the MarketplaceImportJob to process.
        url: Location of the CSV file to import.
        marketplace: Marketplace identifier forwarded to the CSV processor.
        vendor_id: Primary key of the Vendor the imported rows belong to.
        batch_size: Rows-per-batch hint forwarded to the CSV processor.
    """
    db = SessionLocal()
    csv_processor = CSVProcessor()
    job = None  # kept outside try so the except block can mark it failed

    try:
        # Get the import job; nothing to record against if it doesn't exist.
        job = (
            db.query(MarketplaceImportJob)
            .filter(MarketplaceImportJob.id == job_id)
            .first()
        )
        if not job:
            logger.error("Import job %s not found", job_id)
            return

        # Validate the vendor before doing any expensive CSV work.
        vendor = db.query(Vendor).filter(Vendor.id == vendor_id).first()
        if not vendor:
            logger.error("Vendor %s not found for import job %s", vendor_id, job_id)
            job.status = "failed"
            job.error_message = f"Vendor {vendor_id} not found"
            job.completed_at = datetime.now(UTC)
            db.commit()
            return

        # Mark the job in-progress so concurrent readers see the state.
        job.status = "processing"
        job.started_at = datetime.now(UTC)
        db.commit()

        logger.info(
            "Processing import: Job %s, Marketplace: %s, Vendor: %s (%s)",
            job_id,
            marketplace,
            vendor.name,
            vendor.vendor_code,
        )

        # Process CSV with vendor_id
        result = await csv_processor.process_marketplace_csv_from_url(
            url,
            marketplace,
            vendor_id,  # FIXED: Pass vendor_id instead of vendor_name
            batch_size,
            db,
        )

        # Record results; downgrade status if some rows had errors.
        job.status = "completed"
        job.completed_at = datetime.now(UTC)
        job.imported_count = result["imported"]
        job.updated_count = result["updated"]
        job.error_count = result.get("errors", 0)
        job.total_processed = result["total_processed"]

        if result.get("errors", 0) > 0:
            job.status = "completed_with_errors"
            job.error_message = f"{result['errors']} rows had errors"

        db.commit()
        logger.info(
            "Import job %s completed: imported=%s, updated=%s, errors=%s",
            job_id,
            result["imported"],
            result["updated"],
            result.get("errors", 0),
        )

    except Exception as e:
        # logger.exception records the traceback (equivalent to exc_info=True).
        logger.exception("Import job %s failed: %s", job_id, e)
        if job is not None:
            try:
                job.status = "failed"
                job.error_message = str(e)
                job.completed_at = datetime.now(UTC)
                db.commit()
            except Exception as commit_error:
                # Best effort only: don't mask the original failure if the
                # session itself is broken; roll back so close() is clean.
                logger.error("Failed to update job status: %s", commit_error)
                db.rollback()
    finally:
        # SessionLocal() always returns a session exposing close(); the
        # original hasattr/callable guard was redundant defensive code.
        try:
            db.close()
        except Exception as close_error:
            logger.error("Error closing database session: %s", close_error)