Major refactoring adding vendor and customer features

This commit is contained in:
2025-10-11 09:09:25 +02:00
parent f569995883
commit dd16198276
126 changed files with 15109 additions and 3747 deletions

View File

@@ -1,32 +1,29 @@
# app/tasks/background_tasks.py
"""Background tasks for marketplace CSV imports.

This module provides functions for:
- processing a marketplace CSV import from a URL as a background task
- updating the MarketplaceImportJob record's status, timestamps, and results
- recording failures on the job (and logging them) instead of re-raising
"""
import logging
from datetime import datetime, timezone
from app.core.database import SessionLocal
from models.database.marketplace_import_job import MarketplaceImportJob
from models.database.vendor import Vendor
from app.utils.csv_processor import CSVProcessor
logger = logging.getLogger(__name__)
async def process_marketplace_import(
job_id: int, url: str, marketplace: str, vendor_name: str, batch_size: int = 1000
job_id: int,
url: str,
marketplace: str,
vendor_id: int, # FIXED: Changed from vendor_name to vendor_id
batch_size: int = 1000
):
"""Background task to process marketplace CSV import."""
db = SessionLocal()
csv_processor = CSVProcessor()
job = None # Initialize job variable
job = None
try:
# Update job status
# Get the import job
job = (
db.query(MarketplaceImportJob)
.filter(MarketplaceImportJob.id == job_id)
@@ -36,15 +33,33 @@ async def process_marketplace_import(
logger.error(f"Import job {job_id} not found")
return
# Get vendor information
vendor = db.query(Vendor).filter(Vendor.id == vendor_id).first()
if not vendor:
logger.error(f"Vendor {vendor_id} not found for import job {job_id}")
job.status = "failed"
job.error_message = f"Vendor {vendor_id} not found"
job.completed_at = datetime.now(timezone.utc)
db.commit()
return
# Update job status
job.status = "processing"
job.started_at = datetime.now(timezone.utc)
db.commit()
logger.info(f"Processing import: Job {job_id}, Marketplace: {marketplace}")
logger.info(
f"Processing import: Job {job_id}, Marketplace: {marketplace}, "
f"Vendor: {vendor.name} ({vendor.vendor_code})"
)
# Process CSV
# Process CSV with vendor_id
result = await csv_processor.process_marketplace_csv_from_url(
url, marketplace, vendor_name, batch_size, db
url,
marketplace,
vendor_id, # FIXED: Pass vendor_id instead of vendor_name
batch_size,
db
)
# Update job with results
@@ -60,11 +75,15 @@ async def process_marketplace_import(
job.error_message = f"{result['errors']} rows had errors"
db.commit()
logger.info(f"Import job {job_id} completed successfully")
logger.info(
f"Import job {job_id} completed: "
f"imported={result['imported']}, updated={result['updated']}, "
f"errors={result.get('errors', 0)}"
)
except Exception as e:
logger.error(f"Import job {job_id} failed: {e}")
if job is not None: # Only update if job was found
logger.error(f"Import job {job_id} failed: {e}", exc_info=True)
if job is not None:
try:
job.status = "failed"
job.error_message = str(e)
@@ -73,12 +92,7 @@ async def process_marketplace_import(
except Exception as commit_error:
logger.error(f"Failed to update job status: {commit_error}")
db.rollback()
# Don't re-raise the exception - background tasks should handle errors internally
# and update the job status accordingly. Only log the error.
pass
finally:
# Close the database session only if it's not a mock
# In tests, we use the same session so we shouldn't close it
if hasattr(db, "close") and callable(getattr(db, "close")):
try:
db.close()