feat: complete marketplace module migration (Phase 6)
Migrates marketplace module to self-contained structure: - Create app/modules/marketplace/services/ re-exporting from existing locations - Create app/modules/marketplace/models/ with marketplace & letzshop models - Create app/modules/marketplace/schemas/ with product & import schemas - Create app/modules/marketplace/tasks/ with 5 Celery tasks: - process_marketplace_import - CSV product import - process_historical_import - Letzshop order import - sync_vendor_directory - Scheduled daily vendor sync - export_vendor_products_to_folder - Multi-language export - export_marketplace_products - Admin export - Create app/modules/marketplace/exceptions.py - Update definition.py with is_self_contained=True and scheduled_tasks Celery task migration: - process_marketplace_import, process_historical_import -> import_tasks.py - sync_vendor_directory -> sync_tasks.py (scheduled daily at 02:00) - export_vendor_products_to_folder, export_marketplace_products -> export_tasks.py Backward compatibility: - Legacy task files now re-export from new locations - Remove marketplace/letzshop/export from LEGACY_TASK_MODULES Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -1,160 +1,17 @@
|
||||
# app/tasks/celery_tasks/marketplace.py
|
||||
"""
|
||||
Celery tasks for marketplace product imports.
|
||||
Legacy marketplace tasks.
|
||||
|
||||
Wraps the existing process_marketplace_import function for Celery execution.
|
||||
MIGRATED: All tasks have been migrated to app.modules.marketplace.tasks.
|
||||
|
||||
New locations:
|
||||
- process_marketplace_import -> app.modules.marketplace.tasks.import_tasks
|
||||
|
||||
Import from the new location:
|
||||
from app.modules.marketplace.tasks import process_marketplace_import
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import UTC, datetime
|
||||
# Re-export from new location for backward compatibility
|
||||
from app.modules.marketplace.tasks.import_tasks import process_marketplace_import
|
||||
|
||||
from app.core.celery_config import celery_app
|
||||
from app.services.admin_notification_service import admin_notification_service
|
||||
from app.tasks.celery_tasks.base import DatabaseTask
|
||||
from app.utils.csv_processor import CSVProcessor
|
||||
from models.database.marketplace_import_job import MarketplaceImportJob
|
||||
from models.database.vendor import Vendor
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@celery_app.task(
    bind=True,
    base=DatabaseTask,
    name="app.tasks.celery_tasks.marketplace.process_marketplace_import",
    max_retries=3,
    default_retry_delay=60,
    autoretry_for=(Exception,),
    retry_backoff=True,
    retry_backoff_max=600,
    retry_jitter=True,
)
def process_marketplace_import(
    self,
    job_id: int,
    url: str,
    marketplace: str,
    vendor_id: int,
    batch_size: int = 1000,
    language: str = "en",
):
    """
    Celery task to process marketplace CSV import.

    Fetches a marketplace CSV from *url*, runs it through the async
    CSVProcessor, and records progress/outcome on the MarketplaceImportJob row.

    Args:
        job_id: ID of the MarketplaceImportJob record
        url: URL to the CSV file
        marketplace: Name of the marketplace (e.g., 'Letzshop')
        vendor_id: ID of the vendor
        batch_size: Number of rows to process per batch
        language: Language code for translations (default: 'en')

    Returns:
        dict: Import results with counts, or a dict with an ``error`` key
        when the job/vendor lookup fails before processing starts (no retry
        in that case — the input is unrecoverable).

    Raises:
        Exception: Re-raised from the CSV processor so the decorator's
        autoretry/backoff policy applies.
    """
    csv_processor = CSVProcessor()

    with self.get_db() as db:
        # Get the import job; bail out (without retry) if it does not exist.
        job = db.query(MarketplaceImportJob).filter(MarketplaceImportJob.id == job_id).first()
        if not job:
            logger.error(f"Import job {job_id} not found")
            return {"error": f"Import job {job_id} not found"}

        # Store Celery task ID on job so it can be tracked/revoked later.
        job.celery_task_id = self.request.id

        # Get vendor information; a missing vendor is a terminal failure.
        vendor = db.query(Vendor).filter(Vendor.id == vendor_id).first()
        if not vendor:
            logger.error(f"Vendor {vendor_id} not found for import job {job_id}")
            job.status = "failed"
            job.error_message = f"Vendor {vendor_id} not found"
            job.completed_at = datetime.now(UTC)
            db.commit()
            return {"error": f"Vendor {vendor_id} not found"}

        # Mark the job as in-flight before starting the (long) import.
        job.status = "processing"
        job.started_at = datetime.now(UTC)
        db.commit()

        logger.info(
            f"Processing import: Job {job_id}, Marketplace: {marketplace}, "
            f"Vendor: {vendor.name} ({vendor.vendor_code}), Language: {language}"
        )

        try:
            # FIX: asyncio.run() replaces the previous manual
            # new_event_loop()/set_event_loop()/loop.close() sequence, which
            # left a *closed* loop registered as the worker thread's current
            # event loop after each task run.
            result = asyncio.run(
                csv_processor.process_marketplace_csv_from_url(
                    url=url,
                    marketplace=marketplace,
                    vendor_name=vendor.name,
                    batch_size=batch_size,
                    db=db,
                    language=language,
                    import_job_id=job_id,
                )
            )

            # Update job with results.
            job.status = "completed"
            job.completed_at = datetime.now(UTC)
            job.imported_count = result["imported"]
            job.updated_count = result["updated"]
            job.error_count = result.get("errors", 0)
            job.total_processed = result["total_processed"]

            if result.get("errors", 0) > 0:
                job.status = "completed_with_errors"
                job.error_message = f"{result['errors']} rows had errors"

                # Notify admin if error count is significant.
                # FIX: notification is best-effort — a failure here must not
                # prevent the job row from being committed below.
                if result.get("errors", 0) >= 5:
                    try:
                        admin_notification_service.notify_import_failure(
                            db=db,
                            vendor_name=vendor.name,
                            job_id=job_id,
                            error_message=f"Import completed with {result['errors']} errors out of {result['total_processed']} rows",
                            vendor_id=vendor_id,
                        )
                    except Exception:
                        logger.exception(
                            f"Failed to create admin notification for import job {job_id}"
                        )

            db.commit()
            logger.info(
                f"Import job {job_id} completed: "
                f"imported={result['imported']}, updated={result['updated']}, "
                f"errors={result.get('errors', 0)}"
            )

            return {
                "job_id": job_id,
                "imported": result["imported"],
                "updated": result["updated"],
                "errors": result.get("errors", 0),
                "total_processed": result["total_processed"],
            }

        except Exception as e:
            logger.error(f"Import job {job_id} failed: {e}", exc_info=True)
            job.status = "failed"
            job.error_message = str(e)[:500]  # Truncate long errors
            job.completed_at = datetime.now(UTC)

            # Create admin notification for import failure.
            # FIX: guarded so a notification error cannot skip the commit
            # below and leave the job stuck in "processing".
            try:
                admin_notification_service.notify_import_failure(
                    db=db,
                    vendor_name=vendor.name,
                    job_id=job_id,
                    error_message=str(e)[:200],
                    vendor_id=vendor_id,
                )
            except Exception:
                logger.exception(
                    f"Failed to create admin notification for import job {job_id}"
                )

            db.commit()
            raise  # Re-raise for Celery retry
||||
# Public API of this legacy shim module: only the re-exported task name.
__all__ = ["process_marketplace_import"]
|
||||
|
||||
Reference in New Issue
Block a user