feat: complete marketplace module migration (Phase 6)
Migrates marketplace module to self-contained structure: - Create app/modules/marketplace/services/ re-exporting from existing locations - Create app/modules/marketplace/models/ with marketplace & letzshop models - Create app/modules/marketplace/schemas/ with product & import schemas - Create app/modules/marketplace/tasks/ with 5 Celery tasks: - process_marketplace_import - CSV product import - process_historical_import - Letzshop order import - sync_vendor_directory - Scheduled daily vendor sync - export_vendor_products_to_folder - Multi-language export - export_marketplace_products - Admin export - Create app/modules/marketplace/exceptions.py - Update definition.py with is_self_contained=True and scheduled_tasks Celery task migration: - process_marketplace_import, process_historical_import -> import_tasks.py - sync_vendor_directory -> sync_tasks.py (scheduled daily at 02:00) - export_vendor_products_to_folder, export_marketplace_products -> export_tasks.py Backward compatibility: - Legacy task files now re-export from new locations - Remove marketplace/letzshop/export from LEGACY_TASK_MODULES Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -1,350 +1,22 @@
|
||||
# app/tasks/celery_tasks/letzshop.py
"""
Celery tasks for Letzshop integration (legacy location).

Includes:
- Historical order imports (process_historical_import)
- Vendor directory sync (sync_vendor_directory)

MIGRATED: these tasks now live in app.modules.marketplace.tasks.

New locations:
- process_historical_import -> app.modules.marketplace.tasks.import_tasks
- sync_vendor_directory -> app.modules.marketplace.tasks.sync_tasks

Import from the new location:

    from app.modules.marketplace.tasks import (
        process_historical_import,
        sync_vendor_directory,
    )
"""
|
||||
|
||||
import logging
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any, Callable
|
||||
# Re-export from new location for backward compatibility
|
||||
from app.modules.marketplace.tasks.import_tasks import process_historical_import
|
||||
from app.modules.marketplace.tasks.sync_tasks import sync_vendor_directory
|
||||
|
||||
from app.core.celery_config import celery_app
|
||||
from app.services.admin_notification_service import admin_notification_service
|
||||
from app.services.letzshop import LetzshopClientError
|
||||
from app.services.letzshop.credentials_service import LetzshopCredentialsService
|
||||
from app.services.letzshop.order_service import LetzshopOrderService
|
||||
from app.services.letzshop.vendor_sync_service import LetzshopVendorSyncService
|
||||
from app.tasks.celery_tasks.base import DatabaseTask
|
||||
from models.database.letzshop import LetzshopHistoricalImportJob
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_credentials_service(db) -> LetzshopCredentialsService:
    """Return a LetzshopCredentialsService bound to the given DB session."""
    service = LetzshopCredentialsService(db)
    return service
|
||||
|
||||
|
||||
def _get_order_service(db) -> LetzshopOrderService:
    """Return a LetzshopOrderService bound to the given DB session."""
    service = LetzshopOrderService(db)
    return service
|
||||
|
||||
|
||||
@celery_app.task(
    bind=True,
    base=DatabaseTask,
    name="app.tasks.celery_tasks.letzshop.process_historical_import",
    max_retries=2,
    default_retry_delay=120,
    autoretry_for=(Exception,),
    retry_backoff=True,
)
def process_historical_import(self, job_id: int, vendor_id: int):
    """
    Run a historical order import for one vendor, with progress tracking.

    Fetches confirmed and then unconfirmed shipments from the Letzshop API
    and persists per-phase progress on the LetzshopHistoricalImportJob row
    so the frontend can poll it.

    Args:
        job_id: ID of the LetzshopHistoricalImportJob record.
        vendor_id: ID of the vendor to import orders for.

    Returns:
        dict: Import statistics for both phases.
    """

    def _snapshot(stats: dict) -> dict:
        # Copy the reported counters into a plain dict for storage on the job.
        return {
            "total": stats["total"],
            "imported": stats["imported"],
            "updated": stats["updated"],
            "skipped": stats["skipped"],
            "products_matched": stats["products_matched"],
            "products_not_found": stats["products_not_found"],
        }

    with self.get_db() as db:
        job = (
            db.query(LetzshopHistoricalImportJob)
            .filter(LetzshopHistoricalImportJob.id == job_id)
            .first()
        )
        if not job:
            logger.error(f"Historical import job {job_id} not found")
            return {"error": f"Job {job_id} not found"}

        # Record which Celery task owns this job.
        job.celery_task_id = self.request.id

        try:
            # Mark the job as started.
            job.status = "fetching"
            job.started_at = datetime.now(UTC)
            db.commit()

            credentials = _get_credentials_service(db)
            orders = _get_order_service(db)

            def on_fetch_progress(page: int, total_fetched: int):
                """Persist paging progress so the UI can show fetch status."""
                job.current_page = page
                job.shipments_fetched = total_fetched
                db.commit()

            def make_processing_callback(
                phase: str,
            ) -> Callable[[int, int, int, int], None]:
                """Build a processing-progress callback for a phase."""
                # NOTE(review): `phase` is accepted but not used by the callback.

                def on_progress(processed: int, imported: int, updated: int, skipped: int):
                    job.orders_processed = processed
                    job.orders_imported = imported
                    job.orders_updated = updated
                    job.orders_skipped = skipped
                    db.commit()

                return on_progress

            with credentials.create_client(vendor_id) as client:
                # ---- Phase 1: confirmed orders --------------------------------
                job.current_phase = "confirmed"
                job.current_page = 0
                job.shipments_fetched = 0
                db.commit()

                logger.info(f"Job {job_id}: Fetching confirmed shipments for vendor {vendor_id}")

                confirmed_shipments = client.get_all_shipments_paginated(
                    state="confirmed",
                    page_size=50,
                    progress_callback=on_fetch_progress,
                )

                logger.info(f"Job {job_id}: Fetched {len(confirmed_shipments)} confirmed shipments")

                # Reset counters before processing the confirmed batch.
                job.status = "processing"
                job.orders_processed = 0
                job.orders_imported = 0
                job.orders_updated = 0
                job.orders_skipped = 0
                db.commit()

                confirmed_stats = orders.import_historical_shipments(
                    vendor_id=vendor_id,
                    shipments=confirmed_shipments,
                    match_products=True,
                    progress_callback=make_processing_callback("confirmed"),
                )

                job.confirmed_stats = _snapshot(confirmed_stats)
                job.products_matched = confirmed_stats["products_matched"]
                job.products_not_found = confirmed_stats["products_not_found"]
                db.commit()

                logger.info(
                    f"Job {job_id}: Confirmed phase complete - "
                    f"imported={confirmed_stats['imported']}, "
                    f"updated={confirmed_stats['updated']}, "
                    f"skipped={confirmed_stats['skipped']}"
                )

                # ---- Phase 2: unconfirmed (pending) orders --------------------
                job.current_phase = "unconfirmed"
                job.status = "fetching"
                job.current_page = 0
                job.shipments_fetched = 0
                db.commit()

                logger.info(f"Job {job_id}: Fetching unconfirmed shipments for vendor {vendor_id}")

                unconfirmed_shipments = client.get_all_shipments_paginated(
                    state="unconfirmed",
                    page_size=50,
                    progress_callback=on_fetch_progress,
                )

                logger.info(
                    f"Job {job_id}: Fetched {len(unconfirmed_shipments)} unconfirmed shipments"
                )

                job.status = "processing"
                job.orders_processed = 0
                db.commit()

                unconfirmed_stats = orders.import_historical_shipments(
                    vendor_id=vendor_id,
                    shipments=unconfirmed_shipments,
                    match_products=True,
                    progress_callback=make_processing_callback("unconfirmed"),
                )

                # NOTE(review): the column is named `declined_stats` but stores
                # the unconfirmed-phase counters — confirm against the model.
                job.declined_stats = _snapshot(unconfirmed_stats)

                # Product-matching counters accumulate across both phases.
                job.products_matched += unconfirmed_stats["products_matched"]
                job.products_not_found += unconfirmed_stats["products_not_found"]

                logger.info(
                    f"Job {job_id}: Unconfirmed phase complete - "
                    f"imported={unconfirmed_stats['imported']}, "
                    f"updated={unconfirmed_stats['updated']}, "
                    f"skipped={unconfirmed_stats['skipped']}"
                )

                # ---- Completed ------------------------------------------------
                job.status = "completed"
                job.completed_at = datetime.now(UTC)
                db.commit()

                # Reflect success on the stored credentials record.
                credentials.update_sync_status(vendor_id, "success", None)

                logger.info(f"Job {job_id}: Historical import completed successfully")

                return {
                    "job_id": job_id,
                    "confirmed": confirmed_stats,
                    "unconfirmed": unconfirmed_stats,
                }

        except LetzshopClientError as e:
            logger.error(f"Job {job_id}: Letzshop API error: {e}")
            job.status = "failed"
            job.error_message = f"Letzshop API error: {e}"
            job.completed_at = datetime.now(UTC)

            # Resolve a display name for the admin notification.
            orders = _get_order_service(db)
            vendor = orders.get_vendor(vendor_id)
            vendor_name = vendor.name if vendor else f"Vendor {vendor_id}"

            admin_notification_service.notify_order_sync_failure(
                db=db,
                vendor_name=vendor_name,
                error_message=f"Historical import failed: {str(e)[:150]}",
                vendor_id=vendor_id,
            )

            db.commit()

            credentials = _get_credentials_service(db)
            credentials.update_sync_status(vendor_id, "failed", str(e))
            raise  # Re-raise for Celery retry

        except Exception as e:
            logger.error(f"Job {job_id}: Unexpected error: {e}", exc_info=True)
            job.status = "failed"
            job.error_message = str(e)[:500]
            job.completed_at = datetime.now(UTC)

            # Resolve a display name for the admin notification.
            orders = _get_order_service(db)
            vendor = orders.get_vendor(vendor_id)
            vendor_name = vendor.name if vendor else f"Vendor {vendor_id}"

            admin_notification_service.notify_critical_error(
                db=db,
                error_type="Historical Import",
                error_message=f"Import job {job_id} failed for {vendor_name}: {str(e)[:150]}",
                details={"job_id": job_id, "vendor_id": vendor_id, "vendor_name": vendor_name},
            )

            db.commit()
            raise  # Re-raise for Celery retry
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Vendor Directory Sync
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@celery_app.task(
    bind=True,
    base=DatabaseTask,
    name="app.tasks.celery_tasks.letzshop.sync_vendor_directory",
    max_retries=2,
    default_retry_delay=300,
    autoretry_for=(Exception,),
    retry_backoff=True,
)
def sync_vendor_directory(self) -> dict[str, Any]:
    """
    Refresh the local Letzshop vendor directory cache.

    Fetches all vendors from Letzshop's public GraphQL API and updates the
    local letzshop_vendor_cache table. Intended to be scheduled periodically
    (e.g. daily) via Celery beat.

    Returns:
        dict: Sync statistics including created, updated, and error counts.
    """
    with self.get_db() as db:
        try:
            logger.info("Starting Letzshop vendor directory sync...")

            syncer = LetzshopVendorSyncService(db)

            def log_progress(page: int, fetched: int, total: int):
                """Log per-page progress; nothing is persisted per page."""
                logger.info(f"Vendor sync progress: page {page}, {fetched}/{total} vendors")

            stats = syncer.sync_all_vendors(progress_callback=log_progress)

            logger.info(
                f"Vendor directory sync completed: "
                f"{stats.get('created', 0)} created, "
                f"{stats.get('updated', 0)} updated, "
                f"{stats.get('errors', 0)} errors"
            )

            # Partial failures do not abort the sync; surface them to admins.
            if stats.get("errors", 0) > 0:
                admin_notification_service.notify_system_info(
                    db=db,
                    title="Letzshop Vendor Sync Completed with Errors",
                    message=(
                        f"Synced {stats.get('total_fetched', 0)} vendors. "
                        f"Errors: {stats.get('errors', 0)}"
                    ),
                    details=stats,
                )
                db.commit()

            return stats

        except Exception as e:
            logger.error(f"Vendor directory sync failed: {e}", exc_info=True)

            # Notify admins of the failure before letting Celery retry.
            admin_notification_service.notify_critical_error(
                db=db,
                error_type="Vendor Directory Sync",
                error_message=f"Failed to sync Letzshop vendor directory: {str(e)[:200]}",
                details={"error": str(e)},
            )
            db.commit()
            raise  # Re-raise for Celery retry
|
||||
__all__ = ["process_historical_import", "sync_vendor_directory"]
|
||||
|
||||
Reference in New Issue
Block a user