MIGRATION: - Delete app/api/v1/vendor/analytics.py (duplicate - analytics module already auto-discovered) - Move usage routes from app/api/v1/vendor/usage.py to billing module - Move onboarding routes from app/api/v1/vendor/onboarding.py to marketplace module - Move features routes to billing module (admin + vendor) - Move inventory routes to inventory module (admin + vendor) - Move marketplace/letzshop routes to marketplace module - Move orders routes to orders module - Delete legacy letzshop service files (moved to marketplace module) DOCUMENTATION: - Add docs/development/migration/module-autodiscovery-migration.md with full migration history - Update docs/architecture/module-system.md with Entity Auto-Discovery Reference section - Add detailed sections for each entity type: routes, services, models, schemas, tasks, exceptions, templates, static files, locales, configuration ARCHITECTURE VALIDATION: - Add MOD-016: Routes must be in modules, not app/api/v1/ - Add MOD-017: Services must be in modules, not app/services/ - Add MOD-018: Tasks must be in modules, not app/tasks/ - Add MOD-019: Schemas must be in modules, not models/schema/ - Update scripts/validate_architecture.py with _validate_legacy_locations method - Update .architecture-rules/module.yaml with legacy location rules These rules enforce that all entities must be in self-contained modules. Legacy locations now trigger ERROR severity violations. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
345 lines
13 KiB
Python
345 lines
13 KiB
Python
# app/tasks/letzshop_tasks.py
|
|
"""Background tasks for Letzshop integration."""
|
|
|
|
import logging
|
|
from datetime import UTC, datetime
|
|
from typing import Any, Callable
|
|
|
|
from app.core.database import SessionLocal
|
|
from app.services.admin_notification_service import admin_notification_service
|
|
from app.modules.marketplace.services.letzshop import (
|
|
LetzshopClientError,
|
|
LetzshopCredentialsService,
|
|
LetzshopOrderService,
|
|
LetzshopVendorSyncService,
|
|
)
|
|
from app.modules.marketplace.models import LetzshopHistoricalImportJob
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
def _get_credentials_service(db) -> LetzshopCredentialsService:
    """Build a fresh credentials service bound to the given database session."""
    service = LetzshopCredentialsService(db)
    return service
|
|
|
|
|
|
def _get_order_service(db) -> LetzshopOrderService:
    """Build a fresh order service bound to the given database session."""
    service = LetzshopOrderService(db)
    return service
|
|
|
|
|
|
def process_historical_import(job_id: int, vendor_id: int) -> None:
    """
    Background task for historical order import with progress tracking.

    Runs two sequential phases against the Letzshop API for one vendor:
    confirmed shipments, then unconfirmed (pending) shipments. Progress is
    persisted to the LetzshopHistoricalImportJob row after each page fetch
    and each processing update so the frontend can poll for live status.

    Job lifecycle (status field): "fetching" -> "processing" (per phase)
    -> "completed", or "failed" on any error.

    Args:
        job_id: ID of the LetzshopHistoricalImportJob record to drive.
        vendor_id: ID of the vendor to import orders for.
    """
    # Dedicated session: this runs in the background, outside any request scope.
    db = SessionLocal()
    job = None  # kept in outer scope so the error handlers can mark it failed

    try:
        # Get the import job
        job = (
            db.query(LetzshopHistoricalImportJob)
            .filter(LetzshopHistoricalImportJob.id == job_id)
            .first()
        )
        if not job:
            logger.error(f"Historical import job {job_id} not found")
            return

        # Mark as started
        job.status = "fetching"
        job.started_at = datetime.now(UTC)
        db.commit()

        creds_service = _get_credentials_service(db)
        order_service = _get_order_service(db)

        # Create progress callback for fetching
        def fetch_progress_callback(page: int, total_fetched: int) -> None:
            """Update fetch progress in database."""
            # Committed on every page so polling clients see live progress.
            job.current_page = page
            job.shipments_fetched = total_fetched
            db.commit()

        # Create progress callback for processing
        def create_processing_callback(
            phase: str,
        ) -> Callable[[int, int, int, int], None]:
            """Create a processing progress callback for a phase."""
            # NOTE(review): `phase` is not referenced inside `callback` —
            # presumably reserved for future per-phase handling; confirm.

            def callback(processed: int, imported: int, updated: int, skipped: int) -> None:
                job.orders_processed = processed
                job.orders_imported = imported
                job.orders_updated = updated
                job.orders_skipped = skipped
                db.commit()

            return callback

        # Client is a context manager so API resources are released on exit.
        with creds_service.create_client(vendor_id) as client:
            # ================================================================
            # Phase 1: Import confirmed orders
            # ================================================================
            # Reset page/fetch counters before reusing them for this phase.
            job.current_phase = "confirmed"
            job.current_page = 0
            job.shipments_fetched = 0
            db.commit()

            logger.info(f"Job {job_id}: Fetching confirmed shipments for vendor {vendor_id}")

            confirmed_shipments = client.get_all_shipments_paginated(
                state="confirmed",
                page_size=50,
                progress_callback=fetch_progress_callback,
            )

            logger.info(f"Job {job_id}: Fetched {len(confirmed_shipments)} confirmed shipments")

            # Process confirmed shipments
            job.status = "processing"
            job.orders_processed = 0
            job.orders_imported = 0
            job.orders_updated = 0
            job.orders_skipped = 0
            db.commit()

            confirmed_stats = order_service.import_historical_shipments(
                vendor_id=vendor_id,
                shipments=confirmed_shipments,
                match_products=True,
                progress_callback=create_processing_callback("confirmed"),
            )

            # Store confirmed stats
            job.confirmed_stats = {
                "total": confirmed_stats["total"],
                "imported": confirmed_stats["imported"],
                "updated": confirmed_stats["updated"],
                "skipped": confirmed_stats["skipped"],
                "products_matched": confirmed_stats["products_matched"],
                "products_not_found": confirmed_stats["products_not_found"],
            }
            job.products_matched = confirmed_stats["products_matched"]
            job.products_not_found = confirmed_stats["products_not_found"]
            db.commit()

            logger.info(
                f"Job {job_id}: Confirmed phase complete - "
                f"imported={confirmed_stats['imported']}, "
                f"updated={confirmed_stats['updated']}, "
                f"skipped={confirmed_stats['skipped']}"
            )

            # ================================================================
            # Phase 2: Import unconfirmed (pending) orders
            # Note: Letzshop API has no "declined" state. Declined items
            # are tracked at the inventory unit level, not shipment level.
            # Valid states: unconfirmed, confirmed, completed, accepted
            # ================================================================
            job.current_phase = "unconfirmed"
            job.status = "fetching"
            job.current_page = 0
            job.shipments_fetched = 0
            db.commit()

            logger.info(f"Job {job_id}: Fetching unconfirmed shipments for vendor {vendor_id}")

            unconfirmed_shipments = client.get_all_shipments_paginated(
                state="unconfirmed",
                page_size=50,
                progress_callback=fetch_progress_callback,
            )

            logger.info(f"Job {job_id}: Fetched {len(unconfirmed_shipments)} unconfirmed shipments")

            # Process unconfirmed shipments
            job.status = "processing"
            job.orders_processed = 0
            db.commit()

            unconfirmed_stats = order_service.import_historical_shipments(
                vendor_id=vendor_id,
                shipments=unconfirmed_shipments,
                match_products=True,
                progress_callback=create_processing_callback("unconfirmed"),
            )

            # Store unconfirmed stats (in declined_stats field for compatibility)
            job.declined_stats = {
                "total": unconfirmed_stats["total"],
                "imported": unconfirmed_stats["imported"],
                "updated": unconfirmed_stats["updated"],
                "skipped": unconfirmed_stats["skipped"],
                "products_matched": unconfirmed_stats["products_matched"],
                "products_not_found": unconfirmed_stats["products_not_found"],
            }

            # Add to cumulative product matching stats
            job.products_matched += unconfirmed_stats["products_matched"]
            job.products_not_found += unconfirmed_stats["products_not_found"]

            logger.info(
                f"Job {job_id}: Unconfirmed phase complete - "
                f"imported={unconfirmed_stats['imported']}, "
                f"updated={unconfirmed_stats['updated']}, "
                f"skipped={unconfirmed_stats['skipped']}"
            )

            # ================================================================
            # Complete
            # ================================================================
            job.status = "completed"
            job.completed_at = datetime.now(UTC)
            db.commit()

            # Update credentials sync status
            creds_service.update_sync_status(vendor_id, "success", None)

            logger.info(f"Job {job_id}: Historical import completed successfully")

    except LetzshopClientError as e:
        # Expected failure mode: API-level errors raised by the Letzshop client.
        logger.error(f"Job {job_id}: Letzshop API error: {e}")
        if job is not None:
            try:
                job.status = "failed"
                job.error_message = f"Letzshop API error: {e}"
                job.completed_at = datetime.now(UTC)

                # Get vendor name for notification
                order_service = _get_order_service(db)
                vendor = order_service.get_vendor(vendor_id)
                vendor_name = vendor.name if vendor else f"Vendor {vendor_id}"

                # Create admin notification for sync failure
                admin_notification_service.notify_order_sync_failure(
                    db=db,
                    vendor_name=vendor_name,
                    error_message=f"Historical import failed: {str(e)[:150]}",
                    vendor_id=vendor_id,
                )

                db.commit()

                creds_service = _get_credentials_service(db)
                creds_service.update_sync_status(vendor_id, "failed", str(e))
            except Exception as commit_error:
                # Best effort: never let failure bookkeeping raise out of the task.
                logger.error(f"Job {job_id}: Failed to update job status: {commit_error}")
                db.rollback()

    except Exception as e:
        # Catch-all so the background task never propagates; the job row is
        # the only feedback channel back to the polling frontend.
        logger.error(f"Job {job_id}: Unexpected error: {e}", exc_info=True)
        if job is not None:
            try:
                job.status = "failed"
                job.error_message = str(e)
                job.completed_at = datetime.now(UTC)

                # Get vendor name for notification
                order_service = _get_order_service(db)
                vendor = order_service.get_vendor(vendor_id)
                vendor_name = vendor.name if vendor else f"Vendor {vendor_id}"

                # Create admin notification for critical error
                admin_notification_service.notify_critical_error(
                    db=db,
                    error_type="Historical Import",
                    error_message=f"Import job {job_id} failed for {vendor_name}: {str(e)[:150]}",
                    details={"job_id": job_id, "vendor_id": vendor_id, "vendor_name": vendor_name},
                )

                db.commit()
            except Exception as commit_error:
                logger.error(f"Job {job_id}: Failed to update job status: {commit_error}")
                db.rollback()

    finally:
        # Defensive guard: presumably tolerates session doubles without a
        # close() method (e.g. in tests) — confirm against callers.
        if hasattr(db, "close") and callable(db.close):
            try:
                db.close()
            except Exception as close_error:
                logger.error(f"Job {job_id}: Error closing database session: {close_error}")
|
|
|
|
|
|
# =============================================================================
|
|
# Vendor Directory Sync
|
|
# =============================================================================
|
|
|
|
|
|
def sync_letzshop_vendor_directory() -> dict[str, Any]:
    """
    Refresh the local cache of Letzshop's vendor directory.

    Fetches every vendor from Letzshop's public GraphQL API through
    LetzshopVendorSyncService and writes the results into the local
    letzshop_vendor_cache table. Intended to run periodically
    (e.g. daily) via Celery beat.

    Returns:
        Dictionary with sync statistics.

    Raises:
        Exception: any sync failure is re-raised after a best-effort
            admin notification.
    """
    session = SessionLocal()
    stats: dict[str, Any] = {}

    try:
        logger.info("Starting Letzshop vendor directory sync task...")

        def _report(page: int, fetched: int, total: int):
            """Log progress during sync."""
            logger.info(f"Vendor sync progress: page {page}, {fetched}/{total} vendors")

        stats = LetzshopVendorSyncService(session).sync_all_vendors(
            progress_callback=_report
        )

        created = stats.get("created", 0)
        updated = stats.get("updated", 0)
        errors = stats.get("errors", 0)
        logger.info(
            f"Vendor directory sync completed: "
            f"{created} created, "
            f"{updated} updated, "
            f"{errors} errors"
        )

        # Surface partial failures to admins even when the sync itself finished.
        if errors > 0:
            admin_notification_service.notify_system_info(
                db=session,
                title="Letzshop Vendor Sync Completed with Errors",
                message=(
                    f"Synced {stats.get('total_fetched', 0)} vendors. "
                    f"Errors: {errors}"
                ),
                details=stats,
            )

        return stats

    except Exception as exc:
        logger.error(f"Vendor directory sync failed: {exc}", exc_info=True)

        # Best-effort admin alert; must never mask the original failure.
        try:
            admin_notification_service.notify_critical_error(
                db=session,
                error_type="Vendor Directory Sync",
                error_message=f"Failed to sync Letzshop vendor directory: {str(exc)[:200]}",
                details={"error": str(exc)},
            )
            session.commit()
        except Exception:
            pass

        raise

    finally:
        closer = getattr(session, "close", None)
        if callable(closer):
            try:
                closer()
            except Exception as close_error:
                logger.error(f"Error closing database session: {close_error}")
|