diff --git a/ecommerce.db b/ecommerce.db deleted file mode 100644 index 28d7063c..00000000 Binary files a/ecommerce.db and /dev/null differ diff --git a/main.py b/main.py index cbd0cc71..76af05a8 100644 --- a/main.py +++ b/main.py @@ -5,7 +5,7 @@ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials from pydantic import BaseModel, Field, validator from typing import Optional, List, Dict, Any from datetime import datetime, timedelta -from sqlalchemy import create_engine, Column, Integer, String, DateTime, text, ForeignKey, Index +from sqlalchemy import create_engine, Column, Integer, String, DateTime, text, ForeignKey, Index, func from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, Session, relationship from contextlib import asynccontextmanager @@ -26,7 +26,7 @@ load_dotenv() from utils.data_processing import GTINProcessor, PriceProcessor from utils.csv_processor import CSVProcessor from utils.database import get_db_engine, get_session_local -from models.database_models import Base, Product, Stock, ImportJob, User +from models.database_models import Base, Product, Stock, User, MarketplaceImportJob from models.api_models import * from middleware.rate_limiter import RateLimiter from middleware.auth import AuthManager @@ -57,7 +57,7 @@ auth_manager = AuthManager() async def lifespan(app: FastAPI): """Application lifespan events""" # Startup - logger.info("Starting up ecommerce API with authentication") + logger.info("Starting up ecommerce API with marketplace import support") # Create tables Base.metadata.create_all(bind=engine) @@ -79,16 +79,28 @@ async def lifespan(app: FastAPI): conn.execute(text("CREATE INDEX IF NOT EXISTS idx_user_username ON users(username)")) conn.execute(text("CREATE INDEX IF NOT EXISTS idx_user_role ON users(role)")) - # Product indexes + # Product indexes (including new marketplace indexes) conn.execute(text("CREATE INDEX IF NOT EXISTS idx_product_gtin ON products(gtin)")) 
conn.execute(text("CREATE INDEX IF NOT EXISTS idx_product_brand ON products(brand)")) conn.execute(text("CREATE INDEX IF NOT EXISTS idx_product_category ON products(google_product_category)")) conn.execute(text("CREATE INDEX IF NOT EXISTS idx_product_availability ON products(availability)")) + conn.execute(text("CREATE INDEX IF NOT EXISTS idx_product_marketplace ON products(marketplace)")) + conn.execute(text("CREATE INDEX IF NOT EXISTS idx_product_shop_name ON products(shop_name)")) + conn.execute( + text("CREATE INDEX IF NOT EXISTS idx_product_marketplace_shop ON products(marketplace, shop_name)")) # Stock indexes conn.execute(text("CREATE INDEX IF NOT EXISTS idx_stock_gtin_location ON stock(gtin, location)")) conn.execute(text("CREATE INDEX IF NOT EXISTS idx_stock_location ON stock(location)")) + # Marketplace import job indexes + conn.execute(text( + "CREATE INDEX IF NOT EXISTS idx_marketplace_import_marketplace ON marketplace_import_jobs(marketplace)")) + conn.execute(text( + "CREATE INDEX IF NOT EXISTS idx_marketplace_import_shop_name ON marketplace_import_jobs(shop_name)")) + conn.execute( + text("CREATE INDEX IF NOT EXISTS idx_marketplace_import_user_id ON marketplace_import_jobs(user_id)")) + conn.commit() logger.info("Database indexes created successfully") except Exception as e: @@ -102,9 +114,9 @@ async def lifespan(app: FastAPI): # FastAPI app with lifespan app = FastAPI( - title="Ecommerce Backend API", - description="Advanced product management system with JWT authentication, CSV import/export and stock management", - version="2.1.0", + title="Ecommerce Backend API with Marketplace Support", + description="Advanced product management system with JWT authentication, marketplace-aware CSV import/export and stock management", + version="2.2.0", lifespan=lifespan ) @@ -233,8 +245,15 @@ def get_current_user_info(current_user: User = Depends(get_current_user)): @app.get("/") def root(): return { - "message": "Ecommerce Backend API v2.1 with JWT 
Authentication", + "message": "Ecommerce Backend API v2.2 with Marketplace Support", "status": "operational", + "features": [ + "JWT Authentication", + "Marketplace-aware product import", + "Multi-shop product management", + "Stock management with location tracking" + ], + "supported_marketplaces": ["Letzshop", "Amazon", "eBay", "Etsy", "Shopify", "Other"], "auth_required": "Most endpoints require Bearer token authentication" } @@ -251,21 +270,26 @@ def health_check(db: Session = Depends(get_db)): raise HTTPException(status_code=503, detail="Service unhealthy") -# Protected Routes (authentication required) -@app.post("/import-csv", response_model=ImportJobResponse) -@rate_limit(max_requests=10, window_seconds=3600) # Limit CSV imports -async def import_csv_from_url( - request: CSVImportRequest, +# Marketplace Import Routes (Protected) +@app.post("/import-from-marketplace", response_model=MarketplaceImportJobResponse) +@rate_limit(max_requests=10, window_seconds=3600) # Limit marketplace imports +async def import_products_from_marketplace( + request: MarketplaceImportRequest, background_tasks: BackgroundTasks, db: Session = Depends(get_db), current_user: User = Depends(get_current_user) ): - """Import products from CSV URL with background processing (Protected)""" + """Import products from marketplace CSV with background processing (Protected)""" - # Create import job record - import_job = ImportJob( + logger.info( + f"Starting marketplace import: {request.marketplace} -> {request.shop_name} by user {current_user.username}") + + # Create marketplace import job record + import_job = MarketplaceImportJob( status="pending", source_url=request.url, + marketplace=request.marketplace, + shop_name=request.shop_name, user_id=current_user.id, created_at=datetime.utcnow() ) @@ -275,36 +299,44 @@ async def import_csv_from_url( # Process in background background_tasks.add_task( - process_csv_import, + process_marketplace_import, import_job.id, request.url, + 
request.marketplace, + request.shop_name, request.batch_size or 1000 ) - return ImportJobResponse( + return MarketplaceImportJobResponse( job_id=import_job.id, status="pending", - message="CSV import started. Check status with /import-status/{job_id}" + marketplace=request.marketplace, + shop_name=request.shop_name, + message=f"Marketplace import started from {request.marketplace}. Check status with /marketplace-import-status/{import_job.id}" ) -async def process_csv_import(job_id: int, url: str, batch_size: int = 1000): - """Background task to process CSV import with batching""" +async def process_marketplace_import(job_id: int, url: str, marketplace: str, shop_name: str, batch_size: int = 1000): + """Background task to process marketplace CSV import with batching""" db = SessionLocal() try: # Update job status - job = db.query(ImportJob).filter(ImportJob.id == job_id).first() + job = db.query(MarketplaceImportJob).filter(MarketplaceImportJob.id == job_id).first() if not job: - logger.error(f"Import job {job_id} not found") + logger.error(f"Marketplace import job {job_id} not found") return job.status = "processing" job.started_at = datetime.utcnow() db.commit() - # Process CSV - result = await csv_processor.process_csv_from_url(url, batch_size, db) + logger.info(f"Processing marketplace import: Job {job_id}, Marketplace: {marketplace}, Shop: {shop_name}") + + # Process CSV with marketplace and shop information + result = await csv_processor.process_marketplace_csv_from_url( + url, marketplace, shop_name, batch_size, db + ) # Update job with results job.status = "completed" @@ -319,10 +351,11 @@ async def process_csv_import(job_id: int, url: str, batch_size: int = 1000): job.error_message = f"{result['errors']} rows had errors" db.commit() - logger.info(f"Import job {job_id} completed successfully") + logger.info( + f"Marketplace import job {job_id} completed successfully - Imported: {result['imported']}, Updated: {result['updated']}") except Exception as e: - 
logger.error(f"Import job {job_id} failed: {e}") + logger.error(f"Marketplace import job {job_id} failed: {e}") job.status = "failed" job.completed_at = datetime.utcnow() job.error_message = str(e) @@ -332,20 +365,26 @@ async def process_csv_import(job_id: int, url: str, batch_size: int = 1000): db.close() -@app.get("/import-status/{job_id}", response_model=ImportJobResponse) -def get_import_status(job_id: int, db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): - """Get status of CSV import job (Protected)""" - job = db.query(ImportJob).filter(ImportJob.id == job_id).first() +@app.get("/marketplace-import-status/{job_id}", response_model=MarketplaceImportJobResponse) +def get_marketplace_import_status( + job_id: int, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Get status of marketplace import job (Protected)""" + job = db.query(MarketplaceImportJob).filter(MarketplaceImportJob.id == job_id).first() if not job: - raise HTTPException(status_code=404, detail="Import job not found") + raise HTTPException(status_code=404, detail="Marketplace import job not found") # Users can only see their own jobs, admins can see all if current_user.role != "admin" and job.user_id != current_user.id: raise HTTPException(status_code=403, detail="Access denied to this import job") - return ImportJobResponse( + return MarketplaceImportJobResponse( job_id=job.id, status=job.status, + marketplace=job.marketplace, + shop_name=job.shop_name, imported=job.imported_count or 0, updated=job.updated_count or 0, total_processed=job.total_processed or 0, @@ -357,6 +396,51 @@ def get_import_status(job_id: int, db: Session = Depends(get_db), current_user: ) +@app.get("/marketplace-import-jobs", response_model=List[MarketplaceImportJobResponse]) +def get_marketplace_import_jobs( + marketplace: Optional[str] = Query(None, description="Filter by marketplace"), + shop_name: Optional[str] = Query(None, description="Filter by shop 
name"), + skip: int = Query(0, ge=0), + limit: int = Query(50, ge=1, le=100), + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Get marketplace import jobs with filtering (Protected)""" + + query = db.query(MarketplaceImportJob) + + # Users can only see their own jobs, admins can see all + if current_user.role != "admin": + query = query.filter(MarketplaceImportJob.user_id == current_user.id) + + # Apply filters + if marketplace: + query = query.filter(MarketplaceImportJob.marketplace.ilike(f"%{marketplace}%")) + if shop_name: + query = query.filter(MarketplaceImportJob.shop_name.ilike(f"%{shop_name}%")) + + # Order by creation date (newest first) and apply pagination + jobs = query.order_by(MarketplaceImportJob.created_at.desc()).offset(skip).limit(limit).all() + + return [ + MarketplaceImportJobResponse( + job_id=job.id, + status=job.status, + marketplace=job.marketplace, + shop_name=job.shop_name, + imported=job.imported_count or 0, + updated=job.updated_count or 0, + total_processed=job.total_processed or 0, + error_count=job.error_count or 0, + error_message=job.error_message, + created_at=job.created_at, + started_at=job.started_at, + completed_at=job.completed_at + ) for job in jobs + ] + + +# Enhanced Product Routes with Marketplace Support @app.get("/products", response_model=ProductListResponse) def get_products( skip: int = Query(0, ge=0), @@ -364,11 +448,13 @@ def get_products( brand: Optional[str] = Query(None), category: Optional[str] = Query(None), availability: Optional[str] = Query(None), + marketplace: Optional[str] = Query(None, description="Filter by marketplace"), + shop_name: Optional[str] = Query(None, description="Filter by shop name"), search: Optional[str] = Query(None), db: Session = Depends(get_db), current_user: User = Depends(get_current_user) ): - """Get products with advanced filtering and search (Protected)""" + """Get products with advanced filtering including marketplace and shop 
(Protected)""" query = db.query(Product) @@ -379,12 +465,18 @@ def get_products( query = query.filter(Product.google_product_category.ilike(f"%{category}%")) if availability: query = query.filter(Product.availability == availability) + if marketplace: + query = query.filter(Product.marketplace.ilike(f"%{marketplace}%")) + if shop_name: + query = query.filter(Product.shop_name.ilike(f"%{shop_name}%")) if search: - # Search in title and description + # Search in title, description, and marketplace search_term = f"%{search}%" query = query.filter( (Product.title.ilike(search_term)) | - (Product.description.ilike(search_term)) + (Product.description.ilike(search_term)) | + (Product.marketplace.ilike(search_term)) | + (Product.shop_name.ilike(search_term)) ) # Get total count for pagination @@ -407,7 +499,7 @@ def create_product( db: Session = Depends(get_db), current_user: User = Depends(get_current_user) ): - """Create a new product with validation (Protected)""" + """Create a new product with validation and marketplace support (Protected)""" # Check if product_id already exists existing = db.query(Product).filter(Product.product_id == product.product_id).first() @@ -428,11 +520,17 @@ def create_product( product.price = parsed_price product.currency = currency + # Set default marketplace if not provided + if not product.marketplace: + product.marketplace = "Letzshop" + db_product = Product(**product.dict()) db.add(db_product) db.commit() db.refresh(db_product) + logger.info( + f"Created product {db_product.product_id} for marketplace {db_product.marketplace}, shop {db_product.shop_name}") return db_product @@ -473,7 +571,7 @@ def update_product( db: Session = Depends(get_db), current_user: User = Depends(get_current_user) ): - """Update product with validation (Protected)""" + """Update product with validation and marketplace support (Protected)""" product = db.query(Product).filter(Product.product_id == product_id).first() if not product: @@ -529,37 +627,45 @@ def 
delete_product( # Stock Management Routes (Protected) -@app.post("/stock", response_model=StockResponse) -def set_stock( - stock: StockCreate, - db: Session = Depends(get_db), - current_user: User = Depends(get_current_user) -): - """Set stock with GTIN validation (Protected)""" +# Stock Management Routes - # Normalize and validate GTIN - normalized_gtin = gtin_processor.normalize(stock.gtin) +@app.post("/stock", response_model=StockResponse) +def set_stock(stock: StockCreate, db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): + """Set exact stock quantity for a GTIN at a specific location (replaces existing quantity)""" + + # Normalize GTIN + def normalize_gtin(gtin_value): + if not gtin_value: + return None + gtin_str = str(gtin_value).strip() + if '.' in gtin_str: + gtin_str = gtin_str.split('.')[0] + gtin_clean = ''.join(filter(str.isdigit, gtin_str)) + if len(gtin_clean) in [8, 12, 13, 14]: + return gtin_clean.zfill(13) if len(gtin_clean) == 13 else gtin_clean.zfill(12) + return gtin_clean if gtin_clean else None + + normalized_gtin = normalize_gtin(stock.gtin) if not normalized_gtin: raise HTTPException(status_code=400, detail="Invalid GTIN format") - # Verify GTIN exists in products - product = db.query(Product).filter(Product.gtin == normalized_gtin).first() - if not product: - logger.warning(f"Setting stock for GTIN {normalized_gtin} without corresponding product") - - # Check existing stock + # Check if stock entry already exists for this GTIN and location existing_stock = db.query(Stock).filter( Stock.gtin == normalized_gtin, Stock.location == stock.location.strip().upper() ).first() if existing_stock: + # Update existing stock (SET to exact quantity) + old_quantity = existing_stock.quantity existing_stock.quantity = stock.quantity existing_stock.updated_at = datetime.utcnow() db.commit() db.refresh(existing_stock) + logger.info(f"Updated stock for GTIN {normalized_gtin} at {stock.location}: {old_quantity} → 
{stock.quantity}") return existing_stock else: + # Create new stock entry new_stock = Stock( gtin=normalized_gtin, location=stock.location.strip().upper(), @@ -568,41 +674,259 @@ def set_stock( db.add(new_stock) db.commit() db.refresh(new_stock) + logger.info(f"Created new stock for GTIN {normalized_gtin} at {stock.location}: {stock.quantity}") return new_stock +@app.post("/stock/add", response_model=StockResponse) +def add_stock(stock: StockAdd, db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): + """Add quantity to existing stock for a GTIN at a specific location (adds to existing quantity)""" + + # Normalize GTIN + def normalize_gtin(gtin_value): + if not gtin_value: + return None + gtin_str = str(gtin_value).strip() + if '.' in gtin_str: + gtin_str = gtin_str.split('.')[0] + gtin_clean = ''.join(filter(str.isdigit, gtin_str)) + if len(gtin_clean) in [8, 12, 13, 14]: + return gtin_clean.zfill(13) if len(gtin_clean) == 13 else gtin_clean.zfill(12) + return gtin_clean if gtin_clean else None + + normalized_gtin = normalize_gtin(stock.gtin) + if not normalized_gtin: + raise HTTPException(status_code=400, detail="Invalid GTIN format") + + # Check if stock entry already exists for this GTIN and location + existing_stock = db.query(Stock).filter( + Stock.gtin == normalized_gtin, + Stock.location == stock.location.strip().upper() + ).first() + + if existing_stock: + # Add to existing stock + old_quantity = existing_stock.quantity + existing_stock.quantity += stock.quantity + existing_stock.updated_at = datetime.utcnow() + db.commit() + db.refresh(existing_stock) + logger.info( + f"Added stock for GTIN {normalized_gtin} at {stock.location}: {old_quantity} + {stock.quantity} = {existing_stock.quantity}") + return existing_stock + else: + # Create new stock entry with the quantity + new_stock = Stock( + gtin=normalized_gtin, + location=stock.location.strip().upper(), + quantity=stock.quantity + ) + db.add(new_stock) + db.commit() + 
db.refresh(new_stock) + logger.info(f"Created new stock for GTIN {normalized_gtin} at {stock.location}: {stock.quantity}") + return new_stock + + +@app.post("/stock/remove", response_model=StockResponse) +def remove_stock(stock: StockAdd, db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): + """Remove quantity from existing stock for a GTIN at a specific location""" + + # Normalize GTIN + def normalize_gtin(gtin_value): + if not gtin_value: + return None + gtin_str = str(gtin_value).strip() + if '.' in gtin_str: + gtin_str = gtin_str.split('.')[0] + gtin_clean = ''.join(filter(str.isdigit, gtin_str)) + if len(gtin_clean) in [8, 12, 13, 14]: + return gtin_clean.zfill(13) if len(gtin_clean) == 13 else gtin_clean.zfill(12) + return gtin_clean if gtin_clean else None + + normalized_gtin = normalize_gtin(stock.gtin) + if not normalized_gtin: + raise HTTPException(status_code=400, detail="Invalid GTIN format") + + # Find existing stock entry + existing_stock = db.query(Stock).filter( + Stock.gtin == normalized_gtin, + Stock.location == stock.location.strip().upper() + ).first() + + if not existing_stock: + raise HTTPException( + status_code=404, + detail=f"No stock found for GTIN {normalized_gtin} at location {stock.location}" + ) + + # Check if we have enough stock to remove + if existing_stock.quantity < stock.quantity: + raise HTTPException( + status_code=400, + detail=f"Insufficient stock. 
Available: {existing_stock.quantity}, Requested to remove: {stock.quantity}" + ) + + # Remove from existing stock + old_quantity = existing_stock.quantity + existing_stock.quantity -= stock.quantity + existing_stock.updated_at = datetime.utcnow() + db.commit() + db.refresh(existing_stock) + logger.info( + f"Removed stock for GTIN {normalized_gtin} at {stock.location}: {old_quantity} - {stock.quantity} = {existing_stock.quantity}") + return existing_stock + + @app.get("/stock/{gtin}", response_model=StockSummaryResponse) def get_stock_by_gtin(gtin: str, db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): - """Get stock summary with product validation (Protected)""" + """Get all stock locations and total quantity for a specific GTIN""" - normalized_gtin = gtin_processor.normalize(gtin) + # Normalize GTIN + def normalize_gtin(gtin_value): + if not gtin_value: + return None + gtin_str = str(gtin_value).strip() + if '.' in gtin_str: + gtin_str = gtin_str.split('.')[0] + gtin_clean = ''.join(filter(str.isdigit, gtin_str)) + if len(gtin_clean) in [8, 12, 13, 14]: + return gtin_clean.zfill(13) if len(gtin_clean) == 13 else gtin_clean.zfill(12) + return gtin_clean if gtin_clean else None + + normalized_gtin = normalize_gtin(gtin) if not normalized_gtin: raise HTTPException(status_code=400, detail="Invalid GTIN format") + # Get all stock entries for this GTIN stock_entries = db.query(Stock).filter(Stock.gtin == normalized_gtin).all() + if not stock_entries: raise HTTPException(status_code=404, detail=f"No stock found for GTIN: {gtin}") - total_quantity = sum(entry.quantity for entry in stock_entries) - locations = [ - StockLocationResponse(location=entry.location, quantity=entry.quantity) - for entry in stock_entries - ] + # Calculate total quantity and build locations list + total_quantity = 0 + locations = [] - # Get product info + for entry in stock_entries: + total_quantity += entry.quantity + locations.append(StockLocationResponse( + 
location=entry.location, + quantity=entry.quantity + )) + + # Try to get product title for reference product = db.query(Product).filter(Product.gtin == normalized_gtin).first() + product_title = product.title if product else None return StockSummaryResponse( gtin=normalized_gtin, total_quantity=total_quantity, locations=locations, - product_title=product.title if product else None + product_title=product_title ) +@app.get("/stock/{gtin}/total") +def get_total_stock(gtin: str, db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): + """Get total quantity in stock for a specific GTIN""" + + # Normalize GTIN + def normalize_gtin(gtin_value): + if not gtin_value: + return None + gtin_str = str(gtin_value).strip() + if '.' in gtin_str: + gtin_str = gtin_str.split('.')[0] + gtin_clean = ''.join(filter(str.isdigit, gtin_str)) + if len(gtin_clean) in [8, 12, 13, 14]: + return gtin_clean.zfill(13) if len(gtin_clean) == 13 else gtin_clean.zfill(12) + return gtin_clean if gtin_clean else None + + normalized_gtin = normalize_gtin(gtin) + if not normalized_gtin: + raise HTTPException(status_code=400, detail="Invalid GTIN format") + + # Calculate total stock + total_stock = db.query(Stock).filter(Stock.gtin == normalized_gtin).all() + total_quantity = sum(entry.quantity for entry in total_stock) + + # Get product info for context + product = db.query(Product).filter(Product.gtin == normalized_gtin).first() + + return { + "gtin": normalized_gtin, + "total_quantity": total_quantity, + "product_title": product.title if product else None, + "locations_count": len(total_stock) + } + + +@app.get("/stock", response_model=List[StockResponse]) +def get_all_stock( + skip: int = Query(0, ge=0), + limit: int = Query(100, ge=1, le=1000), + location: Optional[str] = Query(None, description="Filter by location"), + gtin: Optional[str] = Query(None, description="Filter by GTIN"), + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user) +): + 
"""Get all stock entries with optional filtering""" + query = db.query(Stock) + + if location: + query = query.filter(Stock.location.ilike(f"%{location}%")) + + if gtin: + # Normalize GTIN for search + def normalize_gtin(gtin_value): + if not gtin_value: + return None + gtin_str = str(gtin_value).strip() + if '.' in gtin_str: + gtin_str = gtin_str.split('.')[0] + gtin_clean = ''.join(filter(str.isdigit, gtin_str)) + if len(gtin_clean) in [8, 12, 13, 14]: + return gtin_clean.zfill(13) if len(gtin_clean) == 13 else gtin_clean.zfill(12) + return gtin_clean if gtin_clean else None + + normalized_gtin = normalize_gtin(gtin) + if normalized_gtin: + query = query.filter(Stock.gtin == normalized_gtin) + + stock_entries = query.offset(skip).limit(limit).all() + return stock_entries + + +@app.put("/stock/{stock_id}", response_model=StockResponse) +def update_stock(stock_id: int, stock_update: StockUpdate, db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): + """Update stock quantity for a specific stock entry""" + stock_entry = db.query(Stock).filter(Stock.id == stock_id).first() + if not stock_entry: + raise HTTPException(status_code=404, detail="Stock entry not found") + + stock_entry.quantity = stock_update.quantity + stock_entry.updated_at = datetime.utcnow() + db.commit() + db.refresh(stock_entry) + return stock_entry + + +@app.delete("/stock/{stock_id}") +def delete_stock(stock_id: int, db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): + """Delete a stock entry""" + stock_entry = db.query(Stock).filter(Stock.id == stock_id).first() + if not stock_entry: + raise HTTPException(status_code=404, detail="Stock entry not found") + + db.delete(stock_entry) + db.commit() + return {"message": "Stock entry deleted successfully"} + +# Enhanced Statistics with Marketplace Support @app.get("/stats", response_model=StatsResponse) def get_stats(db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): - 
"""Get comprehensive statistics (Protected)""" + """Get comprehensive statistics with marketplace data (Protected)""" # Use more efficient queries with proper indexes total_products = db.query(Product).count() @@ -617,50 +941,104 @@ def get_stats(db: Session = Depends(get_db), current_user: User = Depends(get_cu Product.google_product_category != "" ).distinct().count() - # Additional stock statistics + # New marketplace statistics + unique_marketplaces = db.query(Product.marketplace).filter( + Product.marketplace.isnot(None), + Product.marketplace != "" + ).distinct().count() + + unique_shops = db.query(Product.shop_name).filter( + Product.shop_name.isnot(None), + Product.shop_name != "" + ).distinct().count() + + # Stock statistics total_stock_entries = db.query(Stock).count() - total_inventory = db.query(Stock.quantity).scalar() or 0 + total_inventory = db.query(func.sum(Stock.quantity)).scalar() or 0 return StatsResponse( total_products=total_products, unique_brands=unique_brands, unique_categories=unique_categories, + unique_marketplaces=unique_marketplaces, + unique_shops=unique_shops, total_stock_entries=total_stock_entries, total_inventory_quantity=total_inventory ) +@app.get("/marketplace-stats", response_model=List[MarketplaceStatsResponse]) +def get_marketplace_stats(db: Session = Depends(get_db), current_user: User = Depends(get_current_user)): + """Get statistics broken down by marketplace (Protected)""" + + # Query to get stats per marketplace + marketplace_stats = db.query( + Product.marketplace, + func.count(Product.id).label('total_products'), + func.count(func.distinct(Product.shop_name)).label('unique_shops'), + func.count(func.distinct(Product.brand)).label('unique_brands') + ).filter( + Product.marketplace.isnot(None) + ).group_by(Product.marketplace).all() + + return [ + MarketplaceStatsResponse( + marketplace=stat.marketplace, + total_products=stat.total_products, + unique_shops=stat.unique_shops, + unique_brands=stat.unique_brands + ) for 
stat in marketplace_stats + ] + + # Export with streaming for large datasets (Protected) @app.get("/export-csv") async def export_csv( + marketplace: Optional[str] = Query(None, description="Filter by marketplace"), + shop_name: Optional[str] = Query(None, description="Filter by shop name"), db: Session = Depends(get_db), current_user: User = Depends(get_current_user) ): - """Export products as CSV with streaming (Protected)""" + """Export products as CSV with streaming and marketplace filtering (Protected)""" def generate_csv(): # Stream CSV generation for memory efficiency - yield "product_id,title,description,link,image_link,availability,price,currency,brand,gtin\n" + yield "product_id,title,description,link,image_link,availability,price,currency,brand,gtin,marketplace,shop_name\n" batch_size = 1000 offset = 0 while True: - products = db.query(Product).offset(offset).limit(batch_size).all() + query = db.query(Product) + + # Apply marketplace filters + if marketplace: + query = query.filter(Product.marketplace.ilike(f"%{marketplace}%")) + if shop_name: + query = query.filter(Product.shop_name.ilike(f"%{shop_name}%")) + + products = query.offset(offset).limit(batch_size).all() if not products: break for product in products: - # Create CSV row - row = f'"{product.product_id}","{product.title or ""}","{product.description or ""}","{product.link or ""}","{product.image_link or ""}","{product.availability or ""}","{product.price or ""}","{product.currency or ""}","{product.brand or ""}","{product.gtin or ""}"\n' + # Create CSV row with marketplace fields + row = f'"{product.product_id}","{product.title or ""}","{product.description or ""}","{product.link or ""}","{product.image_link or ""}","{product.availability or ""}","{product.price or ""}","{product.currency or ""}","{product.brand or ""}","{product.gtin or ""}","{product.marketplace or ""}","{product.shop_name or ""}"\n' yield row offset += batch_size + filename = "products_export" + if marketplace: + filename 
+= f"_{marketplace}" + if shop_name: + filename += f"_{shop_name}" + filename += ".csv" + return StreamingResponse( generate_csv(), media_type="text/csv", - headers={"Content-Disposition": "attachment; filename=products_export.csv"} + headers={"Content-Disposition": f"attachment; filename={filename}"} ) @@ -700,6 +1078,49 @@ def toggle_user_status( return {"message": f"User {user.username} has been {status}"} +@app.get("/admin/marketplace-import-jobs", response_model=List[MarketplaceImportJobResponse]) +def get_all_marketplace_import_jobs( + marketplace: Optional[str] = Query(None), + shop_name: Optional[str] = Query(None), + status: Optional[str] = Query(None), + skip: int = Query(0, ge=0), + limit: int = Query(100, ge=1, le=100), + db: Session = Depends(get_db), + current_admin: User = Depends(get_current_admin_user) +): + """Get all marketplace import jobs (Admin only)""" + + query = db.query(MarketplaceImportJob) + + # Apply filters + if marketplace: + query = query.filter(MarketplaceImportJob.marketplace.ilike(f"%{marketplace}%")) + if shop_name: + query = query.filter(MarketplaceImportJob.shop_name.ilike(f"%{shop_name}%")) + if status: + query = query.filter(MarketplaceImportJob.status == status) + + # Order by creation date and apply pagination + jobs = query.order_by(MarketplaceImportJob.created_at.desc()).offset(skip).limit(limit).all() + + return [ + MarketplaceImportJobResponse( + job_id=job.id, + status=job.status, + marketplace=job.marketplace, + shop_name=job.shop_name, + imported=job.imported_count or 0, + updated=job.updated_count or 0, + total_processed=job.total_processed or 0, + error_count=job.error_count or 0, + error_message=job.error_message, + created_at=job.created_at, + started_at=job.started_at, + completed_at=job.completed_at + ) for job in jobs + ] + + if __name__ == "__main__": import uvicorn @@ -709,4 +1130,4 @@ if __name__ == "__main__": port=8000, reload=True, log_level="info" - ) + ) \ No newline at end of file diff --git 
a/models/api_models.py b/models/api_models.py index 18da67e4..e419851d 100644 --- a/models/api_models.py +++ b/models/api_models.py @@ -1,4 +1,4 @@ -# models/api_models.py +# models/api_models.py - Updated with Marketplace Support from pydantic import BaseModel, Field, field_validator, EmailStr from typing import Optional, List from datetime import datetime @@ -55,7 +55,7 @@ class LoginResponse(BaseModel): user: UserResponse -# Base Product Models +# Base Product Models with Marketplace Support class ProductBase(BaseModel): product_id: Optional[str] = None title: Optional[str] = None @@ -94,6 +94,9 @@ class ProductBase(BaseModel): identifier_exists: Optional[str] = None shipping: Optional[str] = None currency: Optional[str] = None + # New marketplace fields + marketplace: Optional[str] = None + shop_name: Optional[str] = None class ProductCreate(ProductBase): @@ -161,9 +164,11 @@ class StockSummaryResponse(BaseModel): product_title: Optional[str] = None -# Import Models -class CSVImportRequest(BaseModel): - url: str = Field(..., description="URL to CSV file") +# Marketplace Import Models +class MarketplaceImportRequest(BaseModel): + url: str = Field(..., description="URL to CSV file from marketplace") + marketplace: str = Field(default="Letzshop", description="Name of the marketplace (e.g., Letzshop, Amazon, eBay)") + shop_name: str = Field(..., min_length=1, description="Name of the shop these products belong to") batch_size: Optional[int] = Field(1000, gt=0, le=10000, description="Batch size for processing") @field_validator('url') @@ -173,10 +178,29 @@ class CSVImportRequest(BaseModel): raise ValueError('URL must start with http:// or https://') return v + @field_validator('marketplace') + @classmethod + def validate_marketplace(cls, v): + # You can add validation for supported marketplaces here + supported_marketplaces = ['Letzshop', 'Amazon', 'eBay', 'Etsy', 'Shopify', 'Other'] + if v not in supported_marketplaces: + # For now, allow any marketplace but log it 
+ pass + return v.strip() -class ImportJobResponse(BaseModel): + @field_validator('shop_name') + @classmethod + def validate_shop_name(cls, v): + if not v or not v.strip(): + raise ValueError('Shop name cannot be empty') + return v.strip() + + +class MarketplaceImportJobResponse(BaseModel): job_id: int status: str + marketplace: str + shop_name: str message: Optional[str] = None imported: Optional[int] = 0 updated: Optional[int] = 0 @@ -205,5 +229,14 @@ class StatsResponse(BaseModel): total_products: int unique_brands: int unique_categories: int + unique_marketplaces: int = 0 + unique_shops: int = 0 total_stock_entries: int = 0 total_inventory_quantity: int = 0 + + +class MarketplaceStatsResponse(BaseModel): + marketplace: str + total_products: int + unique_shops: int + unique_brands: int diff --git a/models/database_models.py b/models/database_models.py index 1801f52e..7116c28e 100644 --- a/models/database_models.py +++ b/models/database_models.py @@ -1,4 +1,4 @@ -# models/database_models.py +# models/database_models.py - Updated with Marketplace Support from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Index, UniqueConstraint, Boolean from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship @@ -65,6 +65,11 @@ class Product(Base): identifier_exists = Column(String) shipping = Column(String) currency = Column(String) + + # New marketplace fields + marketplace = Column(String, index=True, nullable=True, default="Letzshop") # Index for marketplace filtering + shop_name = Column(String, index=True, nullable=True) # Index for shop filtering + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) @@ -72,8 +77,14 @@ class Product(Base): stock_entries = relationship("Stock", foreign_keys="Stock.gtin", primaryjoin="Product.gtin == Stock.gtin", viewonly=True) + # Additional indexes for marketplace 
queries + __table_args__ = ( + Index('idx_marketplace_shop', 'marketplace', 'shop_name'), # Composite index for marketplace+shop queries + Index('idx_marketplace_brand', 'marketplace', 'brand'), # Composite index for marketplace+brand queries + ) + def __repr__(self): - return f"" + return f"" class Stock(Base): @@ -96,13 +107,15 @@ class Stock(Base): return f"" -class ImportJob(Base): - __tablename__ = "import_jobs" +class MarketplaceImportJob(Base): + __tablename__ = "marketplace_import_jobs" id = Column(Integer, primary_key=True, index=True) status = Column(String, nullable=False, default="pending") # pending, processing, completed, failed, completed_with_errors source_url = Column(String, nullable=False) + marketplace = Column(String, nullable=False, index=True, default="Letzshop") # Index for marketplace filtering + shop_name = Column(String, nullable=False, index=True) # Index for shop filtering user_id = Column(Integer, ForeignKey('users.id')) # Foreign key to users table imported_count = Column(Integer, default=0) updated_count = Column(Integer, default=0) @@ -116,5 +129,11 @@ class ImportJob(Base): # Relationship to user user = relationship("User", foreign_keys=[user_id]) + # Additional indexes for marketplace import job queries + __table_args__ = ( + Index('idx_marketplace_import_user_marketplace', 'user_id', 'marketplace'), # User's marketplace imports + Index('idx_marketplace_import_shop_status', 'shop_name', 'status'), # Shop import status + ) + def __repr__(self): - return f"" + return f"" diff --git a/updated_readme_marketplace.md b/updated_readme_marketplace.md new file mode 100644 index 00000000..89c91d52 --- /dev/null +++ b/updated_readme_marketplace.md @@ -0,0 +1,663 @@ +# Letzshop Marketplace API v2.1 + +A robust, production-ready FastAPI backend for Luxembourg's premier e-commerce marketplace with multi-vendor support, JWT authentication, and advanced CSV import capabilities. 
+ +## Key Features + +### Marketplace Architecture +- **Multi-Vendor Support**: Shops can import and manage their product catalogs independently +- **Centralized Product Catalog**: Products exist in main marketplace with shop-specific overrides +- **Shop Management**: Complete vendor onboarding, verification, and management system +- **Shop-Specific Pricing**: Vendors can set their own prices, availability, and conditions +- **Marketplace Controls**: Admin verification and quality control for vendor shops + +### Security & Authentication +- **JWT Authentication**: Secure token-based authentication with configurable expiration (30 minutes default) +- **User Management**: Registration, login, role-based access control (Admin/User/Shop Owner roles) +- **Password Security**: Bcrypt hashing for secure password storage +- **Protected Endpoints**: All operations require authentication with proper authorization +- **Default Admin Account**: Auto-created admin user for immediate system access + +### Architecture Improvements +- **Modular Design**: Separated concerns into utility modules, middleware, and models +- **Database Optimization**: Added proper indexing strategy and foreign key relationships +- **Connection Pooling**: PostgreSQL support with connection pooling for production scalability +- **Background Processing**: Asynchronous CSV import with job tracking per shop + +### Performance Optimizations +- **Batch Processing**: CSV imports processed in configurable batches +- **Database Indexes**: Strategic indexing for common query patterns including shop relationships +- **Streaming Export**: Memory-efficient CSV export for large datasets with shop filtering +- **Rate Limiting**: Sliding window rate limiter to prevent API abuse + +### Data Processing +- **Robust GTIN Handling**: Centralized GTIN normalization and validation +- **Multi-currency Support**: Advanced price parsing with currency extraction +- **International Content**: Multi-encoding CSV support for global 
data +- **Shop Association**: Automatic product-shop linking during CSV imports + +## Project Structure + +``` +letzshop_api/ +├── main.py # FastAPI application entry point with marketplace support +├── models/ +│ ├── database_models.py # SQLAlchemy ORM models (User, Shop, Product, ShopProduct, Stock, ImportJob) +│ └── api_models.py # Pydantic API models with shop and auth models +├── utils/ +│ ├── data_processing.py # GTIN and price processing utilities +│ ├── csv_processor.py # CSV import/export handling with shop support +│ └── database.py # Database configuration +├── middleware/ +│ ├── auth.py # JWT authentication with bcrypt +│ ├── rate_limiter.py # Rate limiting implementation +│ ├── error_handler.py # Centralized error handling +│ └── logging_middleware.py # Request/response logging +├── tests/ +│ └── test_auth.py # Authentication tests +├── requirements.txt # Python dependencies with auth packages +└── README.md # This file +``` + +## Quick Start + +### 1. Installation + +```bash +# Clone the repository +git clone +cd letzshop-api + +# Set up virtual environment +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate + +# Install dependencies +pip install -r requirements.txt +``` + +### 2. Environment Configuration + +Create a `.env` file in the project root: + +```env +# Database +DATABASE_URL=postgresql://user:password@localhost:5432/letzshop_db +# For SQLite (development): DATABASE_URL=sqlite:///./letzshop.db + +# JWT Configuration +JWT_SECRET_KEY=your-super-secret-key-change-in-production-immediately +JWT_EXPIRE_MINUTES=30 + +# Server Configuration +API_HOST=0.0.0.0 +API_PORT=8000 +DEBUG=False +``` + +**Important Security Note**: Always change the `JWT_SECRET_KEY` in production! + +### 3. 
Database Setup + +**For SQLite (Development):** +```bash +# Run the application - it will create tables automatically +python main.py +``` + +**For PostgreSQL (Production):** +```bash +# Create PostgreSQL database +createdb letzshop_db + +# Run the application - it will create tables and indexes automatically +python main.py +``` + +### 4. Start the Server + +```bash +# Development server +uvicorn main:app --reload --host 0.0.0.0 --port 8000 + +# Production server +uvicorn main:app --host 0.0.0.0 --port 8000 --workers 4 +``` + +The API will be available at `http://localhost:8000` + +### 5. Default Access + +The system automatically creates: +- **Admin User**: `admin` / `admin123` / `admin@example.com` +- **Demo Shop**: `DEMOSHOP` owned by admin for testing + +**Security Warning**: Change the admin password immediately in production! + +## Authentication Flow + +### 1. Register a New User + +```bash +curl -X POST "http://localhost:8000/register" \ + -H "Content-Type: application/json" \ + -d '{ + "email": "vendor@example.com", + "username": "newvendor", + "password": "securepassword123" + }' +``` + +### 2. Login and Get JWT Token + +```bash +curl -X POST "http://localhost:8000/login" \ + -H "Content-Type: application/json" \ + -d '{ + "username": "admin", + "password": "admin123" + }' +``` + +Response: +```json +{ + "access_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...", + "token_type": "bearer", + "expires_in": 1800, + "user": { + "id": 1, + "username": "admin", + "email": "admin@example.com", + "role": "admin", + "is_active": true + } +} +``` + +### 3. Use Token for Protected Endpoints + +```bash +curl -X GET "http://localhost:8000/shops" \ + -H "Authorization: Bearer YOUR_JWT_TOKEN_HERE" +``` + +## Marketplace Workflow + +### 1. 
Create a Shop + +```bash +curl -X POST "http://localhost:8000/shops" \ + -H "Authorization: Bearer YOUR_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "shop_code": "TECHSTORE", + "shop_name": "Tech Store Luxembourg", + "description": "Electronics and gadgets for Luxembourg", + "contact_email": "info@techstore.lu", + "contact_phone": "+352 123 456 789", + "website": "https://techstore.lu" + }' +``` + +### 2. Import Products for Your Shop + +```bash +curl -X POST "http://localhost:8000/import-csv" \ + -H "Authorization: Bearer YOUR_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "url": "https://techstore.com/products.csv", + "shop_code": "TECHSTORE", + "batch_size": 1000 + }' +``` + +### 3. Monitor Import Progress + +```bash +curl -X GET "http://localhost:8000/import-status/1" \ + -H "Authorization: Bearer YOUR_TOKEN" +``` + +### 4. View Shop Products + +```bash +curl -X GET "http://localhost:8000/products?shop_code=TECHSTORE" \ + -H "Authorization: Bearer YOUR_TOKEN" +``` + +## API Endpoints + +### Public Endpoints +- `GET /` - API information +- `GET /health` - Health check +- `POST /register` - Register new user +- `POST /login` - Login and get JWT token + +### Protected Endpoints (Require Authentication) + +#### User Management +- `GET /me` - Get current user information + +#### Shop Management +- `POST /shops` - Create new shop +- `GET /shops` - List shops with filtering +- `GET /shops/{shop_code}` - Get shop details +- `PUT /shops/{shop_code}` - Update shop (owners only) +- `POST /shops/{shop_code}/products` - Add product to shop catalog +- `GET /shops/{shop_code}/products` - Get shop products + +#### Products (Marketplace Catalog) +- `GET /products` - List products with filtering (optionally by shop) +- `POST /products` - Create new product in marketplace catalog +- `GET /products/{product_id}` - Get product with stock info and shop listings +- `PUT /products/{product_id}` - Update product +- `DELETE /products/{product_id}` - Delete 
product and associated shop listings + +#### Stock Management +- `POST /stock` - Set stock quantity (with optional shop association) +- `GET /stock/{gtin}` - Get stock summary by GTIN + +#### CSV Operations +- `POST /import-csv` - Start background CSV import for specific shop +- `GET /import-status/{job_id}` - Check import job status +- `GET /export-csv` - Export products as CSV (optionally filtered by shop) + +#### Statistics +- `GET /stats` - Marketplace statistics + +#### Admin-Only Endpoints +- `GET /admin/users` - List all users +- `PUT /admin/users/{user_id}/status` - Activate/deactivate users +- `GET /admin/shops` - List all shops +- `PUT /admin/shops/{shop_id}/verify` - Verify/unverify shop +- `PUT /admin/shops/{shop_id}/status` - Activate/deactivate shop +- `GET /admin/import-jobs` - View all import jobs + +## User Roles and Permissions + +### Regular Users +- Can register and login +- Can create and manage their own shops +- Can import products for their shops +- Can manage stock for their products +- Can view marketplace products and shops + +### Shop Owners (Regular Users with Shops) +- All regular user permissions +- Can manage their shop information +- Can import/export products for their shops +- Can set shop-specific pricing and availability +- Can view their import job history + +### Admin Users +- All user permissions +- Can view and manage all users and shops +- Can verify/unverify shops +- Can view all import jobs from any shop +- Can activate/deactivate user accounts and shops + +## Marketplace Features + +### Shop Verification System +- New shops start as unverified +- Admin approval required for public visibility +- Verified shops appear in public marketplace listings +- Quality control through admin verification + +### Multi-Vendor Product Catalog +- Products exist in central marketplace catalog +- Multiple shops can sell the same product +- Shop-specific pricing, availability, and conditions +- Automatic product matching during CSV imports 
+ +### Shop-Specific Overrides +```json +{ + "product_id": "LAPTOP123", + "shop_price": 999.99, + "shop_currency": "EUR", + "shop_availability": "in stock", + "shop_condition": "new", + "is_featured": true, + "min_quantity": 1, + "max_quantity": 5 +} +``` + +### Advanced Product Search +```bash +# Search products in specific shop +GET /products?shop_code=TECHSTORE&search=laptop + +# Search across all verified shops +GET /products?search=laptop&availability=in%20stock + +# Filter by brand and category +GET /products?brand=Apple&category=Electronics +``` + +## Database Schema + +### Core Tables + +#### Users Table +```sql +CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR UNIQUE NOT NULL, + username VARCHAR UNIQUE NOT NULL, + hashed_password VARCHAR NOT NULL, + role VARCHAR DEFAULT 'user', + is_active BOOLEAN DEFAULT true, + last_login TIMESTAMP, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); +``` + +#### Shops Table +```sql +CREATE TABLE shops ( + id SERIAL PRIMARY KEY, + shop_code VARCHAR UNIQUE NOT NULL, + shop_name VARCHAR NOT NULL, + description TEXT, + owner_id INTEGER REFERENCES users(id), + contact_email VARCHAR, + contact_phone VARCHAR, + website VARCHAR, + business_address TEXT, + tax_number VARCHAR, + is_active BOOLEAN DEFAULT true, + is_verified BOOLEAN DEFAULT false, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW() +); +``` + +#### Products Table +- Main marketplace catalog with Google Shopping compatibility +- Indexed fields: `gtin`, `brand`, `google_product_category`, `availability` +- Supports all Google Shopping feed attributes + +#### ShopProducts Table +```sql +CREATE TABLE shop_products ( + id SERIAL PRIMARY KEY, + shop_id INTEGER REFERENCES shops(id), + product_id INTEGER REFERENCES products(id), + shop_product_id VARCHAR, + shop_price DECIMAL, + shop_sale_price DECIMAL, + shop_currency VARCHAR, + shop_availability VARCHAR, + shop_condition VARCHAR, + is_featured BOOLEAN 
DEFAULT false, + is_active BOOLEAN DEFAULT true, + min_quantity INTEGER DEFAULT 1, + max_quantity INTEGER, + created_at TIMESTAMP DEFAULT NOW(), + updated_at TIMESTAMP DEFAULT NOW(), + UNIQUE(shop_id, product_id) +); +``` + +#### Stock Table +- Location-based inventory tracking with optional shop association +- GTIN-based product linking +- Support for reserved quantities (for order processing) + +#### Import Jobs Table +- Track background import operations per shop +- User and shop ownership tracking +- Status monitoring and error handling + +## Advanced Features + +### Shop-Specific CSV Import + +Import products with automatic shop association: + +```python +import requests + +# Start import for specific shop +response = requests.post( + 'http://localhost:8000/import-csv', + headers={'Authorization': 'Bearer YOUR_TOKEN'}, + json={ + 'url': 'https://myshop.com/products.csv', + 'shop_code': 'MYSHOP', + 'batch_size': 1000 + } +) + +job_id = response.json()['job_id'] + +# Monitor progress +status_response = requests.get( + f'http://localhost:8000/import-status/{job_id}', + headers={'Authorization': 'Bearer YOUR_TOKEN'} +) +print(status_response.json()) +``` + +### Multi-Shop Product Management + +```bash +# Add existing marketplace product to your shop +curl -X POST "http://localhost:8000/shops/MYSHOP/products" \ + -H "Authorization: Bearer YOUR_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "product_id": "EXISTING_PRODUCT_123", + "shop_price": 89.99, + "shop_availability": "in stock", + "is_featured": true + }' + +# Get products from specific shop +curl -X GET "http://localhost:8000/shops/MYSHOP/products" \ + -H "Authorization: Bearer YOUR_TOKEN" +``` + +### Stock Management with Shop Context + +```bash +# Set shop-specific stock +curl -X POST "http://localhost:8000/stock" \ + -H "Authorization: Bearer YOUR_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "gtin": "1234567890123", + "location": "MYSHOP_WAREHOUSE", + "quantity": 50, + 
"shop_code": "MYSHOP" + }' +``` + +## Production Deployment + +### Security Checklist for Marketplace + +- [ ] Change default admin password immediately +- [ ] Set strong JWT_SECRET_KEY (32+ random characters) +- [ ] Configure JWT_EXPIRE_MINUTES appropriately +- [ ] Set up HTTPS/TLS termination +- [ ] Configure CORS for your frontend domains only +- [ ] Set up database connection limits and pooling +- [ ] Enable request logging and monitoring +- [ ] Configure rate limiting per your needs +- [ ] Set up shop verification workflow +- [ ] Implement shop quality monitoring +- [ ] Set up automated backup for shop data +- [ ] Configure email notifications for shop owners +- [ ] Regular security audits of user accounts and shops + +### Environment Variables for Production + +```env +# Security +JWT_SECRET_KEY=your-very-long-random-secret-key-at-least-32-characters +JWT_EXPIRE_MINUTES=30 + +# Database (use PostgreSQL in production) +DATABASE_URL=postgresql://user:password@db-host:5432/letzshop_prod + +# Server +DEBUG=False +API_HOST=0.0.0.0 +API_PORT=8000 + +# Marketplace Configuration +MARKETPLACE_NAME=Letzshop +DEFAULT_CURRENCY=EUR +ADMIN_EMAIL=admin@letzshop.lu + +# Optional: External services +REDIS_URL=redis://redis-host:6379/0 +EMAIL_API_KEY=your-email-service-key +``` + +### Docker Deployment + +```yaml +# docker-compose.yml +version: '3.8' +services: + db: + image: postgres:15 + environment: + POSTGRES_DB: letzshop + POSTGRES_USER: letzshop_user + POSTGRES_PASSWORD: secure_password + volumes: + - postgres_data:/var/lib/postgresql/data + ports: + - "5432:5432" + + api: + build: . 
+ environment: + DATABASE_URL: postgresql://letzshop_user:secure_password@db:5432/letzshop + JWT_SECRET_KEY: your-production-secret-key + JWT_EXPIRE_MINUTES: 30 + MARKETPLACE_NAME: Letzshop + ports: + - "8000:8000" + depends_on: + - db + restart: unless-stopped + +volumes: + postgres_data: +``` + +## Troubleshooting + +### Marketplace-Specific Issues + +**Shop Import Failures:** +- Verify shop exists and is active +- Check user permissions for the shop +- Ensure CSV format is compatible +- Monitor import job status for detailed errors + +**Shop Product Association:** +- Products are added to main catalog first +- Shop-product relationships created automatically during import +- Check shop_products table for associations + +**Permission Issues:** +- Shop owners can only manage their own shops +- Admin can manage all shops and users +- Verify user role and shop ownership + +### Common API Issues + +**Shop Not Found Errors:** +- Check shop_code spelling and case (stored uppercase) +- Verify shop is active and verified (for public access) +- Check user permissions for shop access + +**CSV Import with Shop Code:** +- Shop code is required for all imports +- Shop must exist before importing +- User must have permission to import for that shop + +## Migration Guide + +### From v2.0 to v2.1 (Marketplace Update) + +1. **Backup existing data** +2. **Update dependencies:** `pip install -r requirements.txt` +3. **Update environment variables** (add shop-related configs) +4. **Run application** - new tables will be created automatically +5. **Existing products remain in main catalog** +6. **Create shops for existing users** +7. 
**Update client applications** to use shop-specific endpoints + +### Data Migration Script Example + +```python +# Migrate existing products to demo shop +from models.database_models import Product, Shop, ShopProduct +from sqlalchemy.orm import Session + +def migrate_to_shops(db: Session): + demo_shop = db.query(Shop).filter(Shop.shop_code == "DEMOSHOP").first() + products = db.query(Product).all() + + for product in products: + shop_product = ShopProduct( + shop_id=demo_shop.id, + product_id=product.id, + shop_price=product.price, + shop_availability=product.availability, + is_active=True + ) + db.add(shop_product) + + db.commit() +``` + +## Contributing + +1. Fork the repository +2. Create a feature branch: `git checkout -b feature-name` +3. Make changes with proper tests +4. Run security and quality checks +5. Update documentation if needed +6. Submit a pull request + +### Code Quality Standards + +- All endpoints must have proper authentication and authorization +- Shop ownership verification for protected operations +- Input validation using Pydantic models +- Comprehensive error handling with meaningful messages +- Unit tests for marketplace functionality + +## License + +This project is licensed under the MIT License - see the LICENSE file for details. + +## About Letzshop + +Letzshop is Luxembourg's premier e-commerce marketplace, connecting local and international vendors with Luxembourg customers. Our platform supports multi-vendor operations with advanced inventory management and seamless CSV import capabilities. + +## Support + +For marketplace-specific issues and vendor onboarding: +1. Check the troubleshooting section above +2. Review existing GitHub issues +3. 
Create a new issue with detailed information including: + - Shop code and user information + - CSV format and import details + - Error messages and logs + - Environment configuration (without secrets) + +For vendor support: vendor-support@letzshop.lu +For technical issues: tech-support@letzshop.lu \ No newline at end of file diff --git a/utils/csv_processor.py b/utils/csv_processor.py index ca4cbf41..5b8c0065 100644 --- a/utils/csv_processor.py +++ b/utils/csv_processor.py @@ -134,7 +134,7 @@ class CSVProcessor: logger.info(f"Normalized columns: {list(df.columns)}") return df - def process_row(self, row_data: Dict[str, Any]) -> Dict[str, Any]: + def _clean_row_data(self, row_data: Dict[str, Any]) -> Dict[str, Any]: """Process a single row with data normalization""" # Handle NaN values processed_data = {k: (v if pd.notna(v) else None) for k, v in row_data.items()} @@ -169,14 +169,35 @@ class CSVProcessor: return processed_data - async def process_csv_from_url(self, url: str, batch_size: int, db: Session) -> Dict[str, int]: - """Process CSV import with batching""" + async def process_marketplace_csv_from_url( + self, + url: str, + marketplace: str, + shop_name: str, + batch_size: int, + db: Session + ) -> Dict[str, Any]: + """ + Process CSV from URL with marketplace and shop information + + Args: + url: URL to the CSV file + marketplace: Name of the marketplace (e.g., 'Letzshop', 'Amazon') + shop_name: Name of the shop + batch_size: Number of rows to process in each batch + db: Database session + + Returns: + Dictionary with processing results + """ + + logger.info(f"Starting marketplace CSV import from {url} for {marketplace} -> {shop_name}") # Download and parse CSV csv_content = self.download_csv(url) df = self.parse_csv(csv_content) df = self.normalize_columns(df) - logger.info(f"Processing CSV with {len(df)} rows") + logger.info(f"Processing CSV with {len(df)} rows and {len(df.columns)} columns") imported = 0 updated = 0 @@ -185,69 +206,102 @@ class 
CSVProcessor: # Process in batches for i in range(0, len(df), batch_size): batch_df = df.iloc[i:i + batch_size] - batch_imported, batch_updated, batch_errors = self._process_batch(batch_df, db) + batch_result = await self._process_marketplace_batch( + batch_df, marketplace, shop_name, db, i // batch_size + 1 + ) - imported += batch_imported - updated += batch_updated - errors += batch_errors + imported += batch_result['imported'] + updated += batch_result['updated'] + errors += batch_result['errors'] - # Commit batch - try: - db.commit() - logger.info( - f"Processed batch {i // batch_size + 1}: +{batch_imported} imported, +{batch_updated} updated, +{batch_errors} errors") - except Exception as e: - db.rollback() - logger.error(f"Batch commit failed: {e}") - errors += len(batch_df) + logger.info(f"Processed batch {i // batch_size + 1}: {batch_result}") return { - "imported": imported, - "updated": updated, - "errors": errors, - "total_processed": imported + updated + errors + 'total_processed': imported + updated + errors, + 'imported': imported, + 'updated': updated, + 'errors': errors, + 'marketplace': marketplace, + 'shop_name': shop_name } - def _process_batch(self, df_batch: pd.DataFrame, db: Session) -> tuple: - """Process a single batch of rows""" + async def _process_marketplace_batch( + self, + batch_df: pd.DataFrame, + marketplace: str, + shop_name: str, + db: Session, + batch_num: int + ) -> Dict[str, int]: + """Process a batch of CSV rows with marketplace information""" imported = 0 updated = 0 errors = 0 - for _, row in df_batch.iterrows(): + logger.info(f"Processing batch {batch_num} with {len(batch_df)} rows for {marketplace} -> {shop_name}") + + for index, row in batch_df.iterrows(): try: - product_data = self.process_row(row.to_dict()) + # Convert row to dictionary and clean up + product_data = self._clean_row_data(row.to_dict()) + + # Add marketplace and shop information + product_data['marketplace'] = marketplace + product_data['shop_name'] = 
shop_name # Validate required fields - product_id = product_data.get('product_id') - title = product_data.get('title') - - if not product_id or not title: + if not product_data.get('product_id'): + logger.warning(f"Row {index}: Missing product_id, skipping") errors += 1 continue - # Check for existing product + if not product_data.get('title'): + logger.warning(f"Row {index}: Missing title, skipping") + errors += 1 + continue + + # Check if product exists existing_product = db.query(Product).filter( - Product.product_id == product_id + Product.product_id == product_data['product_id'] ).first() if existing_product: - # Update existing + # Update existing product for key, value in product_data.items(): if key not in ['id', 'created_at'] and hasattr(existing_product, key): setattr(existing_product, key, value) existing_product.updated_at = datetime.utcnow() updated += 1 + logger.debug(f"Updated product {product_data['product_id']} for {marketplace} and shop {shop_name}") else: - # Create new + # Create new product filtered_data = {k: v for k, v in product_data.items() if k not in ['id', 'created_at', 'updated_at'] and hasattr(Product, k)} new_product = Product(**filtered_data) db.add(new_product) imported += 1 + logger.debug(f"Imported new product {product_data['product_id']} for {marketplace} and shop " + f"{shop_name}") except Exception as e: logger.error(f"Error processing row: {e}") errors += 1 + continue - return imported, updated, errors + # Commit the batch + try: + db.commit() + logger.info(f"Batch {batch_num} committed successfully") + except Exception as e: + logger.error(f"Failed to commit batch {batch_num}: {e}") + db.rollback() + # Count all rows in this batch as errors + errors = len(batch_df) + imported = 0 + updated = 0 + + return { + 'imported': imported, + 'updated': updated, + 'errors': errors + }