# tests/performance/test_api_performance.py
import time

import pytest

from models.database.product import Product


@pytest.mark.performance
@pytest.mark.slow
@pytest.mark.database
class TestPerformance:
    """Wall-clock performance tests for the product API endpoints.

    NOTE(review): the latency thresholds assume a local test database;
    they may need loosening on slow CI runners — confirm against CI timings.
    """

    @staticmethod
    def _seed(db, products):
        """Bulk-insert *products* and commit in a single transaction."""
        db.add_all(products)
        db.commit()

    @staticmethod
    def _timed_get(client, url, headers):
        """Issue a GET request and return ``(response, elapsed_seconds)``.

        Uses ``time.perf_counter()`` — a monotonic, high-resolution timer —
        instead of ``time.time()``, whose wall-clock value can jump under
        NTP adjustments and make timing assertions flaky.
        """
        start = time.perf_counter()
        response = client.get(url, headers=headers)
        return response, time.perf_counter() - start

    def test_product_list_performance(self, client, auth_headers, db):
        """Test performance of product listing with many products"""
        self._seed(
            db,
            [
                Product(
                    product_id=f"PERF{i:03d}",
                    title=f"Performance Test Product {i}",
                    price=f"{i}.99",
                    marketplace="Performance",
                )
                for i in range(100)
            ],
        )
        response, elapsed = self._timed_get(
            client, "/api/v1/product?limit=100", auth_headers
        )
        assert response.status_code == 200
        assert len(response.json()["products"]) == 100
        assert elapsed < 2.0  # Should complete within 2 seconds

    def test_search_performance(self, client, auth_headers, db):
        """Test search performance"""
        self._seed(
            db,
            [
                Product(
                    product_id=f"SEARCH{i:03d}",
                    title=f"Searchable Product {i}",
                    description=f"This is a searchable product number {i}",
                    brand="SearchBrand",
                    marketplace="SearchMarket",
                )
                for i in range(50)
            ],
        )
        response, elapsed = self._timed_get(
            client, "/api/v1/product?search=Searchable", auth_headers
        )
        assert response.status_code == 200
        assert response.json()["total"] == 50
        assert elapsed < 1.0  # Search should be fast

    def test_database_query_performance(self, client, auth_headers, db):
        """Test database query performance with complex filters"""
        # Spread rows across several brands/marketplaces/categories so the
        # filtered query below actually has to discriminate.
        brands = ["Brand1", "Brand2", "Brand3"]
        marketplaces = ["Market1", "Market2"]
        self._seed(
            db,
            [
                Product(
                    product_id=f"COMPLEX{i:03d}",
                    title=f"Complex Product {i}",
                    brand=brands[i % 3],
                    marketplace=marketplaces[i % 2],
                    price=f"{10 + (i % 50)}.99",
                    google_product_category=f"Category{i % 5}",
                )
                for i in range(200)
            ],
        )
        response, elapsed = self._timed_get(
            client,
            "/api/v1/product?brand=Brand1&marketplace=Market1&limit=50",
            auth_headers,
        )
        assert response.status_code == 200
        # Complex query should still be reasonably fast
        assert elapsed < 1.5

    def test_pagination_performance_large_dataset(self, client, auth_headers, db):
        """Test pagination performance with large dataset"""
        self._seed(
            db,
            [
                Product(
                    product_id=f"LARGE{i:04d}",
                    title=f"Large Dataset Product {i}",
                    marketplace="LargeTest",
                )
                for i in range(500)
            ],
        )
        # Pagination should be fast regardless of offset.
        for offset in (0, 100, 250, 400):
            response, elapsed = self._timed_get(
                client,
                f"/api/v1/product?skip={offset}&limit=20",
                auth_headers,
            )
            assert response.status_code == 200
            assert len(response.json()["products"]) == 20
            assert elapsed < 1.0