Compare commits
393 Commits
7feacd5af8
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| b27d4ba6ff | |||
| 6da48f88c1 | |||
| 516141b41d | |||
| 4f70290af5 | |||
| 3fa159ff2a | |||
| 143248ff0f | |||
| 56c94ac2f4 | |||
| 255ac6525e | |||
| 10e37e749b | |||
| f23990a4d9 | |||
| 62b83b46a4 | |||
| f8b2429533 | |||
| 3883927be0 | |||
| 39e02f0d9b | |||
| 29593f4c61 | |||
| 220f7e3a08 | |||
| 258aa6a34b | |||
| 51bcc9f874 | |||
| eafa086c73 | |||
| ab2daf99bd | |||
| 1cf9fea40a | |||
| cd4f83f2cb | |||
| 457350908a | |||
| e759282116 | |||
| 1df1b2bfca | |||
| 51a2114e02 | |||
| 21e4ac5124 | |||
| 3ade1b9354 | |||
| b5bb9415f6 | |||
| bb3d6f0012 | |||
| c92fe1261b | |||
| ca152cd544 | |||
| 914967edcc | |||
| 64fe58c171 | |||
| 3044490a3e | |||
| adc36246b8 | |||
| dd9dc04328 | |||
| 4a60d75a13 | |||
| e98eddc168 | |||
| 8cd09f3f89 | |||
| 4c1608f78a | |||
| 24219e4d9a | |||
| fde58bea06 | |||
| 52b78ce346 | |||
| f804ff8442 | |||
| d9abb275a5 | |||
| 4b56eb7ab1 | |||
| 27ac7f3e28 | |||
| dfd42c1b10 | |||
| 297b8a8d5a | |||
| 91fb4b0757 | |||
| f4386e97ee | |||
| e8c9fc7e7d | |||
| d591200df8 | |||
| 83af32eb88 | |||
| 2a49e3d30f | |||
| 6e40e16017 | |||
| dd09bcaeec | |||
| 013eafd775 | |||
| 07cd66a0e3 | |||
| 73d453d78a | |||
| d4e9fed719 | |||
| 3e93f64c6b | |||
| 377d2d3ae8 | |||
| b51f9e8e30 | |||
| d380437594 | |||
| cff0af31be | |||
| e492e5f71c | |||
| 9a5b7dd061 | |||
| b3051b423a | |||
| bc951a36d9 | |||
| 2e043260eb | |||
| 1828ac85eb | |||
| 50a4fc38a7 | |||
| 30f3dae5a3 | |||
| 4c750f0268 | |||
| 59b0d8977a | |||
| 2bc03ed97c | |||
| 91963f3b87 | |||
| 3ae0b579d3 | |||
| 972ee1e5d0 | |||
| 70f2803dd3 | |||
| a247622d23 | |||
| 50d50fcbd0 | |||
| b306a5e8f4 | |||
| 28b08580c8 | |||
| 754bfca87d | |||
| 1decb4572c | |||
| d685341b04 | |||
| 0c6d8409c7 | |||
| f81851445e | |||
| 4748368809 | |||
| f310363f7c | |||
| 95f0eac079 | |||
| 11dcfdad73 | |||
| 01f7add8dd | |||
| 0d1007282a | |||
| 2a15c14ee8 | |||
| bc5e227d81 | |||
| 8a70259445 | |||
| 823935c016 | |||
| dab5560de8 | |||
| 157b4c6ec3 | |||
| 211c46ebbc | |||
| d81e9a3fa4 | |||
| fd0de714a4 | |||
| c6b155520c | |||
| 66b77e747d | |||
| 71b5eb1758 | |||
| b4f01210d9 | |||
| 9bceeaac9c | |||
| 332960de30 | |||
| 0455e63a2e | |||
| aaed1b2d01 | |||
| 9dee534b2f | |||
| beef3ce76b | |||
| 884a694718 | |||
| 4cafbe9610 | |||
| 19923ed26b | |||
| 46f8d227b8 | |||
| 95e4956216 | |||
| 77e520bbce | |||
| 518bace534 | |||
| fcde2d68fc | |||
| 5a33f68743 | |||
| 040cbd1962 | |||
| b679c9687d | |||
| 314360a394 | |||
| 44a0c38016 | |||
| da9e1ab293 | |||
| 5de297a804 | |||
| 4429674100 | |||
| 316ec42566 | |||
| 894832c62b | |||
| 1d90bfe044 | |||
| ce0caa5685 | |||
| 33f823aba0 | |||
| edd55cd2fd | |||
| f3344b2859 | |||
| 1107de989b | |||
| a423bcf03e | |||
| 661547f6cf | |||
| 3015a490f9 | |||
| 5b4ed79f87 | |||
| 52a5f941fe | |||
| 6161d69ba2 | |||
| f41f72b86f | |||
| 644bf158cd | |||
| f89c0382f0 | |||
| 11b8e31a29 | |||
| 0ddef13124 | |||
| 60bed05d3f | |||
| 40da2d6b11 | |||
| d96e0ea1b4 | |||
| 7d652716bb | |||
| b6047f5b7d | |||
| 366d4b9765 | |||
| 540205402f | |||
| 07fab01f6a | |||
| 6c07f6cbb2 | |||
| bc7431943a | |||
| adec17cd02 | |||
| a28d5d1de5 | |||
| 502473eee4 | |||
| 183f55c7b3 | |||
| 169a774b9c | |||
| ebbe6d62b8 | |||
| c2c0e3c740 | |||
| 4a1f71a312 | |||
| 5dd5e01dc6 | |||
| 694a1cd1a5 | |||
| 826ef2ddd2 | |||
| a1cc05cd3d | |||
| 19d267587b | |||
| 9a13aee8ed | |||
| 9c39a9703f | |||
| 395707951e | |||
| 34bf961309 | |||
| 44acf5e442 | |||
| b3224ba13d | |||
| 93b7279c3a | |||
| 29d942322d | |||
| 8c8975239a | |||
| f766a72480 | |||
| 618376aa39 | |||
| efca9734d2 | |||
| 6acd783754 | |||
| 8cf5da6914 | |||
| eee33d6a1b | |||
| aefca3115e | |||
| 319900623a | |||
| a77a8a3a98 | |||
| f141cc4e6a | |||
| 2287f4597d | |||
| 8136739233 | |||
| 2ca313c3c7 | |||
| 27802e47c2 | |||
| 14d5ff97f3 | |||
| b9b8ffadcb | |||
| 31ced5f759 | |||
| 802cc6b137 | |||
| 45260b6b82 | |||
| fa758b7e31 | |||
| a099bfdc48 | |||
| cb9a829684 | |||
| c4e9e4e646 | |||
| 8c449d7baa | |||
| 820ab1aaa4 | |||
| 2268f32f51 | |||
| b68d542258 | |||
| a7392de9f6 | |||
| 3c7e4458af | |||
| 8b147f53c6 | |||
| 784bcb9d23 | |||
| b8aa484653 | |||
| 05c53e1865 | |||
| 6dec1e3ca6 | |||
| f631283286 | |||
| f631322b4e | |||
| e61e02fb39 | |||
| b5b73559b5 | |||
| 28dca65a06 | |||
| adbecd360b | |||
| ef9ea29643 | |||
| f8a2394da5 | |||
| 4d07418f44 | |||
| bf64f82613 | |||
| 9684747d08 | |||
| 2078ce35b2 | |||
| 22ae63b414 | |||
| 78ee05f50e | |||
| 6d6eba75bf | |||
| a709adaee8 | |||
| 8d5c8a52e6 | |||
| d8f0cf16c7 | |||
| 93a2d9baff | |||
| 35d1559162 | |||
| ce822af883 | |||
| 4ebd419987 | |||
| 2b29867093 | |||
| 30c4593e0f | |||
| 8c0967e215 | |||
| 86e85a98b8 | |||
| e3a52f6536 | |||
| 4aa6f76e46 | |||
| f95db7c0b1 | |||
| 2b55e7458b | |||
| c82210795f | |||
| cb3bc3c118 | |||
| 962862ccc1 | |||
| 3053bc5d92 | |||
| 79a88b0a36 | |||
| e7f8e61717 | |||
| d480b59df4 | |||
| ce5b54f27b | |||
| 6a82d7c12d | |||
| f1e7baaa6c | |||
| 6b46a78e72 | |||
| d648c921b7 | |||
| 3df75e2e78 | |||
| 92a434530f | |||
| 01146d5c97 | |||
| d0d5aadaf7 | |||
| 56afb9192b | |||
| a4519035df | |||
| c9b2ecbdff | |||
| 1194731f33 | |||
| 12c1c3c511 | |||
| 81cf84ed28 | |||
| a6e6d9be8e | |||
| ec888f2e94 | |||
| 53dfe018c2 | |||
| 3de69e55a1 | |||
| cfce6c0ca4 | |||
| 2833ff1476 | |||
| f47c680cb8 | |||
| 32e4aa6564 | |||
| 6c78827c7f | |||
| 0389294b1a | |||
| cd935988c4 | |||
| 05d31a7fc5 | |||
| 272b62fbd3 | |||
| 32acc76b49 | |||
| d36783a7f1 | |||
| 2fc157d7b2 | |||
| 506171503d | |||
| be248222bc | |||
| 716a4e3d15 | |||
| 467b1510f4 | |||
| 5c8fbd21c7 | |||
| 1f3042547b | |||
| d7a383f3d7 | |||
| b77952bf89 | |||
| ff852f1ab3 | |||
| 42b894094a | |||
| b9ac252a9f | |||
| 51e512ec08 | |||
| f517a7ccd7 | |||
| c47a394a7b | |||
| 1eef69f300 | |||
| 1dcb0e6c33 | |||
| ef21d47533 | |||
| 6c5969e4e1 | |||
| 6a739bf670 | |||
| ffa12f0255 | |||
| 93731b7173 | |||
| e5dbd7ef1a | |||
| 167bb50f4f | |||
| 182610283d | |||
| e23788cb7d | |||
| 573b0180ad | |||
| d9fc52d47a | |||
| a8b29750a5 | |||
| 2c710ad416 | |||
| 682213fdee | |||
| 3d1586f025 | |||
| 64082ca877 | |||
| 67260e9322 | |||
| 44568893fd | |||
| 10fdf91dfa | |||
| 8ee8c398ce | |||
| 3a7cf29386 | |||
| eaab47f2f8 | |||
| 6458ab13d7 | |||
| 0b701fb847 | |||
| f67510b706 | |||
| 8c715cfde3 | |||
| 4bce16fb73 | |||
| 1cb659e3a5 | |||
| 3ec58c1524 | |||
| b382090771 | |||
| 5474fc5301 | |||
| cd596b85b3 | |||
| eedc463207 | |||
| 677e5211f9 | |||
| 10aa75aa69 | |||
| aad18c27ab | |||
| b0db8133a0 | |||
| 1b8a40f1ff | |||
| f84c5d903e | |||
| ef7187b508 | |||
| 488d5a6f0e | |||
| 3c2b559282 | |||
| 62e418c473 | |||
| 688896d856 | |||
| cf08e1a6c8 | |||
| ba130d4171 | |||
| e9253fbd84 | |||
| 34ee7bb7ad | |||
| 481deaa67d | |||
| 11f1909f68 | |||
| 9154eec871 | |||
| b0a40200c1 | |||
| 8dcc4145aa | |||
| 77b76afb3f | |||
| 8968e7d9cd | |||
| 531487f5c9 | |||
| 9c27fa02b0 | |||
| 7c43d6f4a2 | |||
| 9173448645 | |||
| 874e254c11 | |||
| 8abcea154b | |||
| c3bb496a98 | |||
| 779de02f97 | |||
| af3f04a23f | |||
| c58ceb9872 | |||
| 363eb74d22 | |||
| 011a4df2d4 | |||
| 79c985ee39 | |||
| 3a264c0a39 | |||
| 1b24269ef1 | |||
| 9c4f6064b2 | |||
| f20266167d | |||
| e3428cc4aa | |||
| 7852d09dcc | |||
| 0acfa75c8e | |||
| b265d0db51 | |||
| bf5bb69409 | |||
| d9060ed6ea | |||
| 6af9458ad4 | |||
| b9a998fb43 | |||
| ad8f1c9008 | |||
| bfb9b3c119 | |||
| 0437af67ec | |||
| ecb5309879 | |||
| 1da03e41f9 | |||
| d1fe3584ff | |||
| 0b37274140 | |||
| 0984ff7d17 | |||
| c914e10cb8 | |||
| 7a9dda282d | |||
| d201221fb1 | |||
| 68493dc6cb |
@@ -24,7 +24,9 @@ api_endpoint_rules:
|
|||||||
SCHEMA LOCATION: All response schemas must be defined in models/schema/*.py,
|
SCHEMA LOCATION: All response schemas must be defined in models/schema/*.py,
|
||||||
never inline in endpoint files. This ensures schemas are reusable and discoverable.
|
never inline in endpoint files. This ensures schemas are reusable and discoverable.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/v1/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/v1/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/**/*.py"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
- "return dict"
|
- "return dict"
|
||||||
- "-> dict"
|
- "-> dict"
|
||||||
@@ -82,7 +84,9 @@ api_endpoint_rules:
|
|||||||
# In app/api/v1/admin/my_feature.py
|
# In app/api/v1/admin/my_feature.py
|
||||||
from models.schema.my_feature import MyRequest
|
from models.schema.my_feature import MyRequest
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/v1/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/v1/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/**/*.py"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
- "from pydantic import"
|
- "from pydantic import"
|
||||||
- "from pydantic.main import"
|
- "from pydantic.main import"
|
||||||
@@ -118,7 +122,9 @@ api_endpoint_rules:
|
|||||||
- db.query() - complex queries are business logic
|
- db.query() - complex queries are business logic
|
||||||
- db.delete() - deleting entities is business logic
|
- db.delete() - deleting entities is business logic
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/v1/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/v1/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/**/*.py"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
- "db.add("
|
- "db.add("
|
||||||
- "db.delete("
|
- "db.delete("
|
||||||
@@ -139,7 +145,7 @@ api_endpoint_rules:
|
|||||||
- Dependencies (app/api/deps.py) - authentication/authorization validation
|
- Dependencies (app/api/deps.py) - authentication/authorization validation
|
||||||
- Services (app/services/) - business logic validation
|
- Services (app/services/) - business logic validation
|
||||||
|
|
||||||
The global exception handler catches all WizamartException subclasses and
|
The global exception handler catches all OrionException subclasses and
|
||||||
converts them to appropriate HTTP responses.
|
converts them to appropriate HTTP responses.
|
||||||
|
|
||||||
WRONG (endpoint raises exception):
|
WRONG (endpoint raises exception):
|
||||||
@@ -155,7 +161,9 @@ api_endpoint_rules:
|
|||||||
# Dependency guarantees token_vendor_id is present
|
# Dependency guarantees token_vendor_id is present
|
||||||
return order_service.get_orders(db, current_user.token_vendor_id)
|
return order_service.get_orders(db, current_user.token_vendor_id)
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/v1/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/v1/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/**/*.py"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
- "raise HTTPException"
|
- "raise HTTPException"
|
||||||
- "raise InvalidTokenException"
|
- "raise InvalidTokenException"
|
||||||
@@ -184,7 +192,9 @@ api_endpoint_rules:
|
|||||||
def stripe_webhook(request: Request):
|
def stripe_webhook(request: Request):
|
||||||
...
|
...
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/v1/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/v1/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/**/*.py"
|
||||||
required_if_not_public:
|
required_if_not_public:
|
||||||
- "Depends(get_current_"
|
- "Depends(get_current_"
|
||||||
auto_exclude_files:
|
auto_exclude_files:
|
||||||
@@ -197,11 +207,14 @@ api_endpoint_rules:
|
|||||||
name: "Multi-tenant endpoints must scope queries to vendor_id"
|
name: "Multi-tenant endpoints must scope queries to vendor_id"
|
||||||
severity: "error"
|
severity: "error"
|
||||||
description: |
|
description: |
|
||||||
All queries in vendor/shop contexts must filter by vendor_id.
|
All queries in vendor/storefront contexts must filter by vendor_id.
|
||||||
Use request.state.vendor_id from middleware.
|
Use request.state.vendor_id from middleware.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/v1/vendor/**/*.py"
|
file_pattern:
|
||||||
file_pattern: "app/api/v1/storefront/**/*.py"
|
- "app/api/v1/vendor/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/store*.py"
|
||||||
|
- "app/api/v1/storefront/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/storefront*.py"
|
||||||
discouraged_patterns:
|
discouraged_patterns:
|
||||||
- "db.query(.*).all()"
|
- "db.query(.*).all()"
|
||||||
|
|
||||||
@@ -248,7 +261,9 @@ api_endpoint_rules:
|
|||||||
- from models.database.*
|
- from models.database.*
|
||||||
- from app.modules.*.models.*
|
- from app.modules.*.models.*
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/**/*.py"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
- "from models\\.database\\."
|
- "from models\\.database\\."
|
||||||
- "from app\\.modules\\.[a-z_]+\\.models\\."
|
- "from app\\.modules\\.[a-z_]+\\.models\\."
|
||||||
|
|||||||
@@ -9,7 +9,9 @@ auth_rules:
|
|||||||
description: |
|
description: |
|
||||||
Authentication must use JWT tokens in Authorization: Bearer header
|
Authentication must use JWT tokens in Authorization: Bearer header
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/**/*.py"
|
||||||
enforcement: "middleware"
|
enforcement: "middleware"
|
||||||
|
|
||||||
- id: "AUTH-002"
|
- id: "AUTH-002"
|
||||||
@@ -18,7 +20,9 @@ auth_rules:
|
|||||||
description: |
|
description: |
|
||||||
Use Depends(get_current_admin/vendor/customer) for role checks
|
Use Depends(get_current_admin/vendor/customer) for role checks
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/v1/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/v1/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/**/*.py"
|
||||||
required: "Depends\\(get_current_"
|
required: "Depends\\(get_current_"
|
||||||
|
|
||||||
- id: "AUTH-003"
|
- id: "AUTH-003"
|
||||||
@@ -36,10 +40,10 @@ auth_rules:
|
|||||||
description: |
|
description: |
|
||||||
Two vendor context patterns exist - use the appropriate one:
|
Two vendor context patterns exist - use the appropriate one:
|
||||||
|
|
||||||
1. SHOP ENDPOINTS (public, no authentication required):
|
1. STOREFRONT ENDPOINTS (public, no authentication required):
|
||||||
- Use: vendor: Vendor = Depends(require_vendor_context())
|
- Use: vendor: Vendor = Depends(require_vendor_context())
|
||||||
- Vendor is detected from URL/subdomain/domain
|
- Vendor is detected from URL/subdomain/domain
|
||||||
- File pattern: app/api/v1/storefront/**/*.py
|
- File pattern: app/api/v1/storefront/**/*.py, app/modules/*/routes/api/storefront*.py
|
||||||
- Mark as public with: # public
|
- Mark as public with: # public
|
||||||
|
|
||||||
2. VENDOR API ENDPOINTS (authenticated):
|
2. VENDOR API ENDPOINTS (authenticated):
|
||||||
@@ -49,15 +53,19 @@ auth_rules:
|
|||||||
- File pattern: app/api/v1/vendor/**/*.py
|
- File pattern: app/api/v1/vendor/**/*.py
|
||||||
|
|
||||||
DEPRECATED for vendor APIs:
|
DEPRECATED for vendor APIs:
|
||||||
- require_vendor_context() - only for shop endpoints
|
- require_vendor_context() - only for storefront endpoints
|
||||||
- getattr(request.state, "vendor", None) without permission dependency
|
- getattr(request.state, "vendor", None) without permission dependency
|
||||||
|
|
||||||
See: docs/backend/vendor-in-token-architecture.md
|
See: docs/backend/vendor-in-token-architecture.md
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/api/v1/vendor/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/v1/vendor/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/store*.py"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
- "require_vendor_context\\(\\)"
|
- "require_vendor_context\\(\\)"
|
||||||
file_pattern: "app/api/v1/storefront/**/*.py"
|
file_pattern:
|
||||||
|
- "app/api/v1/storefront/**/*.py"
|
||||||
|
- "app/modules/*/routes/api/storefront*.py"
|
||||||
required_patterns:
|
required_patterns:
|
||||||
- "require_vendor_context\\(\\)|# public"
|
- "require_vendor_context\\(\\)|# public"
|
||||||
|
|
||||||
@@ -149,7 +157,9 @@ multi_tenancy_rules:
|
|||||||
description: |
|
description: |
|
||||||
In vendor/shop contexts, all database queries must filter by vendor_id
|
In vendor/shop contexts, all database queries must filter by vendor_id
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
context: "vendor_shop"
|
context: "vendor_shop"
|
||||||
required_pattern: ".filter\\(.*vendor_id.*\\)"
|
required_pattern: ".filter\\(.*vendor_id.*\\)"
|
||||||
|
|
||||||
@@ -159,5 +169,7 @@ multi_tenancy_rules:
|
|||||||
description: |
|
description: |
|
||||||
Queries must never access data from other vendors
|
Queries must never access data from other vendors
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
enforcement: "database_query_level"
|
enforcement: "database_query_level"
|
||||||
|
|||||||
@@ -10,7 +10,9 @@ exception_rules:
|
|||||||
Create domain-specific exceptions in app/exceptions/ for better
|
Create domain-specific exceptions in app/exceptions/ for better
|
||||||
error handling and clarity.
|
error handling and clarity.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/exceptions/**/*.py"
|
file_pattern:
|
||||||
|
- "app/exceptions/**/*.py"
|
||||||
|
- "app/modules/*/exceptions.py"
|
||||||
encouraged_structure: |
|
encouraged_structure: |
|
||||||
class VendorError(Exception):
|
class VendorError(Exception):
|
||||||
"""Base exception for vendor-related errors"""
|
"""Base exception for vendor-related errors"""
|
||||||
@@ -34,21 +36,25 @@ exception_rules:
|
|||||||
description: |
|
description: |
|
||||||
When catching exceptions, log them with context and stack trace.
|
When catching exceptions, log them with context and stack trace.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
encouraged_patterns:
|
encouraged_patterns:
|
||||||
- "logger.error"
|
- "logger.error"
|
||||||
- "exc_info=True"
|
- "exc_info=True"
|
||||||
|
|
||||||
- id: "EXC-004"
|
- id: "EXC-004"
|
||||||
name: "Domain exceptions must inherit from WizamartException"
|
name: "Domain exceptions must inherit from OrionException"
|
||||||
severity: "error"
|
severity: "error"
|
||||||
description: |
|
description: |
|
||||||
All custom domain exceptions must inherit from WizamartException (or its
|
All custom domain exceptions must inherit from OrionException (or its
|
||||||
subclasses like ResourceNotFoundException, ValidationException, etc.).
|
subclasses like ResourceNotFoundException, ValidationException, etc.).
|
||||||
This ensures the global exception handler catches and converts them properly.
|
This ensures the global exception handler catches and converts them properly.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/exceptions/**/*.py"
|
file_pattern:
|
||||||
required_base_class: "WizamartException"
|
- "app/exceptions/**/*.py"
|
||||||
|
- "app/modules/*/exceptions.py"
|
||||||
|
required_base_class: "OrionException"
|
||||||
example_good: |
|
example_good: |
|
||||||
class VendorNotFoundException(ResourceNotFoundException):
|
class VendorNotFoundException(ResourceNotFoundException):
|
||||||
def __init__(self, vendor_code: str):
|
def __init__(self, vendor_code: str):
|
||||||
@@ -59,7 +65,7 @@ exception_rules:
|
|||||||
severity: "error"
|
severity: "error"
|
||||||
description: |
|
description: |
|
||||||
The global exception handler must be set up in app initialization to
|
The global exception handler must be set up in app initialization to
|
||||||
catch WizamartException and convert to HTTP responses.
|
catch OrionException and convert to HTTP responses.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/main.py"
|
file_pattern: "app/main.py"
|
||||||
required_patterns:
|
required_patterns:
|
||||||
|
|||||||
@@ -157,7 +157,7 @@ javascript_rules:
|
|||||||
- Page URLs (not API calls) like window.location.href = `/vendor/${vendorCode}/...`
|
- Page URLs (not API calls) like window.location.href = `/vendor/${vendorCode}/...`
|
||||||
|
|
||||||
Why this matters:
|
Why this matters:
|
||||||
- Including vendorCode causes 404 errors ("/vendor/wizamart/orders" not found)
|
- Including vendorCode causes 404 errors ("/vendor/orion/orders" not found)
|
||||||
- The JWT token already identifies the vendor
|
- The JWT token already identifies the vendor
|
||||||
- Consistent with the API design pattern
|
- Consistent with the API design pattern
|
||||||
pattern:
|
pattern:
|
||||||
@@ -238,6 +238,50 @@ javascript_rules:
|
|||||||
exceptions:
|
exceptions:
|
||||||
- "utils.js"
|
- "utils.js"
|
||||||
|
|
||||||
|
- id: "JS-015"
|
||||||
|
name: "Use confirm_modal macros, not native confirm()"
|
||||||
|
severity: "error"
|
||||||
|
description: |
|
||||||
|
All confirmation dialogs must use the project's confirm_modal or
|
||||||
|
confirm_modal_dynamic Jinja2 macros from shared/macros/modals.html.
|
||||||
|
Never use the native browser confirm() dialog.
|
||||||
|
|
||||||
|
The modal macros provide:
|
||||||
|
- Consistent styled dialogs matching the admin/store theme
|
||||||
|
- Dark mode support
|
||||||
|
- Variant colors (danger=red, warning=yellow, info=blue)
|
||||||
|
- Icon support
|
||||||
|
- Double-confirm pattern for destructive operations
|
||||||
|
|
||||||
|
WRONG (native browser dialog):
|
||||||
|
if (!confirm('Are you sure you want to delete this?')) return;
|
||||||
|
if (!confirm(I18n.t('confirmations.delete'))) return;
|
||||||
|
|
||||||
|
RIGHT (state variable + modal macro):
|
||||||
|
// In JS: add state variable and remove confirm() guard
|
||||||
|
showDeleteModal: false,
|
||||||
|
async deleteItem() {
|
||||||
|
// No confirm() guard — modal already confirmed
|
||||||
|
await apiClient.delete('/admin/items/' + this.item.id);
|
||||||
|
}
|
||||||
|
|
||||||
|
// In template: button sets state, macro shows modal
|
||||||
|
<button @click="showDeleteModal = true">Delete</button>
|
||||||
|
{{ confirm_modal('deleteModal', 'Delete Item', 'Are you sure?',
|
||||||
|
'deleteItem()', 'showDeleteModal', 'Delete', 'Cancel', 'danger') }}
|
||||||
|
|
||||||
|
For dynamic messages (containing JS expressions):
|
||||||
|
{{ confirm_modal_dynamic('deleteModal', 'Delete Item',
|
||||||
|
"'Delete ' + item.name + '?'",
|
||||||
|
'deleteItem()', 'showDeleteModal', 'Delete', 'Cancel', 'danger') }}
|
||||||
|
pattern:
|
||||||
|
file_pattern: "static/**/js/**/*.js"
|
||||||
|
anti_patterns:
|
||||||
|
- "confirm\\("
|
||||||
|
exceptions:
|
||||||
|
- "utils.js"
|
||||||
|
- "vendor/"
|
||||||
|
|
||||||
- id: "JS-010"
|
- id: "JS-010"
|
||||||
name: "Use PlatformSettings for pagination rows per page"
|
name: "Use PlatformSettings for pagination rows per page"
|
||||||
severity: "error"
|
severity: "error"
|
||||||
|
|||||||
@@ -111,11 +111,9 @@ language_rules:
|
|||||||
function languageSelector(currentLang, enabledLanguages) { ... }
|
function languageSelector(currentLang, enabledLanguages) { ... }
|
||||||
window.languageSelector = languageSelector;
|
window.languageSelector = languageSelector;
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "static/shop/js/shop-layout.js"
|
file_patterns:
|
||||||
required_patterns:
|
- "static/shop/js/shop-layout.js"
|
||||||
- "function languageSelector"
|
- "static/vendor/js/init-alpine.js"
|
||||||
- "window.languageSelector"
|
|
||||||
file_pattern: "static/vendor/js/init-alpine.js"
|
|
||||||
required_patterns:
|
required_patterns:
|
||||||
- "function languageSelector"
|
- "function languageSelector"
|
||||||
- "window.languageSelector"
|
- "window.languageSelector"
|
||||||
@@ -247,3 +245,26 @@ language_rules:
|
|||||||
pattern:
|
pattern:
|
||||||
file_pattern: "static/locales/*.json"
|
file_pattern: "static/locales/*.json"
|
||||||
check: "valid_json"
|
check: "valid_json"
|
||||||
|
|
||||||
|
- id: "LANG-011"
|
||||||
|
name: "Use $t() not I18n.t() in HTML templates"
|
||||||
|
severity: "error"
|
||||||
|
description: |
|
||||||
|
In HTML templates, never use I18n.t() directly. It evaluates once
|
||||||
|
and does NOT re-evaluate when translations finish loading async.
|
||||||
|
|
||||||
|
WRONG (non-reactive, shows raw key then updates):
|
||||||
|
<span x-text="I18n.t('module.key')"></span>
|
||||||
|
|
||||||
|
RIGHT (reactive, updates when translations load):
|
||||||
|
<span x-text="$t('module.key')"></span>
|
||||||
|
|
||||||
|
BEST (server-side, zero flash):
|
||||||
|
<span>{{ _('module.key') }}</span>
|
||||||
|
|
||||||
|
Note: I18n.t() is fine in .js files where it's called inside
|
||||||
|
async callbacks after I18n.init() has completed.
|
||||||
|
pattern:
|
||||||
|
file_pattern: "**/*.html"
|
||||||
|
anti_patterns:
|
||||||
|
- "I18n.t("
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# Architecture Rules - Model Rules
|
# Architecture Rules - Model Rules
|
||||||
# Rules for models/database/*.py and models/schema/*.py files
|
# Rules for models/database/*.py, models/schema/*.py, app/modules/*/models/**/*.py, and app/modules/*/schemas/**/*.py files
|
||||||
|
|
||||||
model_rules:
|
model_rules:
|
||||||
|
|
||||||
@@ -10,7 +10,9 @@ model_rules:
|
|||||||
All database models must inherit from SQLAlchemy Base and use proper
|
All database models must inherit from SQLAlchemy Base and use proper
|
||||||
column definitions with types and constraints.
|
column definitions with types and constraints.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "models/database/**/*.py"
|
file_pattern:
|
||||||
|
- "models/database/**/*.py"
|
||||||
|
- "app/modules/*/models/**/*.py"
|
||||||
required_patterns:
|
required_patterns:
|
||||||
- "class.*\\(Base\\):"
|
- "class.*\\(Base\\):"
|
||||||
|
|
||||||
@@ -21,7 +23,10 @@ model_rules:
|
|||||||
Never mix SQLAlchemy and Pydantic in the same model.
|
Never mix SQLAlchemy and Pydantic in the same model.
|
||||||
SQLAlchemy = database schema, Pydantic = API validation/serialization.
|
SQLAlchemy = database schema, Pydantic = API validation/serialization.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "models/**/*.py"
|
file_pattern:
|
||||||
|
- "models/**/*.py"
|
||||||
|
- "app/modules/*/models/**/*.py"
|
||||||
|
- "app/modules/*/schemas/**/*.py"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
- "class.*\\(Base, BaseModel\\):"
|
- "class.*\\(Base, BaseModel\\):"
|
||||||
|
|
||||||
@@ -31,7 +36,9 @@ model_rules:
|
|||||||
description: |
|
description: |
|
||||||
Pydantic response models must enable from_attributes to work with SQLAlchemy models.
|
Pydantic response models must enable from_attributes to work with SQLAlchemy models.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "models/schema/**/*.py"
|
file_pattern:
|
||||||
|
- "models/schema/**/*.py"
|
||||||
|
- "app/modules/*/schemas/**/*.py"
|
||||||
required_in_response_models:
|
required_in_response_models:
|
||||||
- "from_attributes = True"
|
- "from_attributes = True"
|
||||||
|
|
||||||
@@ -51,5 +58,7 @@ model_rules:
|
|||||||
Junction/join tables use both entity names in plural:
|
Junction/join tables use both entity names in plural:
|
||||||
- Good: vendor_users, order_items, product_translations
|
- Good: vendor_users, order_items, product_translations
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "models/database/**/*.py"
|
file_pattern:
|
||||||
|
- "models/database/**/*.py"
|
||||||
|
- "app/modules/*/models/**/*.py"
|
||||||
check: "table_naming_plural"
|
check: "table_naming_plural"
|
||||||
|
|||||||
@@ -141,7 +141,7 @@ module_rules:
|
|||||||
en.json
|
en.json
|
||||||
de.json
|
de.json
|
||||||
fr.json
|
fr.json
|
||||||
lu.json
|
lb.json
|
||||||
|
|
||||||
Translation keys are namespaced as {module}.key_name
|
Translation keys are namespaced as {module}.key_name
|
||||||
pattern:
|
pattern:
|
||||||
@@ -154,16 +154,16 @@ module_rules:
|
|||||||
severity: "warning"
|
severity: "warning"
|
||||||
description: |
|
description: |
|
||||||
Self-contained modules should have an exceptions.py file defining
|
Self-contained modules should have an exceptions.py file defining
|
||||||
module-specific exceptions that inherit from WizamartException.
|
module-specific exceptions that inherit from OrionException.
|
||||||
|
|
||||||
Structure:
|
Structure:
|
||||||
app/modules/{module}/exceptions.py
|
app/modules/{module}/exceptions.py
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
# app/modules/analytics/exceptions.py
|
# app/modules/analytics/exceptions.py
|
||||||
from app.exceptions import WizamartException
|
from app.exceptions import OrionException
|
||||||
|
|
||||||
class AnalyticsException(WizamartException):
|
class AnalyticsException(OrionException):
|
||||||
"""Base exception for analytics module."""
|
"""Base exception for analytics module."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -269,14 +269,14 @@ module_rules:
|
|||||||
Module locales/ directory should have translation files for
|
Module locales/ directory should have translation files for
|
||||||
all supported languages to ensure consistent i18n.
|
all supported languages to ensure consistent i18n.
|
||||||
|
|
||||||
Supported languages: en, de, fr, lu
|
Supported languages: en, de, fr, lb
|
||||||
|
|
||||||
Structure:
|
Structure:
|
||||||
app/modules/<code>/locales/
|
app/modules/<code>/locales/
|
||||||
├── en.json
|
├── en.json
|
||||||
├── de.json
|
├── de.json
|
||||||
├── fr.json
|
├── fr.json
|
||||||
└── lu.json
|
└── lb.json
|
||||||
|
|
||||||
Missing translations will fall back to English, but it's
|
Missing translations will fall back to English, but it's
|
||||||
better to have all languages covered.
|
better to have all languages covered.
|
||||||
@@ -286,7 +286,7 @@ module_rules:
|
|||||||
- "en.json"
|
- "en.json"
|
||||||
- "de.json"
|
- "de.json"
|
||||||
- "fr.json"
|
- "fr.json"
|
||||||
- "lu.json"
|
- "lb.json"
|
||||||
|
|
||||||
- id: "MOD-007"
|
- id: "MOD-007"
|
||||||
name: "Module definition must match directory structure"
|
name: "Module definition must match directory structure"
|
||||||
@@ -692,8 +692,9 @@ module_rules:
|
|||||||
name: "Modules with routers should use get_*_with_routers pattern"
|
name: "Modules with routers should use get_*_with_routers pattern"
|
||||||
severity: "info"
|
severity: "info"
|
||||||
description: |
|
description: |
|
||||||
Modules that define routers (admin_router, vendor_router, etc.)
|
Modules that define routers should follow the lazy import pattern
|
||||||
should follow the lazy import pattern with a dedicated function:
|
with a dedicated function. Route files use `router` as the variable
|
||||||
|
name; consumer code distinguishes via `admin_router`/`store_router`.
|
||||||
|
|
||||||
def get_{module}_module_with_routers() -> ModuleDefinition:
|
def get_{module}_module_with_routers() -> ModuleDefinition:
|
||||||
|
|
||||||
@@ -704,12 +705,12 @@ module_rules:
|
|||||||
|
|
||||||
WRONG:
|
WRONG:
|
||||||
# Direct router assignment at module level
|
# Direct router assignment at module level
|
||||||
module.admin_router = admin_router
|
module.admin_router = router
|
||||||
|
|
||||||
RIGHT:
|
RIGHT:
|
||||||
def _get_admin_router():
|
def _get_admin_router():
|
||||||
from app.modules.orders.routes.admin import admin_router
|
from app.modules.orders.routes.api.admin import router
|
||||||
return admin_router
|
return router
|
||||||
|
|
||||||
def get_orders_module_with_routers() -> ModuleDefinition:
|
def get_orders_module_with_routers() -> ModuleDefinition:
|
||||||
orders_module.admin_router = _get_admin_router()
|
orders_module.admin_router = _get_admin_router()
|
||||||
@@ -761,3 +762,96 @@ module_rules:
|
|||||||
file_pattern: "main.py"
|
file_pattern: "main.py"
|
||||||
validates:
|
validates:
|
||||||
- "module_locales mount BEFORE module_static mount"
|
- "module_locales mount BEFORE module_static mount"
|
||||||
|
|
||||||
|
# =========================================================================
|
||||||
|
# Cross-Module Boundary Rules
|
||||||
|
# =========================================================================
|
||||||
|
|
||||||
|
- id: "MOD-025"
|
||||||
|
name: "Modules must NOT import models from other modules"
|
||||||
|
severity: "error"
|
||||||
|
description: |
|
||||||
|
Modules must access data from other modules through their SERVICE layer,
|
||||||
|
never by importing and querying their models directly.
|
||||||
|
|
||||||
|
This is the "services over models" principle: if module A needs data
|
||||||
|
from module B, it MUST call module B's service methods.
|
||||||
|
|
||||||
|
WRONG (direct model import):
|
||||||
|
# app/modules/orders/services/order_service.py
|
||||||
|
from app.modules.catalog.models import Product # FORBIDDEN
|
||||||
|
|
||||||
|
class OrderService:
|
||||||
|
def get_order_details(self, db, order_id):
|
||||||
|
product = db.query(Product).filter_by(id=pid).first()
|
||||||
|
|
||||||
|
RIGHT (service call):
|
||||||
|
# app/modules/orders/services/order_service.py
|
||||||
|
from app.modules.catalog.services import product_service
|
||||||
|
|
||||||
|
class OrderService:
|
||||||
|
def get_order_details(self, db, order_id):
|
||||||
|
product = product_service.get_product_by_id(db, pid)
|
||||||
|
|
||||||
|
ALSO RIGHT (provider protocol for core→optional):
|
||||||
|
# app/modules/core/services/stats_aggregator.py
|
||||||
|
from app.modules.contracts.metrics import MetricsProviderProtocol
|
||||||
|
# Discover providers through registry, no direct imports
|
||||||
|
|
||||||
|
EXCEPTIONS:
|
||||||
|
- Test fixtures may create models from other modules for setup
|
||||||
|
- TYPE_CHECKING imports for type hints are allowed
|
||||||
|
- Tenancy models (User, Store, Merchant, Platform) may be imported
|
||||||
|
as type hints in route signatures where FastAPI requires it,
|
||||||
|
but queries must go through tenancy services
|
||||||
|
|
||||||
|
WHY THIS MATTERS:
|
||||||
|
- Encapsulation: Modules own their data access patterns
|
||||||
|
- Refactoring: Module B can change its schema without breaking A
|
||||||
|
- Testability: Mock services, not database queries
|
||||||
|
- Consistency: Clear API boundaries between modules
|
||||||
|
- Decoupling: Modules can evolve independently
|
||||||
|
pattern:
|
||||||
|
file_pattern: "app/modules/*/services/**/*.py"
|
||||||
|
anti_patterns:
|
||||||
|
- "from app\\.modules\\.(?!<own_module>)\\.models import"
|
||||||
|
exceptions:
|
||||||
|
- "TYPE_CHECKING"
|
||||||
|
- "tests/"
|
||||||
|
|
||||||
|
- id: "MOD-026"
|
||||||
|
name: "Cross-module data access must use service methods"
|
||||||
|
severity: "error"
|
||||||
|
description: |
|
||||||
|
When a module needs data from another module, it must use that
|
||||||
|
module's public service API. Each module should expose service
|
||||||
|
methods for common data access patterns.
|
||||||
|
|
||||||
|
Service methods a module should expose:
|
||||||
|
- get_{entity}_by_id(db, id) -> Entity or None
|
||||||
|
- list_{entities}(db, filters) -> list[Entity]
|
||||||
|
- get_{entity}_count(db, filters) -> int
|
||||||
|
- search_{entities}(db, query, filters) -> list[Entity]
|
||||||
|
|
||||||
|
WRONG (direct query across module boundary):
|
||||||
|
# In orders module
|
||||||
|
count = db.query(func.count(Product.id)).scalar()
|
||||||
|
|
||||||
|
RIGHT (call catalog service):
|
||||||
|
# In orders module
|
||||||
|
count = product_service.get_product_count(db, store_id=store_id)
|
||||||
|
|
||||||
|
This applies to:
|
||||||
|
- Simple lookups (get by ID)
|
||||||
|
- List/search queries
|
||||||
|
- Aggregation queries (count, sum)
|
||||||
|
- Join queries (should be decomposed into service calls)
|
||||||
|
|
||||||
|
WHY THIS MATTERS:
|
||||||
|
- Single source of truth for data access logic
|
||||||
|
- Easier to add caching, validation, or access control
|
||||||
|
- Clear contract between modules
|
||||||
|
- Simpler testing with service mocks
|
||||||
|
pattern:
|
||||||
|
file_pattern: "app/modules/*/services/**/*.py"
|
||||||
|
check: "cross_module_service_usage"
|
||||||
|
|||||||
@@ -23,7 +23,9 @@ money_handling_rules:
|
|||||||
|
|
||||||
Column naming convention: Use `_cents` suffix for all monetary columns.
|
Column naming convention: Use `_cents` suffix for all monetary columns.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "models/database/**/*.py"
|
file_pattern:
|
||||||
|
- "models/database/**/*.py"
|
||||||
|
- "app/modules/*/models/**/*.py"
|
||||||
required_patterns:
|
required_patterns:
|
||||||
- "_cents = Column(Integer"
|
- "_cents = Column(Integer"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
@@ -79,7 +81,9 @@ money_handling_rules:
|
|||||||
|
|
||||||
Or use model validators to convert before response serialization.
|
Or use model validators to convert before response serialization.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "models/schema/**/*.py"
|
file_pattern:
|
||||||
|
- "models/schema/**/*.py"
|
||||||
|
- "app/modules/*/schemas/**/*.py"
|
||||||
check: "money_response_format"
|
check: "money_response_format"
|
||||||
|
|
||||||
- id: "MON-004"
|
- id: "MON-004"
|
||||||
@@ -124,7 +128,9 @@ money_handling_rules:
|
|||||||
tax = subtotal * 0.17 # Floating point!
|
tax = subtotal * 0.17 # Floating point!
|
||||||
total = subtotal + tax
|
total = subtotal + tax
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
check: "money_arithmetic"
|
check: "money_arithmetic"
|
||||||
|
|
||||||
- id: "MON-006"
|
- id: "MON-006"
|
||||||
|
|||||||
@@ -15,6 +15,10 @@ naming_rules:
|
|||||||
- "__init__.py"
|
- "__init__.py"
|
||||||
- "auth.py"
|
- "auth.py"
|
||||||
- "health.py"
|
- "health.py"
|
||||||
|
- "store.py"
|
||||||
|
- "admin.py"
|
||||||
|
- "platform.py"
|
||||||
|
- "storefront.py"
|
||||||
|
|
||||||
- id: "NAM-002"
|
- id: "NAM-002"
|
||||||
name: "Service files use SINGULAR + 'service' suffix"
|
name: "Service files use SINGULAR + 'service' suffix"
|
||||||
@@ -22,8 +26,17 @@ naming_rules:
|
|||||||
description: |
|
description: |
|
||||||
Service files should use singular name + _service (vendor_service.py)
|
Service files should use singular name + _service (vendor_service.py)
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
check: "service_naming"
|
check: "service_naming"
|
||||||
|
exceptions:
|
||||||
|
- "*_features.py"
|
||||||
|
- "*_metrics.py"
|
||||||
|
- "*_widgets.py"
|
||||||
|
- "*_aggregator.py"
|
||||||
|
- "*_provider.py"
|
||||||
|
- "*_presets.py"
|
||||||
|
|
||||||
- id: "NAM-003"
|
- id: "NAM-003"
|
||||||
name: "Model files use SINGULAR names"
|
name: "Model files use SINGULAR names"
|
||||||
@@ -31,14 +44,16 @@ naming_rules:
|
|||||||
description: |
|
description: |
|
||||||
Both database and schema model files use singular names (product.py)
|
Both database and schema model files use singular names (product.py)
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "models/**/*.py"
|
file_pattern:
|
||||||
|
- "models/**/*.py"
|
||||||
|
- "app/modules/*/models/**/*.py"
|
||||||
check: "singular_naming"
|
check: "singular_naming"
|
||||||
|
|
||||||
- id: "NAM-004"
|
- id: "NAM-004"
|
||||||
name: "Use consistent terminology: vendor not shop"
|
name: "Use consistent terminology: vendor not shop"
|
||||||
severity: "warning"
|
severity: "warning"
|
||||||
description: |
|
description: |
|
||||||
Use 'vendor' consistently, not 'shop' (except for shop frontend)
|
Use 'vendor' consistently, not 'shop' (except for storefront)
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/**/*.py"
|
file_pattern: "app/**/*.py"
|
||||||
discouraged_terms:
|
discouraged_terms:
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# Architecture Rules - Service Layer Rules
|
# Architecture Rules - Service Layer Rules
|
||||||
# Rules for app/services/**/*.py files
|
# Rules for app/services/**/*.py and app/modules/*/services/**/*.py files
|
||||||
|
|
||||||
service_layer_rules:
|
service_layer_rules:
|
||||||
|
|
||||||
@@ -10,7 +10,9 @@ service_layer_rules:
|
|||||||
Services are business logic layer - they should NOT know about HTTP.
|
Services are business logic layer - they should NOT know about HTTP.
|
||||||
Raise domain-specific exceptions instead (ValueError, custom exceptions).
|
Raise domain-specific exceptions instead (ValueError, custom exceptions).
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
- "raise HTTPException"
|
- "raise HTTPException"
|
||||||
- "from fastapi import HTTPException"
|
- "from fastapi import HTTPException"
|
||||||
@@ -22,7 +24,9 @@ service_layer_rules:
|
|||||||
Services should raise meaningful domain exceptions, not generic Exception.
|
Services should raise meaningful domain exceptions, not generic Exception.
|
||||||
Create custom exception classes for business rule violations.
|
Create custom exception classes for business rule violations.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
discouraged_patterns:
|
discouraged_patterns:
|
||||||
- "raise Exception\\("
|
- "raise Exception\\("
|
||||||
|
|
||||||
@@ -33,7 +37,9 @@ service_layer_rules:
|
|||||||
Service methods should receive database session as a parameter for testability
|
Service methods should receive database session as a parameter for testability
|
||||||
and transaction control. Never create session inside service.
|
and transaction control. Never create session inside service.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
required_in_method_signature:
|
required_in_method_signature:
|
||||||
- "db: Session"
|
- "db: Session"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
@@ -47,7 +53,9 @@ service_layer_rules:
|
|||||||
Service methods should accept Pydantic models for complex inputs
|
Service methods should accept Pydantic models for complex inputs
|
||||||
to ensure type safety and validation.
|
to ensure type safety and validation.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
encouraged_patterns:
|
encouraged_patterns:
|
||||||
- "BaseModel"
|
- "BaseModel"
|
||||||
|
|
||||||
@@ -57,7 +65,9 @@ service_layer_rules:
|
|||||||
description: |
|
description: |
|
||||||
All database queries must be scoped to vendor_id to prevent cross-tenant data access.
|
All database queries must be scoped to vendor_id to prevent cross-tenant data access.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
check: "vendor_scoping"
|
check: "vendor_scoping"
|
||||||
|
|
||||||
- id: "SVC-006"
|
- id: "SVC-006"
|
||||||
@@ -74,11 +84,22 @@ service_layer_rules:
|
|||||||
|
|
||||||
The endpoint should call db.commit() after all service operations succeed.
|
The endpoint should call db.commit() after all service operations succeed.
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
anti_patterns:
|
anti_patterns:
|
||||||
- "db.commit()"
|
- "db.commit()"
|
||||||
exceptions:
|
exceptions:
|
||||||
- "log_service.py"
|
- "log_service.py"
|
||||||
|
- "card_service.py"
|
||||||
|
- "wallet_service.py"
|
||||||
|
- "program_service.py"
|
||||||
|
- "points_service.py"
|
||||||
|
- "apple_wallet_service.py"
|
||||||
|
- "pin_service.py"
|
||||||
|
- "stamp_service.py"
|
||||||
|
- "google_wallet_service.py"
|
||||||
|
- "theme_presets.py"
|
||||||
|
|
||||||
- id: "SVC-007"
|
- id: "SVC-007"
|
||||||
name: "Service return types must match API response schemas"
|
name: "Service return types must match API response schemas"
|
||||||
@@ -113,5 +134,7 @@ service_layer_rules:
|
|||||||
result = service.get_stats(db)
|
result = service.get_stats(db)
|
||||||
StatsResponse(**result) # Raises if keys don't match
|
StatsResponse(**result) # Raises if keys don't match
|
||||||
pattern:
|
pattern:
|
||||||
file_pattern: "app/services/**/*.py"
|
file_pattern:
|
||||||
|
- "app/services/**/*.py"
|
||||||
|
- "app/modules/*/services/**/*.py"
|
||||||
check: "schema_compatibility"
|
check: "schema_compatibility"
|
||||||
|
|||||||
@@ -55,7 +55,7 @@ rules:
|
|||||||
type: file_exists
|
type: file_exists
|
||||||
paths:
|
paths:
|
||||||
- ".github/PULL_REQUEST_TEMPLATE.md"
|
- ".github/PULL_REQUEST_TEMPLATE.md"
|
||||||
- ".gitlab/merge_request_templates/*.md"
|
- "CONTRIBUTING.md"
|
||||||
message: "Pull request template recommended"
|
message: "Pull request template recommended"
|
||||||
|
|
||||||
- id: CHANGE-REV-002
|
- id: CHANGE-REV-002
|
||||||
@@ -74,7 +74,6 @@ rules:
|
|||||||
type: file_exists
|
type: file_exists
|
||||||
paths:
|
paths:
|
||||||
- ".github/CODEOWNERS"
|
- ".github/CODEOWNERS"
|
||||||
- "CODEOWNERS" # GitLab uses root CODEOWNERS or .gitlab/CODEOWNERS
|
|
||||||
- "CODEOWNERS"
|
- "CODEOWNERS"
|
||||||
message: "Consider defining code owners for critical paths"
|
message: "Consider defining code owners for critical paths"
|
||||||
|
|
||||||
@@ -91,7 +90,7 @@ rules:
|
|||||||
paths:
|
paths:
|
||||||
- ".github/workflows/ci.yml"
|
- ".github/workflows/ci.yml"
|
||||||
- ".github/workflows/test.yml"
|
- ".github/workflows/test.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
message: "CI workflow for automated testing required"
|
message: "CI workflow for automated testing required"
|
||||||
|
|
||||||
- id: CHANGE-CI-002
|
- id: CHANGE-CI-002
|
||||||
@@ -102,7 +101,7 @@ rules:
|
|||||||
type: pattern_recommended
|
type: pattern_recommended
|
||||||
paths:
|
paths:
|
||||||
- ".github/workflows/*.yml"
|
- ".github/workflows/*.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
patterns:
|
patterns:
|
||||||
- "security|bandit|safety|snyk|trivy"
|
- "security|bandit|safety|snyk|trivy"
|
||||||
message: "Consider security scanning in CI pipeline"
|
message: "Consider security scanning in CI pipeline"
|
||||||
@@ -115,7 +114,7 @@ rules:
|
|||||||
type: pattern_required
|
type: pattern_required
|
||||||
paths:
|
paths:
|
||||||
- ".github/workflows/*.yml"
|
- ".github/workflows/*.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
patterns:
|
patterns:
|
||||||
- "ruff|flake8|pylint|mypy|lint"
|
- "ruff|flake8|pylint|mypy|lint"
|
||||||
message: "Code quality checks required in CI"
|
message: "Code quality checks required in CI"
|
||||||
@@ -146,7 +145,7 @@ rules:
|
|||||||
paths:
|
paths:
|
||||||
- ".github/workflows/release.yml"
|
- ".github/workflows/release.yml"
|
||||||
- ".github/workflows/deploy.yml"
|
- ".github/workflows/deploy.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
- "Dockerfile"
|
- "Dockerfile"
|
||||||
message: "Automated deployment process recommended"
|
message: "Automated deployment process recommended"
|
||||||
|
|
||||||
@@ -199,7 +198,7 @@ rules:
|
|||||||
paths:
|
paths:
|
||||||
- "Dockerfile"
|
- "Dockerfile"
|
||||||
- ".github/workflows/*.yml"
|
- ".github/workflows/*.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
patterns:
|
patterns:
|
||||||
- "tag|version|:v"
|
- "tag|version|:v"
|
||||||
message: "Container image versioning recommended"
|
message: "Container image versioning recommended"
|
||||||
|
|||||||
@@ -122,10 +122,9 @@ rules:
|
|||||||
type: file_exists
|
type: file_exists
|
||||||
paths:
|
paths:
|
||||||
- ".github/PULL_REQUEST_TEMPLATE.md"
|
- ".github/PULL_REQUEST_TEMPLATE.md"
|
||||||
- ".gitlab/merge_request_templates/*.md"
|
|
||||||
- "CONTRIBUTING.md"
|
- "CONTRIBUTING.md"
|
||||||
- ".github/workflows/*.yml"
|
- ".github/workflows/*.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
message: "Code review process must be documented/enforced"
|
message: "Code review process must be documented/enforced"
|
||||||
|
|
||||||
- id: COMP-POL-002
|
- id: COMP-POL-002
|
||||||
@@ -138,8 +137,7 @@ rules:
|
|||||||
- ".github/CODEOWNERS"
|
- ".github/CODEOWNERS"
|
||||||
- "CODEOWNERS"
|
- "CODEOWNERS"
|
||||||
- ".github/workflows/*.yml"
|
- ".github/workflows/*.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
- ".gitlab-ci.yml"
|
|
||||||
message: "Document change approval requirements"
|
message: "Document change approval requirements"
|
||||||
|
|
||||||
- id: COMP-POL-003
|
- id: COMP-POL-003
|
||||||
@@ -166,7 +164,7 @@ rules:
|
|||||||
type: file_exists
|
type: file_exists
|
||||||
paths:
|
paths:
|
||||||
- ".github/workflows/ci.yml"
|
- ".github/workflows/ci.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
- "pytest.ini"
|
- "pytest.ini"
|
||||||
- "pyproject.toml"
|
- "pyproject.toml"
|
||||||
patterns:
|
patterns:
|
||||||
@@ -181,7 +179,7 @@ rules:
|
|||||||
type: file_exists
|
type: file_exists
|
||||||
paths:
|
paths:
|
||||||
- ".github/workflows/*.yml"
|
- ".github/workflows/*.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
patterns:
|
patterns:
|
||||||
- "deploy|release"
|
- "deploy|release"
|
||||||
message: "Deployment process must be automated and logged"
|
message: "Deployment process must be automated and logged"
|
||||||
|
|||||||
@@ -94,7 +94,7 @@ rules:
|
|||||||
paths:
|
paths:
|
||||||
- "SECURITY.md"
|
- "SECURITY.md"
|
||||||
- ".github/SECURITY.md"
|
- ".github/SECURITY.md"
|
||||||
- ".gitlab/SECURITY.md"
|
- ".gitea/SECURITY.md"
|
||||||
message: "Security policy (SECURITY.md) required"
|
message: "Security policy (SECURITY.md) required"
|
||||||
|
|
||||||
- id: DOC-SEC-002
|
- id: DOC-SEC-002
|
||||||
|
|||||||
@@ -57,7 +57,7 @@ rules:
|
|||||||
type: file_exists
|
type: file_exists
|
||||||
paths:
|
paths:
|
||||||
- ".github/workflows/*.yml"
|
- ".github/workflows/*.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
patterns:
|
patterns:
|
||||||
- "safety|pip-audit|snyk|dependabot"
|
- "safety|pip-audit|snyk|dependabot"
|
||||||
message: "Dependency vulnerability scanning required"
|
message: "Dependency vulnerability scanning required"
|
||||||
@@ -70,7 +70,7 @@ rules:
|
|||||||
type: file_exists
|
type: file_exists
|
||||||
paths:
|
paths:
|
||||||
- ".github/dependabot.yml"
|
- ".github/dependabot.yml"
|
||||||
- ".gitlab-ci.yml" # GitLab uses built-in dependency scanning
|
- ".gitea/workflows/*.yml"
|
||||||
message: "Consider enabling Dependabot for security updates"
|
message: "Consider enabling Dependabot for security updates"
|
||||||
|
|
||||||
- id: THIRD-VULN-003
|
- id: THIRD-VULN-003
|
||||||
@@ -81,7 +81,7 @@ rules:
|
|||||||
type: pattern_recommended
|
type: pattern_recommended
|
||||||
paths:
|
paths:
|
||||||
- ".github/workflows/*.yml"
|
- ".github/workflows/*.yml"
|
||||||
- ".gitlab-ci.yml"
|
- ".gitea/workflows/*.yml"
|
||||||
patterns:
|
patterns:
|
||||||
- "trivy|grype|snyk.*container"
|
- "trivy|grype|snyk.*container"
|
||||||
message: "Consider container image vulnerability scanning"
|
message: "Consider container image vulnerability scanning"
|
||||||
|
|||||||
21
.dockerignore
Normal file
21
.dockerignore
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
!.env.example
|
||||||
|
.git
|
||||||
|
.gitea
|
||||||
|
__pycache__
|
||||||
|
*.pyc
|
||||||
|
*.pyo
|
||||||
|
site/
|
||||||
|
docs/
|
||||||
|
exports/
|
||||||
|
alembic/versions_backup/
|
||||||
|
*.csv
|
||||||
|
*.md
|
||||||
|
!requirements.txt
|
||||||
|
.pre-commit-config.yaml
|
||||||
|
.architecture-rules/
|
||||||
|
.performance-rules/
|
||||||
|
.security-rules/
|
||||||
|
mkdocs.yml
|
||||||
|
monitoring/
|
||||||
77
.env.example
77
.env.example
@@ -6,7 +6,7 @@ DEBUG=False
|
|||||||
# =============================================================================
|
# =============================================================================
|
||||||
# PROJECT INFORMATION
|
# PROJECT INFORMATION
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
PROJECT_NAME=Wizamart - Multi-Store Marketplace Platform
|
PROJECT_NAME=Orion - Multi-Store Marketplace Platform
|
||||||
DESCRIPTION=Multi-tenants multi-themes ecommerce application
|
DESCRIPTION=Multi-tenants multi-themes ecommerce application
|
||||||
VERSION=2.2.0
|
VERSION=2.2.0
|
||||||
|
|
||||||
@@ -14,17 +14,17 @@ VERSION=2.2.0
|
|||||||
# DATABASE CONFIGURATION (PostgreSQL required)
|
# DATABASE CONFIGURATION (PostgreSQL required)
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# Default works with: docker-compose up -d db
|
# Default works with: docker-compose up -d db
|
||||||
DATABASE_URL=postgresql://wizamart_user:secure_password@localhost:5432/wizamart_db
|
DATABASE_URL=postgresql://orion_user:secure_password@localhost:5432/orion_db
|
||||||
|
|
||||||
# For production, use your PostgreSQL connection string:
|
# For production, use your PostgreSQL connection string:
|
||||||
# DATABASE_URL=postgresql://username:password@production-host:5432/wizamart_db
|
# DATABASE_URL=postgresql://username:password@production-host:5432/orion_db
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# ADMIN INITIALIZATION
|
# ADMIN INITIALIZATION
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# These are used by init_production.py to create the platform admin
|
# These are used by init_production.py to create the platform admin
|
||||||
# ⚠️ CHANGE THESE IN PRODUCTION!
|
# ⚠️ CHANGE THESE IN PRODUCTION!
|
||||||
ADMIN_EMAIL=admin@wizamart.com
|
ADMIN_EMAIL=admin@wizard.lu
|
||||||
ADMIN_USERNAME=admin
|
ADMIN_USERNAME=admin
|
||||||
ADMIN_PASSWORD=change-me-in-production
|
ADMIN_PASSWORD=change-me-in-production
|
||||||
ADMIN_FIRST_NAME=Platform
|
ADMIN_FIRST_NAME=Platform
|
||||||
@@ -49,9 +49,9 @@ API_PORT=8000
|
|||||||
# Development
|
# Development
|
||||||
DOCUMENTATION_URL=http://localhost:8001
|
DOCUMENTATION_URL=http://localhost:8001
|
||||||
# Staging
|
# Staging
|
||||||
# DOCUMENTATION_URL=https://staging-docs.wizamart.com
|
# DOCUMENTATION_URL=https://staging-docs.wizard.lu
|
||||||
# Production
|
# Production
|
||||||
# DOCUMENTATION_URL=https://docs.wizamart.com
|
# DOCUMENTATION_URL=https://docs.wizard.lu
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# RATE LIMITING
|
# RATE LIMITING
|
||||||
@@ -67,10 +67,15 @@ LOG_LEVEL=INFO
|
|||||||
LOG_FILE=logs/app.log
|
LOG_FILE=logs/app.log
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# PLATFORM DOMAIN CONFIGURATION
|
# MAIN DOMAIN CONFIGURATION
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# Your main platform domain
|
# Your main platform domain
|
||||||
PLATFORM_DOMAIN=wizamart.com
|
MAIN_DOMAIN=wizard.lu
|
||||||
|
|
||||||
|
# Full base URL for outbound links (emails, billing redirects, etc.)
|
||||||
|
# Must include protocol and port if non-standard
|
||||||
|
# Examples: http://localhost:8000, http://acme.localhost:9999, https://wizard.lu
|
||||||
|
APP_BASE_URL=http://localhost:8000
|
||||||
|
|
||||||
# Custom domain features
|
# Custom domain features
|
||||||
# Enable/disable custom domains
|
# Enable/disable custom domains
|
||||||
@@ -85,7 +90,7 @@ SSL_PROVIDER=letsencrypt
|
|||||||
AUTO_PROVISION_SSL=False
|
AUTO_PROVISION_SSL=False
|
||||||
|
|
||||||
# DNS verification
|
# DNS verification
|
||||||
DNS_VERIFICATION_PREFIX=_wizamart-verify
|
DNS_VERIFICATION_PREFIX=_wizard-verify
|
||||||
DNS_VERIFICATION_TTL=3600
|
DNS_VERIFICATION_TTL=3600
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
@@ -103,8 +108,8 @@ STRIPE_TRIAL_DAYS=30
|
|||||||
# =============================================================================
|
# =============================================================================
|
||||||
# Provider: smtp, sendgrid, mailgun, ses
|
# Provider: smtp, sendgrid, mailgun, ses
|
||||||
EMAIL_PROVIDER=smtp
|
EMAIL_PROVIDER=smtp
|
||||||
EMAIL_FROM_ADDRESS=noreply@wizamart.com
|
EMAIL_FROM_ADDRESS=noreply@wizard.lu
|
||||||
EMAIL_FROM_NAME=Wizamart
|
EMAIL_FROM_NAME=Wizard
|
||||||
EMAIL_REPLY_TO=
|
EMAIL_REPLY_TO=
|
||||||
|
|
||||||
# SMTP Settings (used when EMAIL_PROVIDER=smtp)
|
# SMTP Settings (used when EMAIL_PROVIDER=smtp)
|
||||||
@@ -149,6 +154,10 @@ SEED_ORDERS_PER_STORE=10
|
|||||||
# =============================================================================
|
# =============================================================================
|
||||||
# CELERY / REDIS TASK QUEUE
|
# CELERY / REDIS TASK QUEUE
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
|
# Redis password (must match docker-compose.yml --requirepass flag)
|
||||||
|
# ⚠️ CHANGE THIS IN PRODUCTION! Generate with: openssl rand -hex 16
|
||||||
|
REDIS_PASSWORD=changeme
|
||||||
|
|
||||||
# Redis connection URL (used for Celery broker and backend)
|
# Redis connection URL (used for Celery broker and backend)
|
||||||
# Default works with: docker-compose up -d redis
|
# Default works with: docker-compose up -d redis
|
||||||
REDIS_URL=redis://localhost:6379/0
|
REDIS_URL=redis://localhost:6379/0
|
||||||
@@ -173,6 +182,14 @@ SENTRY_DSN=
|
|||||||
SENTRY_ENVIRONMENT=production
|
SENTRY_ENVIRONMENT=production
|
||||||
SENTRY_TRACES_SAMPLE_RATE=0.1
|
SENTRY_TRACES_SAMPLE_RATE=0.1
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# MONITORING
|
||||||
|
# =============================================================================
|
||||||
|
ENABLE_METRICS=true
|
||||||
|
GRAFANA_URL=https://grafana.wizard.lu
|
||||||
|
GRAFANA_ADMIN_USER=admin
|
||||||
|
GRAFANA_ADMIN_PASSWORD=changeme
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# CLOUDFLARE R2 STORAGE
|
# CLOUDFLARE R2 STORAGE
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
@@ -185,13 +202,49 @@ STORAGE_BACKEND=local
|
|||||||
R2_ACCOUNT_ID=
|
R2_ACCOUNT_ID=
|
||||||
R2_ACCESS_KEY_ID=
|
R2_ACCESS_KEY_ID=
|
||||||
R2_SECRET_ACCESS_KEY=
|
R2_SECRET_ACCESS_KEY=
|
||||||
R2_BUCKET_NAME=wizamart-media
|
R2_BUCKET_NAME=orion-media
|
||||||
|
|
||||||
# Public URL for R2 bucket (optional - for custom domain)
|
# Public URL for R2 bucket (optional - for custom domain)
|
||||||
# If not set, uses Cloudflare's default R2 public URL
|
# If not set, uses Cloudflare's default R2 public URL
|
||||||
# Example: https://media.yoursite.com
|
# Example: https://media.yoursite.com
|
||||||
R2_PUBLIC_URL=
|
R2_PUBLIC_URL=
|
||||||
|
|
||||||
|
# Cloudflare R2 backup bucket (used by scripts/backup.sh --upload)
|
||||||
|
R2_BACKUP_BUCKET=orion-backups
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# LOYALTY MODULE
|
||||||
|
# =============================================================================
|
||||||
|
# Anti-fraud defaults (all optional, shown values are defaults)
|
||||||
|
# LOYALTY_DEFAULT_COOLDOWN_MINUTES=15
|
||||||
|
# LOYALTY_MAX_DAILY_STAMPS=5
|
||||||
|
# LOYALTY_PIN_MAX_FAILED_ATTEMPTS=5
|
||||||
|
# LOYALTY_PIN_LOCKOUT_MINUTES=30
|
||||||
|
|
||||||
|
# Points configuration
|
||||||
|
# LOYALTY_DEFAULT_POINTS_PER_EURO=10
|
||||||
|
|
||||||
|
# Google Wallet integration
|
||||||
|
# See docs/deployment/hetzner-server-setup.md Step 25 for setup guide
|
||||||
|
# Get Issuer ID from https://pay.google.com/business/console
|
||||||
|
# LOYALTY_GOOGLE_ISSUER_ID=3388000000012345678
|
||||||
|
# Production convention: ~/apps/orion/google-wallet-sa.json (app user, mode 600).
|
||||||
|
# Path is validated at startup — file must exist and be readable, otherwise
|
||||||
|
# the app fails fast at import time.
|
||||||
|
# LOYALTY_GOOGLE_SERVICE_ACCOUNT_JSON=~/apps/orion/google-wallet-sa.json
|
||||||
|
# LOYALTY_GOOGLE_WALLET_ORIGINS=["https://yourdomain.com"]
|
||||||
|
# LOYALTY_DEFAULT_LOGO_URL=https://yourdomain.com/path/to/default-logo.png
|
||||||
|
|
||||||
|
# Apple Wallet integration (requires Apple Developer account)
|
||||||
|
# LOYALTY_APPLE_PASS_TYPE_ID=pass.com.example.loyalty
|
||||||
|
# LOYALTY_APPLE_TEAM_ID=ABCD1234
|
||||||
|
# LOYALTY_APPLE_WWDR_CERT_PATH=/path/to/wwdr.pem
|
||||||
|
# LOYALTY_APPLE_SIGNER_CERT_PATH=/path/to/signer.pem
|
||||||
|
# LOYALTY_APPLE_SIGNER_KEY_PATH=/path/to/signer.key
|
||||||
|
|
||||||
|
# QR code size in pixels (default: 300)
|
||||||
|
# LOYALTY_QR_CODE_SIZE=300
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# CLOUDFLARE CDN / PROXY
|
# CLOUDFLARE CDN / PROXY
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
|
|||||||
167
.gitea/workflows/ci.yml
Normal file
167
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
# Gitea Actions CI/CD Configuration
|
||||||
|
# ==================================
|
||||||
|
# Uses GitHub Actions-compatible syntax. Requires Gitea 1.19+ with Actions enabled.
|
||||||
|
# Requires Gitea 1.19+ with Actions enabled.
|
||||||
|
|
||||||
|
name: CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
branches: [master]
|
||||||
|
|
||||||
|
env:
|
||||||
|
PYTHON_VERSION: "3.11"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Lint
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
ruff:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
run: pip install uv
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: uv pip install --system -r requirements.txt -r requirements-dev.txt
|
||||||
|
|
||||||
|
- name: Run ruff
|
||||||
|
run: ruff check .
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Tests — unit only (integration tests run locally via make test)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
pytest:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 150
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:15
|
||||||
|
env:
|
||||||
|
POSTGRES_DB: orion_test
|
||||||
|
POSTGRES_USER: test_user
|
||||||
|
POSTGRES_PASSWORD: test_password
|
||||||
|
options: >-
|
||||||
|
--health-cmd "pg_isready -U test_user -d orion_test"
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
|
||||||
|
env:
|
||||||
|
TEST_DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/orion_test"
|
||||||
|
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/orion_test"
|
||||||
|
LOG_LEVEL: "WARNING"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
run: pip install uv
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: uv pip install --system -r requirements.txt -r requirements-test.txt
|
||||||
|
|
||||||
|
- name: Run unit tests
|
||||||
|
run: python -m pytest -m "unit" -q --tb=short --timeout=120 --no-cov --override-ini="addopts=" -p no:cacheprovider -p no:logging --durations=20
|
||||||
|
|
||||||
|
validate:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
DATABASE_URL: "postgresql://dummy:dummy@localhost:5432/dummy"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
run: pip install uv
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: uv pip install --system -r requirements.txt
|
||||||
|
|
||||||
|
- name: Validate architecture patterns
|
||||||
|
run: python scripts/validate/validate_all.py --architecture
|
||||||
|
|
||||||
|
- name: Validate security patterns
|
||||||
|
run: python scripts/validate/validate_all.py --security
|
||||||
|
|
||||||
|
- name: Validate performance patterns
|
||||||
|
run: python scripts/validate/validate_all.py --performance
|
||||||
|
|
||||||
|
- name: Validate audit patterns
|
||||||
|
run: python scripts/validate/validate_all.py --audit
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Security (non-blocking)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
dependency-scanning:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
continue-on-error: true
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
|
- name: Install pip-audit
|
||||||
|
run: pip install pip-audit
|
||||||
|
|
||||||
|
- name: Run pip-audit
|
||||||
|
run: pip-audit --requirement requirements.txt || true
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Build (docs - only on push to master)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
docs:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
|
||||||
|
needs: [ruff, pytest, validate]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.PYTHON_VERSION }}
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
run: pip install uv
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: uv pip install --system -r requirements.txt -r requirements-docs.txt
|
||||||
|
|
||||||
|
- name: Build docs
|
||||||
|
run: mkdocs build
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Deploy (master-only, after lint + tests + validate pass)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
|
||||||
|
needs: [ruff, pytest, validate]
|
||||||
|
steps:
|
||||||
|
- name: Deploy to production
|
||||||
|
uses: appleboy/ssh-action@v1
|
||||||
|
with:
|
||||||
|
host: ${{ secrets.DEPLOY_HOST }}
|
||||||
|
username: ${{ secrets.DEPLOY_USER }}
|
||||||
|
key: ${{ secrets.DEPLOY_SSH_KEY }}
|
||||||
|
port: 22
|
||||||
|
command_timeout: 10m
|
||||||
|
script: cd ${{ secrets.DEPLOY_PATH }} && bash scripts/deploy.sh
|
||||||
19
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
19
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
## Summary
|
||||||
|
|
||||||
|
<!-- Brief description of what this PR does -->
|
||||||
|
|
||||||
|
## Changes
|
||||||
|
|
||||||
|
-
|
||||||
|
|
||||||
|
## Test plan
|
||||||
|
|
||||||
|
- [ ] Unit tests pass (`python -m pytest tests/unit/`)
|
||||||
|
- [ ] Integration tests pass (`python -m pytest tests/integration/`)
|
||||||
|
- [ ] Architecture validation passes (`python scripts/validate/validate_all.py`)
|
||||||
|
|
||||||
|
## Checklist
|
||||||
|
|
||||||
|
- [ ] Code follows project conventions
|
||||||
|
- [ ] No new warnings introduced
|
||||||
|
- [ ] Database migrations included (if applicable)
|
||||||
9
.github/dependabot.yml
vendored
Normal file
9
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
open-pull-requests-limit: 10
|
||||||
|
labels:
|
||||||
|
- "dependencies"
|
||||||
13
.gitignore
vendored
13
.gitignore
vendored
@@ -156,11 +156,10 @@ uploads/
|
|||||||
__pypackages__/
|
__pypackages__/
|
||||||
|
|
||||||
# Docker
|
# Docker
|
||||||
docker-compose.override.yml
|
|
||||||
.dockerignore.local
|
.dockerignore.local
|
||||||
*.override.yml
|
|
||||||
|
|
||||||
# Deployment & Security
|
# Deployment & Security
|
||||||
|
.build-info
|
||||||
deployment-local/
|
deployment-local/
|
||||||
*.pem
|
*.pem
|
||||||
*.key
|
*.key
|
||||||
@@ -168,6 +167,11 @@ deployment-local/
|
|||||||
secrets/
|
secrets/
|
||||||
credentials/
|
credentials/
|
||||||
|
|
||||||
|
# Google Cloud service account keys
|
||||||
|
*-service-account.json
|
||||||
|
google-wallet-sa.json
|
||||||
|
orion-*.json
|
||||||
|
|
||||||
# Alembic
|
# Alembic
|
||||||
# Note: Keep alembic/versions/ tracked for migrations
|
# Note: Keep alembic/versions/ tracked for migrations
|
||||||
# alembic/versions/*.pyc is already covered by __pycache__
|
# alembic/versions/*.pyc is already covered by __pycache__
|
||||||
@@ -183,5 +187,8 @@ tailadmin-free-tailwind-dashboard-template/
|
|||||||
static/shared/css/tailwind.css
|
static/shared/css/tailwind.css
|
||||||
|
|
||||||
# Export files
|
# Export files
|
||||||
wizamart_letzshop_export_*.csv
|
orion_letzshop_export_*.csv
|
||||||
exports/
|
exports/
|
||||||
|
|
||||||
|
# Security audit (needs revamping)
|
||||||
|
scripts/security-audit/
|
||||||
|
|||||||
130
.gitlab-ci.yml
130
.gitlab-ci.yml
@@ -1,130 +0,0 @@
|
|||||||
# GitLab CI/CD Configuration
|
|
||||||
# =========================
|
|
||||||
|
|
||||||
stages:
|
|
||||||
- lint
|
|
||||||
- test
|
|
||||||
- security
|
|
||||||
- build
|
|
||||||
|
|
||||||
variables:
|
|
||||||
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
|
|
||||||
PYTHON_VERSION: "3.11"
|
|
||||||
|
|
||||||
# Cache dependencies between jobs
|
|
||||||
cache:
|
|
||||||
paths:
|
|
||||||
- .cache/pip
|
|
||||||
- .venv/
|
|
||||||
|
|
||||||
# Lint Stage
|
|
||||||
# ----------
|
|
||||||
|
|
||||||
ruff:
|
|
||||||
stage: lint
|
|
||||||
image: python:${PYTHON_VERSION}
|
|
||||||
before_script:
|
|
||||||
- pip install uv
|
|
||||||
- uv sync --frozen
|
|
||||||
script:
|
|
||||||
- .venv/bin/ruff check .
|
|
||||||
rules:
|
|
||||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
|
||||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
|
||||||
|
|
||||||
# Test Stage
|
|
||||||
# ----------
|
|
||||||
|
|
||||||
pytest:
|
|
||||||
stage: test
|
|
||||||
image: python:${PYTHON_VERSION}
|
|
||||||
services:
|
|
||||||
- name: postgres:15
|
|
||||||
alias: postgres
|
|
||||||
variables:
|
|
||||||
# PostgreSQL service configuration
|
|
||||||
POSTGRES_DB: wizamart_test
|
|
||||||
POSTGRES_USER: test_user
|
|
||||||
POSTGRES_PASSWORD: test_password
|
|
||||||
# Application database URL for tests
|
|
||||||
TEST_DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/wizamart_test"
|
|
||||||
# Skip database validation during import (tests use TEST_DATABASE_URL)
|
|
||||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/wizamart_test"
|
|
||||||
before_script:
|
|
||||||
- pip install uv
|
|
||||||
- uv sync --frozen
|
|
||||||
# Wait for PostgreSQL to be ready
|
|
||||||
- apt-get update && apt-get install -y postgresql-client
|
|
||||||
- for i in $(seq 1 30); do pg_isready -h postgres -U test_user && break || sleep 1; done
|
|
||||||
script:
|
|
||||||
- .venv/bin/python -m pytest tests/ -v --tb=short
|
|
||||||
coverage: '/TOTAL.*\s+(\d+%)/'
|
|
||||||
artifacts:
|
|
||||||
reports:
|
|
||||||
junit: report.xml
|
|
||||||
coverage_report:
|
|
||||||
coverage_format: cobertura
|
|
||||||
path: coverage.xml
|
|
||||||
rules:
|
|
||||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
|
||||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
|
||||||
|
|
||||||
architecture:
|
|
||||||
stage: test
|
|
||||||
image: python:${PYTHON_VERSION}
|
|
||||||
variables:
|
|
||||||
# Set DATABASE_URL to satisfy validation (not actually used by validator)
|
|
||||||
DATABASE_URL: "postgresql://dummy:dummy@localhost:5432/dummy"
|
|
||||||
before_script:
|
|
||||||
- pip install uv
|
|
||||||
- uv sync --frozen
|
|
||||||
script:
|
|
||||||
- .venv/bin/python scripts/validate_architecture.py
|
|
||||||
rules:
|
|
||||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
|
||||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
|
||||||
|
|
||||||
# Security Stage
|
|
||||||
# --------------
|
|
||||||
|
|
||||||
dependency_scanning:
|
|
||||||
stage: security
|
|
||||||
image: python:${PYTHON_VERSION}
|
|
||||||
before_script:
|
|
||||||
- pip install pip-audit
|
|
||||||
script:
|
|
||||||
- pip-audit --requirement requirements.txt || true
|
|
||||||
allow_failure: true
|
|
||||||
rules:
|
|
||||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
|
||||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
|
||||||
|
|
||||||
audit:
|
|
||||||
stage: security
|
|
||||||
image: python:${PYTHON_VERSION}
|
|
||||||
before_script:
|
|
||||||
- pip install uv
|
|
||||||
- uv sync --frozen
|
|
||||||
script:
|
|
||||||
- .venv/bin/python scripts/validate_audit.py
|
|
||||||
allow_failure: true
|
|
||||||
rules:
|
|
||||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
|
||||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
|
||||||
|
|
||||||
# Build Stage
|
|
||||||
# -----------
|
|
||||||
|
|
||||||
docs:
|
|
||||||
stage: build
|
|
||||||
image: python:${PYTHON_VERSION}
|
|
||||||
before_script:
|
|
||||||
- pip install uv
|
|
||||||
- uv sync --frozen
|
|
||||||
script:
|
|
||||||
- .venv/bin/mkdocs build
|
|
||||||
artifacts:
|
|
||||||
paths:
|
|
||||||
- site/
|
|
||||||
rules:
|
|
||||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
|
||||||
@@ -20,7 +20,7 @@
|
|||||||
|
|
||||||
<!-- Describe how you tested these changes -->
|
<!-- Describe how you tested these changes -->
|
||||||
- [ ] Unit tests pass (`pytest tests/`)
|
- [ ] Unit tests pass (`pytest tests/`)
|
||||||
- [ ] Architecture validation passes (`python scripts/validate_architecture.py`)
|
- [ ] Architecture validation passes (`python scripts/validate/validate_architecture.py`)
|
||||||
- [ ] Manual testing performed
|
- [ ] Manual testing performed
|
||||||
|
|
||||||
## Checklist
|
## Checklist
|
||||||
|
|||||||
2
.idea/inspectionProfiles/Project_Default.xml
generated
2
.idea/inspectionProfiles/Project_Default.xml
generated
@@ -7,4 +7,4 @@
|
|||||||
</option>
|
</option>
|
||||||
</inspection_tool>
|
</inspection_tool>
|
||||||
</profile>
|
</profile>
|
||||||
</component>
|
</component>
|
||||||
|
|||||||
2
.idea/inspectionProfiles/profiles_settings.xml
generated
2
.idea/inspectionProfiles/profiles_settings.xml
generated
@@ -3,4 +3,4 @@
|
|||||||
<option name="USE_PROJECT_PROFILE" value="false" />
|
<option name="USE_PROJECT_PROFILE" value="false" />
|
||||||
<version value="1.0" />
|
<version value="1.0" />
|
||||||
</settings>
|
</settings>
|
||||||
</component>
|
</component>
|
||||||
|
|||||||
2
.idea/modules.xml
generated
2
.idea/modules.xml
generated
@@ -5,4 +5,4 @@
|
|||||||
<module fileurl="file://$PROJECT_DIR$/.idea/fastapi-multitenant-ecommerce.iml" filepath="$PROJECT_DIR$/.idea/fastapi-multitenant-ecommerce.iml" />
|
<module fileurl="file://$PROJECT_DIR$/.idea/fastapi-multitenant-ecommerce.iml" filepath="$PROJECT_DIR$/.idea/fastapi-multitenant-ecommerce.iml" />
|
||||||
</modules>
|
</modules>
|
||||||
</component>
|
</component>
|
||||||
</project>
|
</project>
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Performance Rules Configuration
|
# Performance Rules Configuration
|
||||||
# ================================
|
# ================================
|
||||||
# Performance-focused validation rules for the codebase.
|
# Performance-focused validation rules for the codebase.
|
||||||
# Run with: python scripts/validate_performance.py
|
# Run with: python scripts/validate/validate_performance.py
|
||||||
|
|
||||||
version: "1.0"
|
version: "1.0"
|
||||||
project: "letzshop-product-import"
|
project: "letzshop-product-import"
|
||||||
|
|||||||
@@ -4,12 +4,39 @@
|
|||||||
# Run manually: pre-commit run --all-files
|
# Run manually: pre-commit run --all-files
|
||||||
|
|
||||||
repos:
|
repos:
|
||||||
# Architecture validation
|
# Code validators (architecture, security, performance, audit)
|
||||||
- repo: local
|
- repo: local
|
||||||
hooks:
|
hooks:
|
||||||
- id: validate-architecture
|
- id: validate-architecture
|
||||||
name: Validate Architecture Patterns
|
name: Validate Architecture Patterns
|
||||||
entry: python scripts/validate_architecture.py
|
entry: python scripts/validate/validate_architecture.py
|
||||||
|
language: python
|
||||||
|
pass_filenames: false
|
||||||
|
always_run: true
|
||||||
|
additional_dependencies: [pyyaml]
|
||||||
|
verbose: true
|
||||||
|
|
||||||
|
- id: validate-security
|
||||||
|
name: Validate Security Patterns
|
||||||
|
entry: python scripts/validate/validate_all.py --security
|
||||||
|
language: python
|
||||||
|
pass_filenames: false
|
||||||
|
always_run: true
|
||||||
|
additional_dependencies: [pyyaml]
|
||||||
|
verbose: true
|
||||||
|
|
||||||
|
- id: validate-performance
|
||||||
|
name: Validate Performance Patterns
|
||||||
|
entry: python scripts/validate/validate_all.py --performance
|
||||||
|
language: python
|
||||||
|
pass_filenames: false
|
||||||
|
always_run: true
|
||||||
|
additional_dependencies: [pyyaml]
|
||||||
|
verbose: true
|
||||||
|
|
||||||
|
- id: validate-audit
|
||||||
|
name: Validate Audit Patterns
|
||||||
|
entry: python scripts/validate/validate_all.py --audit
|
||||||
language: python
|
language: python
|
||||||
pass_filenames: false
|
pass_filenames: false
|
||||||
always_run: true
|
always_run: true
|
||||||
@@ -23,21 +50,16 @@ repos:
|
|||||||
- id: trailing-whitespace
|
- id: trailing-whitespace
|
||||||
- id: end-of-file-fixer
|
- id: end-of-file-fixer
|
||||||
- id: check-yaml
|
- id: check-yaml
|
||||||
|
exclude: mkdocs.yml # Uses Python tags (!!python/name) unsupported by basic YAML checker
|
||||||
- id: check-added-large-files
|
- id: check-added-large-files
|
||||||
args: ['--maxkb=1000']
|
args: ['--maxkb=1000']
|
||||||
- id: check-json
|
- id: check-json
|
||||||
- id: check-merge-conflict
|
- id: check-merge-conflict
|
||||||
- id: debug-statements
|
- id: debug-statements
|
||||||
|
|
||||||
# Python formatting (optional - uncomment if you want)
|
# Ruff - linting and import sorting (replaces black + isort)
|
||||||
# - repo: https://github.com/psf/black
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
# rev: 23.12.1
|
rev: v0.8.4
|
||||||
# hooks:
|
hooks:
|
||||||
# - id: black
|
- id: ruff
|
||||||
# language_version: python3
|
args: [--fix, --exit-non-zero-on-fix]
|
||||||
|
|
||||||
# Python import sorting (optional)
|
|
||||||
# - repo: https://github.com/pycqa/isort
|
|
||||||
# rev: 5.13.2
|
|
||||||
# hooks:
|
|
||||||
# - id: isort
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Security Rules Configuration
|
# Security Rules Configuration
|
||||||
# ============================
|
# ============================
|
||||||
# Security-focused validation rules for the codebase.
|
# Security-focused validation rules for the codebase.
|
||||||
# Run with: python scripts/validate_security.py
|
# Run with: python scripts/validate/validate_security.py
|
||||||
|
|
||||||
version: "1.0"
|
version: "1.0"
|
||||||
project: "letzshop-product-import"
|
project: "letzshop-product-import"
|
||||||
|
|||||||
@@ -116,7 +116,7 @@ return {
|
|||||||
|
|
||||||
### Duplicate /shop/ Prefix
|
### Duplicate /shop/ Prefix
|
||||||
|
|
||||||
**Problem:** Routes like `/stores/wizamart/shop/shop/products/4`
|
**Problem:** Routes like `/stores/orion/shop/shop/products/4`
|
||||||
|
|
||||||
**Root Cause:**
|
**Root Cause:**
|
||||||
```python
|
```python
|
||||||
@@ -136,7 +136,7 @@ All routes in `shop_pages.py` fixed.
|
|||||||
|
|
||||||
### Missing /shop/ in Template Links
|
### Missing /shop/ in Template Links
|
||||||
|
|
||||||
**Problem:** Links went to `/stores/wizamart/products` instead of `/shop/products`
|
**Problem:** Links went to `/stores/orion/products` instead of `/shop/products`
|
||||||
|
|
||||||
**Fix:** Updated all templates:
|
**Fix:** Updated all templates:
|
||||||
- `shop/base.html` - Header, footer, navigation
|
- `shop/base.html` - Header, footer, navigation
|
||||||
@@ -290,15 +290,15 @@ Comprehensive guide covering:
|
|||||||
### Test URLs
|
### Test URLs
|
||||||
```
|
```
|
||||||
Landing Pages:
|
Landing Pages:
|
||||||
- http://localhost:8000/stores/wizamart/
|
- http://localhost:8000/stores/orion/
|
||||||
- http://localhost:8000/stores/fashionhub/
|
- http://localhost:8000/stores/fashionhub/
|
||||||
- http://localhost:8000/stores/bookstore/
|
- http://localhost:8000/stores/bookstore/
|
||||||
|
|
||||||
Shop Pages:
|
Shop Pages:
|
||||||
- http://localhost:8000/stores/wizamart/shop/
|
- http://localhost:8000/stores/orion/shop/
|
||||||
- http://localhost:8000/stores/wizamart/shop/products
|
- http://localhost:8000/stores/orion/shop/products
|
||||||
- http://localhost:8000/stores/wizamart/shop/products/1
|
- http://localhost:8000/stores/orion/shop/products/1
|
||||||
- http://localhost:8000/stores/wizamart/shop/cart
|
- http://localhost:8000/stores/orion/shop/cart
|
||||||
```
|
```
|
||||||
|
|
||||||
## Breaking Changes
|
## Breaking Changes
|
||||||
|
|||||||
152
Makefile
152
Makefile
@@ -1,7 +1,7 @@
|
|||||||
# Wizamart Multi-Tenant E-Commerce Platform Makefile
|
# Orion Multi-Tenant E-Commerce Platform Makefile
|
||||||
# Cross-platform compatible (Windows & Linux)
|
# Cross-platform compatible (Windows & Linux)
|
||||||
|
|
||||||
.PHONY: install install-dev install-docs install-all dev test test-coverage lint format check docker-build docker-up docker-down clean help tailwind-install tailwind-dev tailwind-build tailwind-watch arch-check arch-check-file arch-check-object test-db-up test-db-down test-db-reset test-db-status celery-worker celery-beat celery-dev flower celery-status celery-purge urls
|
.PHONY: install install-dev install-docs install-all dev test test-coverage lint format check docker-build docker-up docker-down clean help tailwind-install tailwind-dev tailwind-build tailwind-watch arch-check arch-check-file arch-check-object test-db-up test-db-down test-db-reset test-db-status celery-worker celery-beat celery-dev flower celery-status celery-purge urls infra-check test-affected test-affected-dry
|
||||||
|
|
||||||
# Detect OS
|
# Detect OS
|
||||||
ifeq ($(OS),Windows_NT)
|
ifeq ($(OS),Windows_NT)
|
||||||
@@ -44,7 +44,7 @@ setup: install-all migrate-up init-prod
|
|||||||
# =============================================================================
|
# =============================================================================
|
||||||
|
|
||||||
dev:
|
dev:
|
||||||
$(PYTHON) -m uvicorn main:app --reload --host 0.0.0.0 --port 9999
|
$(PYTHON) -m uvicorn main:app --reload --host 0.0.0.0 --port $(or $(API_PORT),8000)
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# DATABASE MIGRATIONS
|
# DATABASE MIGRATIONS
|
||||||
@@ -101,58 +101,63 @@ migrate-squash:
|
|||||||
init-prod:
|
init-prod:
|
||||||
@echo "🔧 Initializing production database..."
|
@echo "🔧 Initializing production database..."
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Step 0/5: Ensuring database exists (running migrations)..."
|
@echo "Step 0/6: Ensuring database exists (running migrations)..."
|
||||||
@$(PYTHON) -m alembic upgrade head
|
@$(PYTHON) -m alembic upgrade heads
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Step 1/5: Creating admin user and platform settings..."
|
@echo "Step 1/5: Creating admin user and platform settings..."
|
||||||
$(PYTHON) scripts/init_production.py
|
$(PYTHON) scripts/seed/init_production.py
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Step 2/5: Initializing log settings..."
|
@echo "Step 2/5: Initializing log settings..."
|
||||||
$(PYTHON) scripts/init_log_settings.py
|
$(PYTHON) scripts/seed/init_log_settings.py
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Step 3/5: Creating default CMS content pages..."
|
@echo "Step 3/5: Creating default CMS content pages..."
|
||||||
$(PYTHON) scripts/create_default_content_pages.py
|
$(PYTHON) scripts/seed/create_default_content_pages.py
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Step 4/5: Creating platform pages and landing..."
|
@echo "Step 4/5: Seeding email templates..."
|
||||||
$(PYTHON) scripts/create_platform_pages.py
|
$(PYTHON) scripts/seed/seed_email_templates.py
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Step 5/5: Seeding email templates..."
|
@echo "Step 5/5: Seeding subscription tiers..."
|
||||||
$(PYTHON) scripts/seed_email_templates.py
|
@echo " (Handled by init_production.py Step 6)"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "✅ Production initialization completed"
|
@echo "✅ Production initialization completed"
|
||||||
@echo "✨ Platform is ready for production OR development"
|
@echo "✨ Platform is ready for production OR development"
|
||||||
|
|
||||||
|
seed-tiers:
|
||||||
|
@echo "🏷️ Seeding subscription tiers..."
|
||||||
|
$(PYTHON) -c "from scripts.seed.init_production import *; from app.core.database import SessionLocal; from sqlalchemy import select; db = SessionLocal(); oms = db.execute(select(Platform).where(Platform.code == 'oms')).scalar_one_or_none(); create_subscription_tiers(db, oms) if oms else print('OMS platform not found'); db.commit(); db.close()"
|
||||||
|
@echo "✅ Subscription tiers seeded"
|
||||||
|
|
||||||
# First-time installation - Complete setup with configuration validation
|
# First-time installation - Complete setup with configuration validation
|
||||||
platform-install:
|
platform-install:
|
||||||
@echo "🚀 WIZAMART PLATFORM INSTALLATION"
|
@echo "🚀 ORION PLATFORM INSTALLATION"
|
||||||
@echo "=================================="
|
@echo "=================================="
|
||||||
$(PYTHON) scripts/install.py
|
$(PYTHON) scripts/seed/install.py
|
||||||
|
|
||||||
# Demo data seeding - Cross-platform using Python to set environment
|
# Demo data seeding - Cross-platform using Python to set environment
|
||||||
seed-demo:
|
seed-demo:
|
||||||
@echo "🎪 Seeding demo data (normal mode)..."
|
@echo "🎪 Seeding demo data (normal mode)..."
|
||||||
ifeq ($(DETECTED_OS),Windows)
|
ifeq ($(DETECTED_OS),Windows)
|
||||||
@set SEED_MODE=normal&& $(PYTHON) scripts/seed_demo.py
|
@set SEED_MODE=normal&& $(PYTHON) scripts/seed/seed_demo.py
|
||||||
else
|
else
|
||||||
SEED_MODE=normal $(PYTHON) scripts/seed_demo.py
|
SEED_MODE=normal $(PYTHON) scripts/seed/seed_demo.py
|
||||||
endif
|
endif
|
||||||
@echo "✅ Demo seeding completed"
|
@echo "✅ Demo seeding completed"
|
||||||
|
|
||||||
seed-demo-minimal:
|
seed-demo-minimal:
|
||||||
@echo "🎪 Seeding demo data (minimal mode - 1 store only)..."
|
@echo "🎪 Seeding demo data (minimal mode - 1 store only)..."
|
||||||
ifeq ($(DETECTED_OS),Windows)
|
ifeq ($(DETECTED_OS),Windows)
|
||||||
@set SEED_MODE=minimal&& $(PYTHON) scripts/seed_demo.py
|
@set SEED_MODE=minimal&& $(PYTHON) scripts/seed/seed_demo.py
|
||||||
else
|
else
|
||||||
SEED_MODE=minimal $(PYTHON) scripts/seed_demo.py
|
SEED_MODE=minimal $(PYTHON) scripts/seed/seed_demo.py
|
||||||
endif
|
endif
|
||||||
@echo "✅ Minimal demo seeding completed"
|
@echo "✅ Minimal demo seeding completed"
|
||||||
|
|
||||||
seed-demo-reset:
|
seed-demo-reset:
|
||||||
@echo "⚠️ WARNING: This will DELETE ALL existing data!"
|
@echo "⚠️ WARNING: This will DELETE ALL existing data!"
|
||||||
ifeq ($(DETECTED_OS),Windows)
|
ifeq ($(DETECTED_OS),Windows)
|
||||||
@set SEED_MODE=reset&& $(PYTHON) scripts/seed_demo.py
|
@set SEED_MODE=reset&& $(PYTHON) scripts/seed/seed_demo.py
|
||||||
else
|
else
|
||||||
SEED_MODE=reset $(PYTHON) scripts/seed_demo.py
|
SEED_MODE=reset $(PYTHON) scripts/seed/seed_demo.py
|
||||||
endif
|
endif
|
||||||
|
|
||||||
db-setup: migrate-up init-prod seed-demo
|
db-setup: migrate-up init-prod seed-demo
|
||||||
@@ -167,34 +172,36 @@ db-reset:
|
|||||||
@echo "Applying all migrations..."
|
@echo "Applying all migrations..."
|
||||||
$(PYTHON) -m alembic upgrade head
|
$(PYTHON) -m alembic upgrade head
|
||||||
@echo "Initializing production data..."
|
@echo "Initializing production data..."
|
||||||
$(PYTHON) scripts/init_production.py
|
$(PYTHON) scripts/seed/init_production.py
|
||||||
|
@echo "Initializing log settings..."
|
||||||
|
$(PYTHON) scripts/seed/init_log_settings.py
|
||||||
|
@echo "Creating default CMS content pages..."
|
||||||
|
$(PYTHON) scripts/seed/create_default_content_pages.py
|
||||||
|
@echo "Seeding email templates..."
|
||||||
|
$(PYTHON) scripts/seed/seed_email_templates.py
|
||||||
@echo "Seeding demo data..."
|
@echo "Seeding demo data..."
|
||||||
ifeq ($(DETECTED_OS),Windows)
|
ifeq ($(DETECTED_OS),Windows)
|
||||||
@set SEED_MODE=reset&& set FORCE_RESET=true&& $(PYTHON) scripts/seed_demo.py
|
@set SEED_MODE=reset&& set FORCE_RESET=true&& $(PYTHON) scripts/seed/seed_demo.py
|
||||||
else
|
else
|
||||||
SEED_MODE=reset FORCE_RESET=true $(PYTHON) scripts/seed_demo.py
|
SEED_MODE=reset FORCE_RESET=true $(PYTHON) scripts/seed/seed_demo.py
|
||||||
endif
|
endif
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "✅ Database completely reset!"
|
@echo "✅ Database completely reset!"
|
||||||
|
|
||||||
backup-db:
|
backup-db:
|
||||||
@echo "Creating database backup..."
|
@echo "Creating database backup..."
|
||||||
@$(PYTHON) scripts/backup_database.py
|
@$(PYTHON) scripts/seed/backup_database.py
|
||||||
|
|
||||||
# Utility commands (usually not needed - init-prod handles these)
|
# Utility commands (usually not needed - init-prod handles these)
|
||||||
create-cms-defaults:
|
create-cms-defaults:
|
||||||
@echo "📄 Creating default CMS content pages..."
|
@echo "📄 Creating default CMS content pages..."
|
||||||
$(PYTHON) scripts/create_default_content_pages.py
|
$(PYTHON) scripts/seed/create_default_content_pages.py
|
||||||
@echo "✅ CMS defaults created"
|
@echo "✅ CMS defaults created"
|
||||||
|
|
||||||
create-platform-pages:
|
|
||||||
@echo "🏠 Creating platform pages and landing..."
|
|
||||||
$(PYTHON) scripts/create_platform_pages.py
|
|
||||||
@echo "✅ Platform pages created"
|
|
||||||
|
|
||||||
init-logging:
|
init-logging:
|
||||||
@echo "📝 Initializing log settings..."
|
@echo "📝 Initializing log settings..."
|
||||||
$(PYTHON) scripts/init_log_settings.py
|
$(PYTHON) scripts/seed/init_log_settings.py
|
||||||
@echo "✅ Log settings initialized"
|
@echo "✅ Log settings initialized"
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
@@ -227,7 +234,7 @@ test-db-status:
|
|||||||
# =============================================================================
|
# =============================================================================
|
||||||
|
|
||||||
# Test database URL
|
# Test database URL
|
||||||
TEST_DB_URL := postgresql://test_user:test_password@localhost:5433/wizamart_test
|
TEST_DB_URL := postgresql://test_user:test_password@localhost:5433/orion_test
|
||||||
|
|
||||||
# Build pytest marker expression from module= and frontend= params
|
# Build pytest marker expression from module= and frontend= params
|
||||||
MARKER_EXPR :=
|
MARKER_EXPR :=
|
||||||
@@ -242,24 +249,21 @@ ifdef frontend
|
|||||||
endif
|
endif
|
||||||
endif
|
endif
|
||||||
|
|
||||||
# All testpaths
|
|
||||||
TEST_PATHS := tests/
|
|
||||||
|
|
||||||
test:
|
test:
|
||||||
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
||||||
@sleep 2
|
@sleep 2
|
||||||
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
||||||
$(PYTHON) -m pytest $(TEST_PATHS) -v $(MARKER_EXPR)
|
$(PYTHON) -m pytest -v $(MARKER_EXPR)
|
||||||
|
|
||||||
test-unit:
|
test-unit:
|
||||||
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
||||||
@sleep 2
|
@sleep 2
|
||||||
ifdef module
|
ifdef module
|
||||||
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
||||||
$(PYTHON) -m pytest $(TEST_PATHS) -v -m "unit and $(module)"
|
$(PYTHON) -m pytest -v -m "unit and $(module)"
|
||||||
else
|
else
|
||||||
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
||||||
$(PYTHON) -m pytest $(TEST_PATHS) -v -m unit
|
$(PYTHON) -m pytest -v -m unit
|
||||||
endif
|
endif
|
||||||
|
|
||||||
test-integration:
|
test-integration:
|
||||||
@@ -267,29 +271,38 @@ test-integration:
|
|||||||
@sleep 2
|
@sleep 2
|
||||||
ifdef module
|
ifdef module
|
||||||
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
||||||
$(PYTHON) -m pytest $(TEST_PATHS) -v -m "integration and $(module)"
|
$(PYTHON) -m pytest -v -m "integration and $(module)"
|
||||||
else
|
else
|
||||||
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
||||||
$(PYTHON) -m pytest $(TEST_PATHS) -v -m integration
|
$(PYTHON) -m pytest -v -m integration
|
||||||
endif
|
endif
|
||||||
|
|
||||||
test-coverage:
|
test-coverage:
|
||||||
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
||||||
@sleep 2
|
@sleep 2
|
||||||
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
||||||
$(PYTHON) -m pytest $(TEST_PATHS) --cov=app --cov=models --cov=utils --cov=middleware --cov-report=html --cov-report=term-missing $(MARKER_EXPR)
|
$(PYTHON) -m pytest --cov=app --cov=models --cov=utils --cov=middleware --cov-report=html --cov-report=term-missing $(MARKER_EXPR)
|
||||||
|
|
||||||
|
test-affected:
|
||||||
|
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
||||||
|
@sleep 2
|
||||||
|
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
||||||
|
$(PYTHON) scripts/tests/run_affected_tests.py $(AFFECTED_ARGS)
|
||||||
|
|
||||||
|
test-affected-dry:
|
||||||
|
@$(PYTHON) scripts/tests/run_affected_tests.py --dry-run $(AFFECTED_ARGS)
|
||||||
|
|
||||||
test-fast:
|
test-fast:
|
||||||
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
||||||
@sleep 2
|
@sleep 2
|
||||||
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
||||||
$(PYTHON) -m pytest $(TEST_PATHS) -v -m "not slow" $(MARKER_EXPR)
|
$(PYTHON) -m pytest -v -m "not slow" $(MARKER_EXPR)
|
||||||
|
|
||||||
test-slow:
|
test-slow:
|
||||||
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
@docker compose -f docker-compose.test.yml up -d 2>/dev/null || true
|
||||||
@sleep 2
|
@sleep 2
|
||||||
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
TEST_DATABASE_URL="$(TEST_DB_URL)" \
|
||||||
$(PYTHON) -m pytest $(TEST_PATHS) -v -m slow
|
$(PYTHON) -m pytest -v -m slow
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# CODE QUALITY
|
# CODE QUALITY
|
||||||
@@ -317,31 +330,31 @@ ci: lint-strict verify-imports test-coverage
|
|||||||
|
|
||||||
verify-imports:
|
verify-imports:
|
||||||
@echo "Verifying critical imports..."
|
@echo "Verifying critical imports..."
|
||||||
$(PYTHON) scripts/verify_critical_imports.py
|
$(PYTHON) scripts/validate/verify_critical_imports.py
|
||||||
|
|
||||||
arch-check:
|
arch-check:
|
||||||
@echo "Running architecture validation..."
|
@echo "Running architecture validation..."
|
||||||
$(PYTHON) scripts/validate_architecture.py
|
$(PYTHON) scripts/validate/validate_architecture.py
|
||||||
|
|
||||||
arch-check-file:
|
arch-check-file:
|
||||||
ifeq ($(DETECTED_OS),Windows)
|
ifeq ($(DETECTED_OS),Windows)
|
||||||
@if "$(file)"=="" (echo Error: Please provide a file. Usage: make arch-check-file file="path/to/file.py") else ($(PYTHON) scripts/validate_architecture.py -f "$(file)")
|
@if "$(file)"=="" (echo Error: Please provide a file. Usage: make arch-check-file file="path/to/file.py") else ($(PYTHON) scripts/validate/validate_architecture.py -f "$(file)")
|
||||||
else
|
else
|
||||||
@if [ -z "$(file)" ]; then \
|
@if [ -z "$(file)" ]; then \
|
||||||
echo "Error: Please provide a file. Usage: make arch-check-file file=\"path/to/file.py\""; \
|
echo "Error: Please provide a file. Usage: make arch-check-file file=\"path/to/file.py\""; \
|
||||||
else \
|
else \
|
||||||
$(PYTHON) scripts/validate_architecture.py -f "$(file)"; \
|
$(PYTHON) scripts/validate/validate_architecture.py -f "$(file)"; \
|
||||||
fi
|
fi
|
||||||
endif
|
endif
|
||||||
|
|
||||||
arch-check-object:
|
arch-check-object:
|
||||||
ifeq ($(DETECTED_OS),Windows)
|
ifeq ($(DETECTED_OS),Windows)
|
||||||
@if "$(name)"=="" (echo Error: Please provide an object name. Usage: make arch-check-object name="merchant") else ($(PYTHON) scripts/validate_architecture.py -o "$(name)")
|
@if "$(name)"=="" (echo Error: Please provide an object name. Usage: make arch-check-object name="merchant") else ($(PYTHON) scripts/validate/validate_architecture.py -o "$(name)")
|
||||||
else
|
else
|
||||||
@if [ -z "$(name)" ]; then \
|
@if [ -z "$(name)" ]; then \
|
||||||
echo "Error: Please provide an object name. Usage: make arch-check-object name=\"merchant\""; \
|
echo "Error: Please provide an object name. Usage: make arch-check-object name=\"merchant\""; \
|
||||||
else \
|
else \
|
||||||
$(PYTHON) scripts/validate_architecture.py -o "$(name)"; \
|
$(PYTHON) scripts/validate/validate_architecture.py -o "$(name)"; \
|
||||||
fi
|
fi
|
||||||
endif
|
endif
|
||||||
|
|
||||||
@@ -391,26 +404,22 @@ tailwind-install:
|
|||||||
@mv tailwindcss-linux-x64 $(TAILWIND_CLI)
|
@mv tailwindcss-linux-x64 $(TAILWIND_CLI)
|
||||||
@echo "Tailwind CLI installed: $$($(TAILWIND_CLI) --help | head -1)"
|
@echo "Tailwind CLI installed: $$($(TAILWIND_CLI) --help | head -1)"
|
||||||
|
|
||||||
|
# All frontends that have a Tailwind build (static/<name>/css/tailwind.css)
|
||||||
|
TAILWIND_FRONTENDS := admin store storefront platform merchant
|
||||||
|
|
||||||
tailwind-dev:
|
tailwind-dev:
|
||||||
@echo "Building Tailwind CSS (development)..."
|
@echo "Building Tailwind CSS (development)..."
|
||||||
$(TAILWIND_CLI) -i static/admin/css/tailwind.css -o static/admin/css/tailwind.output.css
|
@$(foreach fe,$(TAILWIND_FRONTENDS),$(TAILWIND_CLI) -i static/$(fe)/css/tailwind.css -o static/$(fe)/css/tailwind.output.css &&) true
|
||||||
$(TAILWIND_CLI) -i static/store/css/tailwind.css -o static/store/css/tailwind.output.css
|
@echo "Tailwind CSS built ($(TAILWIND_FRONTENDS))"
|
||||||
$(TAILWIND_CLI) -i static/shop/css/tailwind.css -o static/shop/css/tailwind.output.css
|
|
||||||
$(TAILWIND_CLI) -i static/platform/css/tailwind.css -o static/platform/css/tailwind.output.css
|
|
||||||
@echo "Tailwind CSS built (admin + store + shop + platform)"
|
|
||||||
|
|
||||||
tailwind-build:
|
tailwind-build:
|
||||||
@echo "Building Tailwind CSS (production - minified)..."
|
@echo "Building Tailwind CSS (production - minified)..."
|
||||||
$(TAILWIND_CLI) -i static/admin/css/tailwind.css -o static/admin/css/tailwind.output.css --minify
|
@$(foreach fe,$(TAILWIND_FRONTENDS),$(TAILWIND_CLI) -i static/$(fe)/css/tailwind.css -o static/$(fe)/css/tailwind.output.css --minify &&) true
|
||||||
$(TAILWIND_CLI) -i static/store/css/tailwind.css -o static/store/css/tailwind.output.css --minify
|
|
||||||
$(TAILWIND_CLI) -i static/shop/css/tailwind.css -o static/shop/css/tailwind.output.css --minify
|
|
||||||
$(TAILWIND_CLI) -i static/platform/css/tailwind.css -o static/platform/css/tailwind.output.css --minify
|
|
||||||
@echo "Tailwind CSS built and minified for production"
|
@echo "Tailwind CSS built and minified for production"
|
||||||
|
|
||||||
tailwind-watch:
|
tailwind-watch:
|
||||||
@echo "Watching Tailwind CSS for changes..."
|
@echo "Watching Tailwind CSS for changes ($(fe))..."
|
||||||
@echo "Note: This watches admin CSS only. Run in separate terminal."
|
$(TAILWIND_CLI) -i static/$(fe)/css/tailwind.css -o static/$(fe)/css/tailwind.output.css --watch
|
||||||
$(TAILWIND_CLI) -i static/admin/css/tailwind.css -o static/admin/css/tailwind.output.css --watch
|
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# CELERY / TASK QUEUE
|
# CELERY / TASK QUEUE
|
||||||
@@ -497,6 +506,13 @@ urls-dev:
|
|||||||
urls-prod:
|
urls-prod:
|
||||||
@$(PYTHON) scripts/show_urls.py --prod
|
@$(PYTHON) scripts/show_urls.py --prod
|
||||||
|
|
||||||
|
urls-check:
|
||||||
|
@$(PYTHON) scripts/show_urls.py --check
|
||||||
|
|
||||||
|
infra-check:
|
||||||
|
@echo "Running infrastructure verification..."
|
||||||
|
bash scripts/verify-server.sh
|
||||||
|
|
||||||
check-env:
|
check-env:
|
||||||
@echo "Checking Python environment..."
|
@echo "Checking Python environment..."
|
||||||
@echo "Detected OS: $(DETECTED_OS)"
|
@echo "Detected OS: $(DETECTED_OS)"
|
||||||
@@ -523,7 +539,7 @@ endif
|
|||||||
# =============================================================================
|
# =============================================================================
|
||||||
|
|
||||||
help:
|
help:
|
||||||
@echo "Wizamart Platform Development Commands"
|
@echo "Orion Platform Development Commands"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "=== SETUP ==="
|
@echo "=== SETUP ==="
|
||||||
@echo " install - Install production dependencies"
|
@echo " install - Install production dependencies"
|
||||||
@@ -541,7 +557,8 @@ help:
|
|||||||
@echo " migrate-down - Rollback last migration"
|
@echo " migrate-down - Rollback last migration"
|
||||||
@echo " migrate-status - Show migration status"
|
@echo " migrate-status - Show migration status"
|
||||||
@echo " platform-install - First-time setup (validates config + migrate + init)"
|
@echo " platform-install - First-time setup (validates config + migrate + init)"
|
||||||
@echo " init-prod - Initialize platform (admin, CMS, pages, emails)"
|
@echo " init-prod - Initialize platform (admin, CMS, pages, emails, tiers)"
|
||||||
|
@echo " seed-tiers - Seed subscription tiers only"
|
||||||
@echo " seed-demo - Seed demo data (3 merchants + stores)"
|
@echo " seed-demo - Seed demo data (3 merchants + stores)"
|
||||||
@echo " seed-demo-minimal - Seed minimal demo (1 merchant + store)"
|
@echo " seed-demo-minimal - Seed minimal demo (1 merchant + store)"
|
||||||
@echo " seed-demo-reset - DELETE ALL demo data and reseed"
|
@echo " seed-demo-reset - DELETE ALL demo data and reseed"
|
||||||
@@ -558,6 +575,8 @@ help:
|
|||||||
@echo " test-unit module=X - Run unit tests for module X"
|
@echo " test-unit module=X - Run unit tests for module X"
|
||||||
@echo " test-integration - Run integration tests only"
|
@echo " test-integration - Run integration tests only"
|
||||||
@echo " test-coverage - Run tests with coverage"
|
@echo " test-coverage - Run tests with coverage"
|
||||||
|
@echo " test-affected - Run tests for modules affected by changes"
|
||||||
|
@echo " test-affected-dry - Show affected modules without running tests"
|
||||||
@echo " test-fast - Run fast tests only"
|
@echo " test-fast - Run fast tests only"
|
||||||
@echo " test frontend=storefront - Run storefront tests"
|
@echo " test frontend=storefront - Run storefront tests"
|
||||||
@echo ""
|
@echo ""
|
||||||
@@ -581,7 +600,7 @@ help:
|
|||||||
@echo " tailwind-install - Install Tailwind standalone CLI"
|
@echo " tailwind-install - Install Tailwind standalone CLI"
|
||||||
@echo " tailwind-dev - Build Tailwind CSS (development)"
|
@echo " tailwind-dev - Build Tailwind CSS (development)"
|
||||||
@echo " tailwind-build - Build Tailwind CSS (production, minified)"
|
@echo " tailwind-build - Build Tailwind CSS (production, minified)"
|
||||||
@echo " tailwind-watch - Watch and rebuild on changes"
|
@echo " tailwind-watch fe=X - Watch and rebuild on changes (specify frontend)"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "=== CELERY / TASK QUEUE ==="
|
@echo "=== CELERY / TASK QUEUE ==="
|
||||||
@echo " celery-worker - Start Celery worker"
|
@echo " celery-worker - Start Celery worker"
|
||||||
@@ -600,6 +619,8 @@ help:
|
|||||||
@echo " urls - Show all platform/store/storefront URLs"
|
@echo " urls - Show all platform/store/storefront URLs"
|
||||||
@echo " urls-dev - Show development URLs only"
|
@echo " urls-dev - Show development URLs only"
|
||||||
@echo " urls-prod - Show production URLs only"
|
@echo " urls-prod - Show production URLs only"
|
||||||
|
@echo " urls-check - Check dev URLs with curl (server must be running)"
|
||||||
|
@echo " infra-check - Run infrastructure verification (verify-server.sh)"
|
||||||
@echo " clean - Clean build artifacts"
|
@echo " clean - Clean build artifacts"
|
||||||
@echo " check-env - Check Python environment and OS"
|
@echo " check-env - Check Python environment and OS"
|
||||||
@echo ""
|
@echo ""
|
||||||
@@ -631,12 +652,13 @@ help-db:
|
|||||||
@echo ""
|
@echo ""
|
||||||
@echo "PLATFORM INITIALIZATION (Production + Development):"
|
@echo "PLATFORM INITIALIZATION (Production + Development):"
|
||||||
@echo "──────────────────────────────────────────────────────────"
|
@echo "──────────────────────────────────────────────────────────"
|
||||||
@echo " init-prod - Complete platform setup (5 steps):"
|
@echo " init-prod - Complete platform setup (6 steps):"
|
||||||
@echo " 1. Create admin user + settings"
|
@echo " 1. Create admin user + settings"
|
||||||
@echo " 2. Initialize log settings"
|
@echo " 2. Initialize log settings"
|
||||||
@echo " 3. Create CMS defaults"
|
@echo " 3. Create CMS defaults"
|
||||||
@echo " 4. Create platform pages"
|
@echo " 4. Create platform pages"
|
||||||
@echo " 5. Seed email templates"
|
@echo " 5. Seed email templates"
|
||||||
|
@echo " 6. Seed subscription tiers"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "DEMO DATA (Development Only - NEVER in production):"
|
@echo "DEMO DATA (Development Only - NEVER in production):"
|
||||||
@echo "──────────────────────────────────────────────────────────"
|
@echo "──────────────────────────────────────────────────────────"
|
||||||
@@ -671,4 +693,4 @@ help-db:
|
|||||||
@echo " - Email provider settings (SMTP/SendGrid/Mailgun/SES)"
|
@echo " - Email provider settings (SMTP/SendGrid/Mailgun/SES)"
|
||||||
@echo " - ADMIN_PASSWORD (strong password)"
|
@echo " - ADMIN_PASSWORD (strong password)"
|
||||||
@echo " 2. make platform-install # Validates + initializes"
|
@echo " 2. make platform-install # Validates + initializes"
|
||||||
@echo " 3. DO NOT run seed-demo in production!"
|
@echo " 3. DO NOT run seed-demo in production!"
|
||||||
|
|||||||
12
README.md
12
README.md
@@ -34,7 +34,7 @@ This FastAPI application provides a complete ecommerce backend solution designed
|
|||||||
### Project Structure
|
### Project Structure
|
||||||
|
|
||||||
```
|
```
|
||||||
wizamart/
|
orion/
|
||||||
├── main.py # FastAPI application entry point
|
├── main.py # FastAPI application entry point
|
||||||
├── app/
|
├── app/
|
||||||
│ ├── core/
|
│ ├── core/
|
||||||
@@ -179,8 +179,8 @@ make qa
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Clone the repository
|
# Clone the repository
|
||||||
git clone <wizamart-repo>
|
git clone <orion-repo>
|
||||||
cd wizamart-repo
|
cd orion-repo
|
||||||
|
|
||||||
# Create virtual environment
|
# Create virtual environment
|
||||||
python -m venv venv
|
python -m venv venv
|
||||||
@@ -447,7 +447,7 @@ PROD002,"Super Gadget","A fantastic gadget",19.99,EUR,GadgetInc,9876543210987,Am
|
|||||||
- `POST /api/v1/marketplace/import-product` - Start CSV import
|
- `POST /api/v1/marketplace/import-product` - Start CSV import
|
||||||
- `GET /api/v1/marketplace/import-status/{job_id}` - Check import status
|
- `GET /api/v1/marketplace/import-status/{job_id}` - Check import status
|
||||||
- `GET /api/v1/marketplace/import-jobs` - List import jobs
|
- `GET /api/v1/marketplace/import-jobs` - List import jobs
|
||||||
-
|
-
|
||||||
### Inventory Endpoints
|
### Inventory Endpoints
|
||||||
- `POST /api/v1/inventory` - Set inventory quantity
|
- `POST /api/v1/inventory` - Set inventory quantity
|
||||||
- `POST /api/v1/inventory/add` - Add to inventory
|
- `POST /api/v1/inventory/add` - Add to inventory
|
||||||
@@ -700,7 +700,7 @@ make help
|
|||||||
|
|
||||||
This will display all available commands organized by category:
|
This will display all available commands organized by category:
|
||||||
- **Setup**: Installation and environment setup
|
- **Setup**: Installation and environment setup
|
||||||
- **Development**: Development servers and workflows
|
- **Development**: Development servers and workflows
|
||||||
- **Documentation**: Documentation building and deployment
|
- **Documentation**: Documentation building and deployment
|
||||||
- **Testing**: Various test execution options
|
- **Testing**: Various test execution options
|
||||||
- **Code Quality**: Formatting, linting, and quality checks
|
- **Code Quality**: Formatting, linting, and quality checks
|
||||||
@@ -734,4 +734,4 @@ This will display all available commands organized by category:
|
|||||||
- **Health Check**: http://localhost:8000/health
|
- **Health Check**: http://localhost:8000/health
|
||||||
- **Version Info**: http://localhost:8000/
|
- **Version Info**: http://localhost:8000/
|
||||||
|
|
||||||
For issues and feature requests, please create an issue in the repository.
|
For issues and feature requests, please create an issue in the repository.
|
||||||
|
|||||||
@@ -11,7 +11,7 @@
|
|||||||
If you discover a security vulnerability in this project, please report it responsibly:
|
If you discover a security vulnerability in this project, please report it responsibly:
|
||||||
|
|
||||||
1. **Do not** open a public issue
|
1. **Do not** open a public issue
|
||||||
2. Email the security team at: security@wizamart.com
|
2. Email the security team at: security@orion.lu
|
||||||
3. Include:
|
3. Include:
|
||||||
- Description of the vulnerability
|
- Description of the vulnerability
|
||||||
- Steps to reproduce
|
- Steps to reproduce
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Terminology Guide
|
# Terminology Guide
|
||||||
|
|
||||||
This document defines the standard terminology used throughout the Wizamart codebase.
|
This document defines the standard terminology used throughout the Orion codebase.
|
||||||
|
|
||||||
## Core Multi-Tenant Entities
|
## Core Multi-Tenant Entities
|
||||||
|
|
||||||
|
|||||||
@@ -6,12 +6,12 @@ Landing pages have been created for three stores with different templates.
|
|||||||
|
|
||||||
## 📍 Test URLs
|
## 📍 Test URLs
|
||||||
|
|
||||||
### 1. WizaMart - Modern Template
|
### 1. Orion - Modern Template
|
||||||
**Landing Page:**
|
**Landing Page:**
|
||||||
- http://localhost:8000/stores/wizamart/
|
- http://localhost:8000/stores/orion/
|
||||||
|
|
||||||
**Shop Page:**
|
**Shop Page:**
|
||||||
- http://localhost:8000/stores/wizamart/shop/
|
- http://localhost:8000/stores/orion/shop/
|
||||||
|
|
||||||
**What to expect:**
|
**What to expect:**
|
||||||
- Full-screen hero section with animations
|
- Full-screen hero section with animations
|
||||||
@@ -93,8 +93,8 @@ db.close()
|
|||||||
"
|
"
|
||||||
```
|
```
|
||||||
|
|
||||||
Then visit: http://localhost:8000/stores/wizamart/
|
Then visit: http://localhost:8000/stores/orion/
|
||||||
- Should automatically redirect to: http://localhost:8000/stores/wizamart/shop/
|
- Should automatically redirect to: http://localhost:8000/stores/orion/shop/
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -111,17 +111,17 @@ Or programmatically:
|
|||||||
```python
|
```python
|
||||||
from scripts.create_landing_page import create_landing_page
|
from scripts.create_landing_page import create_landing_page
|
||||||
|
|
||||||
# Change WizaMart to default template
|
# Change Orion to default template
|
||||||
create_landing_page('wizamart', template='default')
|
create_landing_page('orion', template='default')
|
||||||
|
|
||||||
# Change to minimal
|
# Change to minimal
|
||||||
create_landing_page('wizamart', template='minimal')
|
create_landing_page('orion', template='minimal')
|
||||||
|
|
||||||
# Change to full
|
# Change to full
|
||||||
create_landing_page('wizamart', template='full')
|
create_landing_page('orion', template='full')
|
||||||
|
|
||||||
# Change back to modern
|
# Change back to modern
|
||||||
create_landing_page('wizamart', template='modern')
|
create_landing_page('orion', template='modern')
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -130,7 +130,7 @@ create_landing_page('wizamart', template='modern')
|
|||||||
|
|
||||||
| Store | Subdomain | Template | Landing Page URL |
|
| Store | Subdomain | Template | Landing Page URL |
|
||||||
|--------|-----------|----------|------------------|
|
|--------|-----------|----------|------------------|
|
||||||
| WizaMart | wizamart | **modern** | http://localhost:8000/stores/wizamart/ |
|
| Orion | orion | **modern** | http://localhost:8000/stores/orion/ |
|
||||||
| Fashion Hub | fashionhub | **minimal** | http://localhost:8000/stores/fashionhub/ |
|
| Fashion Hub | fashionhub | **minimal** | http://localhost:8000/stores/fashionhub/ |
|
||||||
| The Book Store | bookstore | **full** | http://localhost:8000/stores/bookstore/ |
|
| The Book Store | bookstore | **full** | http://localhost:8000/stores/bookstore/ |
|
||||||
|
|
||||||
@@ -146,7 +146,7 @@ sqlite3 letzshop.db "SELECT id, store_id, slug, title, template, is_published FR
|
|||||||
|
|
||||||
Expected output:
|
Expected output:
|
||||||
```
|
```
|
||||||
8|1|landing|Welcome to WizaMart|modern|1
|
8|1|landing|Welcome to Orion|modern|1
|
||||||
9|2|landing|Fashion Hub - Style & Elegance|minimal|1
|
9|2|landing|Fashion Hub - Style & Elegance|minimal|1
|
||||||
10|3|landing|The Book Store - Your Literary Haven|full|1
|
10|3|landing|The Book Store - Your Literary Haven|full|1
|
||||||
```
|
```
|
||||||
@@ -180,7 +180,7 @@ Expected output:
|
|||||||
|
|
||||||
## ✅ Success Checklist
|
## ✅ Success Checklist
|
||||||
|
|
||||||
- [ ] WizaMart landing page loads (modern template)
|
- [ ] Orion landing page loads (modern template)
|
||||||
- [ ] Fashion Hub landing page loads (minimal template)
|
- [ ] Fashion Hub landing page loads (minimal template)
|
||||||
- [ ] Book Store landing page loads (full template)
|
- [ ] Book Store landing page loads (full template)
|
||||||
- [ ] "Shop Now" buttons work correctly
|
- [ ] "Shop Now" buttons work correctly
|
||||||
|
|||||||
@@ -120,4 +120,4 @@
|
|||||||
won't be supported unlike CMS pages where he can create pretty much anything - btw let s make a note that number of pages should be defined in tiers)
|
won't be supported unlike CMS pages where he can create pretty much anything - btw let s make a note that number of pages should be defined in tiers)
|
||||||
3/ custom domain setup: admin should be contacted to setup. same for SSL. custom emails. (this should be readonly for now)
|
3/ custom domain setup: admin should be contacted to setup. same for SSL. custom emails. (this should be readonly for now)
|
||||||
4/ API keys: stripe keys should be there
|
4/ API keys: stripe keys should be there
|
||||||
5/ sections in settings page are not displayed properly: general , localization etc take 2/3 of the screen size
|
5/ sections in settings page are not displayed properly: general , localization etc take 2/3 of the screen size
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
script_location = alembic
|
script_location = alembic
|
||||||
prepend_sys_path = .
|
prepend_sys_path = .
|
||||||
version_path_separator = space
|
version_path_separator = space
|
||||||
version_locations = alembic/versions app/modules/billing/migrations/versions app/modules/cart/migrations/versions app/modules/catalog/migrations/versions app/modules/cms/migrations/versions app/modules/customers/migrations/versions app/modules/dev_tools/migrations/versions app/modules/inventory/migrations/versions app/modules/loyalty/migrations/versions app/modules/marketplace/migrations/versions app/modules/messaging/migrations/versions app/modules/orders/migrations/versions
|
version_locations = alembic/versions app/modules/billing/migrations/versions app/modules/cart/migrations/versions app/modules/catalog/migrations/versions app/modules/cms/migrations/versions app/modules/customers/migrations/versions app/modules/dev_tools/migrations/versions app/modules/hosting/migrations/versions app/modules/inventory/migrations/versions app/modules/loyalty/migrations/versions app/modules/marketplace/migrations/versions app/modules/messaging/migrations/versions app/modules/orders/migrations/versions app/modules/prospecting/migrations/versions app/modules/tenancy/migrations/versions
|
||||||
# This will be overridden by alembic\env.py using settings.database_url
|
# This will be overridden by alembic\env.py using settings.database_url
|
||||||
sqlalchemy.url =
|
sqlalchemy.url =
|
||||||
# for PROD: sqlalchemy.url = postgresql://username:password@localhost:5432/ecommerce_db
|
# for PROD: sqlalchemy.url = postgresql://username:password@localhost:5432/ecommerce_db
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
Generic single-database configuration.
|
Generic single-database configuration.
|
||||||
|
|||||||
@@ -81,7 +81,6 @@ try:
|
|||||||
from app.modules.billing.models import ( # noqa: F401
|
from app.modules.billing.models import ( # noqa: F401
|
||||||
AddOnProduct,
|
AddOnProduct,
|
||||||
BillingHistory,
|
BillingHistory,
|
||||||
CapacitySnapshot,
|
|
||||||
MerchantFeatureOverride,
|
MerchantFeatureOverride,
|
||||||
MerchantSubscription,
|
MerchantSubscription,
|
||||||
StoreAddOn,
|
StoreAddOn,
|
||||||
@@ -90,7 +89,7 @@ try:
|
|||||||
TierFeatureLimit,
|
TierFeatureLimit,
|
||||||
)
|
)
|
||||||
|
|
||||||
print(" ✓ Billing models (9)")
|
print(" ✓ Billing models (8)")
|
||||||
except ImportError as e:
|
except ImportError as e:
|
||||||
_import_errors.append(f"billing: {e}")
|
_import_errors.append(f"billing: {e}")
|
||||||
print(f" ✗ Billing models failed: {e}")
|
print(f" ✗ Billing models failed: {e}")
|
||||||
@@ -263,6 +262,19 @@ except ImportError as e:
|
|||||||
_import_errors.append(f"dev_tools: {e}")
|
_import_errors.append(f"dev_tools: {e}")
|
||||||
print(f" ✗ Dev Tools models failed: {e}")
|
print(f" ✗ Dev Tools models failed: {e}")
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
# MONITORING MODULE (1 model)
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
try:
|
||||||
|
from app.modules.monitoring.models import ( # noqa: F401
|
||||||
|
CapacitySnapshot,
|
||||||
|
)
|
||||||
|
|
||||||
|
print(" ✓ Monitoring models (1)")
|
||||||
|
except ImportError as e:
|
||||||
|
_import_errors.append(f"monitoring: {e}")
|
||||||
|
print(f" ✗ Monitoring models failed: {e}")
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# SUMMARY
|
# SUMMARY
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|||||||
26
alembic/versions/a44f4956cfb1_merge_heads.py
Normal file
26
alembic/versions/a44f4956cfb1_merge_heads.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
"""merge heads
|
||||||
|
|
||||||
|
Revision ID: a44f4956cfb1
|
||||||
|
Revises: z_store_domain_platform_id, tenancy_001
|
||||||
|
Create Date: 2026-02-17 16:10:36.287976
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = 'a44f4956cfb1'
|
||||||
|
down_revision: Union[str, None] = ('z_store_domain_platform_id', 'tenancy_001')
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
pass
|
||||||
@@ -19,9 +19,9 @@ def upgrade() -> None:
|
|||||||
"platforms",
|
"platforms",
|
||||||
sa.Column("id", sa.Integer(), primary_key=True, index=True),
|
sa.Column("id", sa.Integer(), primary_key=True, index=True),
|
||||||
sa.Column("code", sa.String(50), unique=True, nullable=False, index=True, comment="Unique platform identifier (e.g., 'oms', 'loyalty', 'sites')"),
|
sa.Column("code", sa.String(50), unique=True, nullable=False, index=True, comment="Unique platform identifier (e.g., 'oms', 'loyalty', 'sites')"),
|
||||||
sa.Column("name", sa.String(100), nullable=False, comment="Display name (e.g., 'Wizamart OMS')"),
|
sa.Column("name", sa.String(100), nullable=False, comment="Display name (e.g., 'Orion OMS')"),
|
||||||
sa.Column("description", sa.Text(), nullable=True, comment="Platform description for admin/marketing purposes"),
|
sa.Column("description", sa.Text(), nullable=True, comment="Platform description for admin/marketing purposes"),
|
||||||
sa.Column("domain", sa.String(255), unique=True, nullable=True, index=True, comment="Production domain (e.g., 'oms.lu', 'loyalty.lu')"),
|
sa.Column("domain", sa.String(255), unique=True, nullable=True, index=True, comment="Production domain (e.g., 'omsflow.lu', 'rewardflow.lu')"),
|
||||||
sa.Column("path_prefix", sa.String(50), unique=True, nullable=True, index=True, comment="Development path prefix (e.g., 'oms' for localhost:9999/oms/*)"),
|
sa.Column("path_prefix", sa.String(50), unique=True, nullable=True, index=True, comment="Development path prefix (e.g., 'oms' for localhost:9999/oms/*)"),
|
||||||
sa.Column("logo", sa.String(500), nullable=True, comment="Logo URL for light mode"),
|
sa.Column("logo", sa.String(500), nullable=True, comment="Logo URL for light mode"),
|
||||||
sa.Column("logo_dark", sa.String(500), nullable=True, comment="Logo URL for dark mode"),
|
sa.Column("logo_dark", sa.String(500), nullable=True, comment="Logo URL for dark mode"),
|
||||||
|
|||||||
35
alembic/versions/remove_store_platform_is_primary.py
Normal file
35
alembic/versions/remove_store_platform_is_primary.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
"""Remove is_primary from store_platforms
|
||||||
|
|
||||||
|
The platform is always deterministic from the URL context (path in dev,
|
||||||
|
subdomain/domain in prod) and the JWT carries token_platform_id.
|
||||||
|
The is_primary column was a fallback picker that silently returned the
|
||||||
|
wrong platform for multi-platform stores.
|
||||||
|
|
||||||
|
Revision ID: remove_is_primary_001
|
||||||
|
Revises: billing_001
|
||||||
|
Create Date: 2026-03-09
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
revision = "remove_is_primary_001"
|
||||||
|
down_revision = "billing_001"
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
op.drop_index("idx_store_platform_primary", table_name="store_platforms")
|
||||||
|
op.drop_column("store_platforms", "is_primary")
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
op.add_column(
|
||||||
|
"store_platforms",
|
||||||
|
sa.Column("is_primary", sa.Boolean(), nullable=False, server_default="false"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_store_platform_primary", "store_platforms", ["store_id", "is_primary"]
|
||||||
|
)
|
||||||
118
alembic/versions/softdelete_001_add_soft_delete.py
Normal file
118
alembic/versions/softdelete_001_add_soft_delete.py
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
"""Add soft delete columns (deleted_at, deleted_by_id) to business-critical tables.
|
||||||
|
|
||||||
|
Also converts unique constraints on users.email, users.username,
|
||||||
|
stores.store_code, stores.subdomain to partial unique indexes
|
||||||
|
that only apply to non-deleted rows.
|
||||||
|
|
||||||
|
Revision ID: softdelete_001
|
||||||
|
Revises: remove_is_primary_001, customers_002, dev_tools_002, orders_002, tenancy_004
|
||||||
|
Create Date: 2026-03-28
|
||||||
|
"""
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
revision = "softdelete_001"
|
||||||
|
down_revision = (
|
||||||
|
"remove_is_primary_001",
|
||||||
|
"customers_002",
|
||||||
|
"dev_tools_002",
|
||||||
|
"orders_002",
|
||||||
|
"tenancy_004",
|
||||||
|
)
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
# Tables receiving soft-delete columns
|
||||||
|
SOFT_DELETE_TABLES = [
|
||||||
|
"users",
|
||||||
|
"merchants",
|
||||||
|
"stores",
|
||||||
|
"customers",
|
||||||
|
"store_users",
|
||||||
|
"orders",
|
||||||
|
"products",
|
||||||
|
"loyalty_programs",
|
||||||
|
"loyalty_cards",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ======================================================================
|
||||||
|
# Step 1: Add deleted_at and deleted_by_id to all soft-delete tables
|
||||||
|
# ======================================================================
|
||||||
|
for table in SOFT_DELETE_TABLES:
|
||||||
|
op.add_column(table, sa.Column("deleted_at", sa.DateTime(), nullable=True))
|
||||||
|
op.add_column(
|
||||||
|
table,
|
||||||
|
sa.Column(
|
||||||
|
"deleted_by_id",
|
||||||
|
sa.Integer(),
|
||||||
|
sa.ForeignKey("users.id", ondelete="SET NULL"),
|
||||||
|
nullable=True,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
op.create_index(f"ix_{table}_deleted_at", table, ["deleted_at"])
|
||||||
|
|
||||||
|
# ======================================================================
|
||||||
|
# Step 2: Replace simple unique constraints with partial unique indexes
|
||||||
|
# (only enforce uniqueness among non-deleted rows)
|
||||||
|
# ======================================================================
|
||||||
|
|
||||||
|
# users.email: drop old unique index, create partial
|
||||||
|
op.drop_index("ix_users_email", table_name="users")
|
||||||
|
op.execute(
|
||||||
|
'CREATE UNIQUE INDEX uq_users_email_active ON users (email) '
|
||||||
|
'WHERE deleted_at IS NULL'
|
||||||
|
)
|
||||||
|
# Keep a non-unique index for lookups on all rows (including deleted)
|
||||||
|
op.create_index("ix_users_email", "users", ["email"])
|
||||||
|
|
||||||
|
# users.username: drop old unique index, create partial
|
||||||
|
op.drop_index("ix_users_username", table_name="users")
|
||||||
|
op.execute(
|
||||||
|
'CREATE UNIQUE INDEX uq_users_username_active ON users (username) '
|
||||||
|
'WHERE deleted_at IS NULL'
|
||||||
|
)
|
||||||
|
op.create_index("ix_users_username", "users", ["username"])
|
||||||
|
|
||||||
|
# stores.store_code: drop old unique index, create partial
|
||||||
|
op.drop_index("ix_stores_store_code", table_name="stores")
|
||||||
|
op.execute(
|
||||||
|
'CREATE UNIQUE INDEX uq_stores_store_code_active ON stores (store_code) '
|
||||||
|
'WHERE deleted_at IS NULL'
|
||||||
|
)
|
||||||
|
op.create_index("ix_stores_store_code", "stores", ["store_code"])
|
||||||
|
|
||||||
|
# stores.subdomain: drop old unique index, create partial
|
||||||
|
op.drop_index("ix_stores_subdomain", table_name="stores")
|
||||||
|
op.execute(
|
||||||
|
'CREATE UNIQUE INDEX uq_stores_subdomain_active ON stores (subdomain) '
|
||||||
|
'WHERE deleted_at IS NULL'
|
||||||
|
)
|
||||||
|
op.create_index("ix_stores_subdomain", "stores", ["subdomain"])
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Reverse partial unique indexes back to simple unique indexes
|
||||||
|
op.drop_index("ix_stores_subdomain", table_name="stores")
|
||||||
|
op.execute("DROP INDEX IF EXISTS uq_stores_subdomain_active")
|
||||||
|
op.create_index("ix_stores_subdomain", "stores", ["subdomain"], unique=True)
|
||||||
|
|
||||||
|
op.drop_index("ix_stores_store_code", table_name="stores")
|
||||||
|
op.execute("DROP INDEX IF EXISTS uq_stores_store_code_active")
|
||||||
|
op.create_index("ix_stores_store_code", "stores", ["store_code"], unique=True)
|
||||||
|
|
||||||
|
op.drop_index("ix_users_username", table_name="users")
|
||||||
|
op.execute("DROP INDEX IF EXISTS uq_users_username_active")
|
||||||
|
op.create_index("ix_users_username", "users", ["username"], unique=True)
|
||||||
|
|
||||||
|
op.drop_index("ix_users_email", table_name="users")
|
||||||
|
op.execute("DROP INDEX IF EXISTS uq_users_email_active")
|
||||||
|
op.create_index("ix_users_email", "users", ["email"], unique=True)
|
||||||
|
|
||||||
|
# Remove soft-delete columns from all tables
|
||||||
|
for table in reversed(SOFT_DELETE_TABLES):
|
||||||
|
op.drop_index(f"ix_{table}_deleted_at", table_name=table)
|
||||||
|
op.drop_column(table, "deleted_by_id")
|
||||||
|
op.drop_column(table, "deleted_at")
|
||||||
48
alembic/versions/z_add_platform_id_to_store_domains.py
Normal file
48
alembic/versions/z_add_platform_id_to_store_domains.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
"""add platform_id to store_domains
|
||||||
|
|
||||||
|
Revision ID: z_store_domain_platform_id
|
||||||
|
Revises: core_001
|
||||||
|
Create Date: 2026-02-08
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
revision = "z_store_domain_platform_id"
|
||||||
|
down_revision = "core_001"
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Add platform_id column
|
||||||
|
op.add_column(
|
||||||
|
"store_domains",
|
||||||
|
sa.Column(
|
||||||
|
"platform_id",
|
||||||
|
sa.Integer(),
|
||||||
|
sa.ForeignKey("platforms.id", ondelete="SET NULL"),
|
||||||
|
nullable=True,
|
||||||
|
comment="Platform this domain is associated with (for platform context resolution)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
op.create_index("idx_store_domain_platform", "store_domains", ["platform_id"])
|
||||||
|
|
||||||
|
# Backfill: set platform_id from the store's primary store_platform
|
||||||
|
op.execute(
|
||||||
|
"""
|
||||||
|
UPDATE store_domains sd
|
||||||
|
SET platform_id = (
|
||||||
|
SELECT sp.platform_id
|
||||||
|
FROM store_platforms sp
|
||||||
|
WHERE sp.store_id = sd.store_id
|
||||||
|
AND sp.is_primary = true
|
||||||
|
LIMIT 1
|
||||||
|
)
|
||||||
|
WHERE sd.platform_id IS NULL
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
op.drop_index("idx_store_domain_platform", table_name="store_domains")
|
||||||
|
op.drop_column("store_domains", "platform_id")
|
||||||
@@ -0,0 +1,33 @@
|
|||||||
|
"""add unique constraints for custom_subdomain and store domain per platform
|
||||||
|
|
||||||
|
Revision ID: z_unique_subdomain_domain
|
||||||
|
Revises: a44f4956cfb1
|
||||||
|
Create Date: 2026-02-26
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
revision = "z_unique_subdomain_domain"
|
||||||
|
down_revision = ("a44f4956cfb1", "tenancy_003")
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# StorePlatform: same custom_subdomain cannot be claimed twice on the same platform
|
||||||
|
op.create_unique_constraint(
|
||||||
|
"uq_custom_subdomain_platform",
|
||||||
|
"store_platforms",
|
||||||
|
["custom_subdomain", "platform_id"],
|
||||||
|
)
|
||||||
|
|
||||||
|
# StoreDomain: a store can have at most one custom domain per platform
|
||||||
|
op.create_unique_constraint(
|
||||||
|
"uq_store_domain_platform",
|
||||||
|
"store_domains",
|
||||||
|
["store_id", "platform_id"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
op.drop_constraint("uq_store_domain_platform", "store_domains", type_="unique")
|
||||||
|
op.drop_constraint("uq_custom_subdomain_platform", "store_platforms", type_="unique")
|
||||||
@@ -0,0 +1,56 @@
|
|||||||
|
"""add celery_task_id to job tables
|
||||||
|
|
||||||
|
Revision ID: 09d84a46530f
|
||||||
|
Revises: y3d4e5f6g7h8
|
||||||
|
Create Date: 2026-01-11 16:44:59.070110
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "09d84a46530f"
|
||||||
|
down_revision: str | None = "y3d4e5f6g7h8"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Add celery_task_id column to job tracking tables for Celery integration."""
|
||||||
|
# MarketplaceImportJob
|
||||||
|
op.add_column("marketplace_import_jobs", sa.Column("celery_task_id", sa.String(length=255), nullable=True))
|
||||||
|
op.create_index(op.f("ix_marketplace_import_jobs_celery_task_id"), "marketplace_import_jobs", ["celery_task_id"], unique=False)
|
||||||
|
|
||||||
|
# LetzshopHistoricalImportJob
|
||||||
|
op.add_column("letzshop_historical_import_jobs", sa.Column("celery_task_id", sa.String(length=255), nullable=True))
|
||||||
|
op.create_index(op.f("ix_letzshop_historical_import_jobs_celery_task_id"), "letzshop_historical_import_jobs", ["celery_task_id"], unique=False)
|
||||||
|
|
||||||
|
# ArchitectureScan
|
||||||
|
op.add_column("architecture_scans", sa.Column("celery_task_id", sa.String(length=255), nullable=True))
|
||||||
|
op.create_index(op.f("ix_architecture_scans_celery_task_id"), "architecture_scans", ["celery_task_id"], unique=False)
|
||||||
|
|
||||||
|
# TestRun
|
||||||
|
op.add_column("test_runs", sa.Column("celery_task_id", sa.String(length=255), nullable=True))
|
||||||
|
op.create_index(op.f("ix_test_runs_celery_task_id"), "test_runs", ["celery_task_id"], unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Remove celery_task_id column from job tracking tables."""
|
||||||
|
# TestRun
|
||||||
|
op.drop_index(op.f("ix_test_runs_celery_task_id"), table_name="test_runs")
|
||||||
|
op.drop_column("test_runs", "celery_task_id")
|
||||||
|
|
||||||
|
# ArchitectureScan
|
||||||
|
op.drop_index(op.f("ix_architecture_scans_celery_task_id"), table_name="architecture_scans")
|
||||||
|
op.drop_column("architecture_scans", "celery_task_id")
|
||||||
|
|
||||||
|
# LetzshopHistoricalImportJob
|
||||||
|
op.drop_index(op.f("ix_letzshop_historical_import_jobs_celery_task_id"), table_name="letzshop_historical_import_jobs")
|
||||||
|
op.drop_column("letzshop_historical_import_jobs", "celery_task_id")
|
||||||
|
|
||||||
|
# MarketplaceImportJob
|
||||||
|
op.drop_index(op.f("ix_marketplace_import_jobs_celery_task_id"), table_name="marketplace_import_jobs")
|
||||||
|
op.drop_column("marketplace_import_jobs", "celery_task_id")
|
||||||
@@ -0,0 +1,68 @@
|
|||||||
|
"""add application_logs table for hybrid logging
|
||||||
|
|
||||||
|
Revision ID: 0bd9ffaaced1
|
||||||
|
Revises: 7a7ce92593d5
|
||||||
|
Create Date: 2025-11-29 12:44:55.427245
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "0bd9ffaaced1"
|
||||||
|
down_revision: str | None = "7a7ce92593d5"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Create application_logs table
|
||||||
|
op.create_table(
|
||||||
|
"application_logs",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("timestamp", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("level", sa.String(length=20), nullable=False),
|
||||||
|
sa.Column("logger_name", sa.String(length=200), nullable=False),
|
||||||
|
sa.Column("module", sa.String(length=200), nullable=True),
|
||||||
|
sa.Column("function_name", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("line_number", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("message", sa.Text(), nullable=False),
|
||||||
|
sa.Column("exception_type", sa.String(length=200), nullable=True),
|
||||||
|
sa.Column("exception_message", sa.Text(), nullable=True),
|
||||||
|
sa.Column("stack_trace", sa.Text(), nullable=True),
|
||||||
|
sa.Column("request_id", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("user_id", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("context", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(["user_id"], ["users.id"], ),
|
||||||
|
sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ),
|
||||||
|
sa.PrimaryKeyConstraint("id")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create indexes for better query performance
|
||||||
|
op.create_index(op.f("ix_application_logs_id"), "application_logs", ["id"], unique=False)
|
||||||
|
op.create_index(op.f("ix_application_logs_timestamp"), "application_logs", ["timestamp"], unique=False)
|
||||||
|
op.create_index(op.f("ix_application_logs_level"), "application_logs", ["level"], unique=False)
|
||||||
|
op.create_index(op.f("ix_application_logs_logger_name"), "application_logs", ["logger_name"], unique=False)
|
||||||
|
op.create_index(op.f("ix_application_logs_request_id"), "application_logs", ["request_id"], unique=False)
|
||||||
|
op.create_index(op.f("ix_application_logs_user_id"), "application_logs", ["user_id"], unique=False)
|
||||||
|
op.create_index(op.f("ix_application_logs_vendor_id"), "application_logs", ["vendor_id"], unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Drop indexes
|
||||||
|
op.drop_index(op.f("ix_application_logs_vendor_id"), table_name="application_logs")
|
||||||
|
op.drop_index(op.f("ix_application_logs_user_id"), table_name="application_logs")
|
||||||
|
op.drop_index(op.f("ix_application_logs_request_id"), table_name="application_logs")
|
||||||
|
op.drop_index(op.f("ix_application_logs_logger_name"), table_name="application_logs")
|
||||||
|
op.drop_index(op.f("ix_application_logs_level"), table_name="application_logs")
|
||||||
|
op.drop_index(op.f("ix_application_logs_timestamp"), table_name="application_logs")
|
||||||
|
op.drop_index(op.f("ix_application_logs_id"), table_name="application_logs")
|
||||||
|
|
||||||
|
# Drop table
|
||||||
|
op.drop_table("application_logs")
|
||||||
@@ -0,0 +1,367 @@
|
|||||||
|
"""add letzshop_vendor_cache table
|
||||||
|
|
||||||
|
Revision ID: 1b398cf45e85
|
||||||
|
Revises: 09d84a46530f
|
||||||
|
Create Date: 2026-01-13 19:38:45.423378
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql, sqlite
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "1b398cf45e85"
|
||||||
|
down_revision: str | None = "09d84a46530f"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table("letzshop_vendor_cache",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("letzshop_id", sa.String(length=50), nullable=False),
|
||||||
|
sa.Column("slug", sa.String(length=200), nullable=False),
|
||||||
|
sa.Column("name", sa.String(length=255), nullable=False),
|
||||||
|
sa.Column("company_name", sa.String(length=255), nullable=True),
|
||||||
|
sa.Column("is_active", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("description_en", sa.Text(), nullable=True),
|
||||||
|
sa.Column("description_fr", sa.Text(), nullable=True),
|
||||||
|
sa.Column("description_de", sa.Text(), nullable=True),
|
||||||
|
sa.Column("email", sa.String(length=255), nullable=True),
|
||||||
|
sa.Column("phone", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("fax", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("website", sa.String(length=500), nullable=True),
|
||||||
|
sa.Column("street", sa.String(length=255), nullable=True),
|
||||||
|
sa.Column("street_number", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("city", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("zipcode", sa.String(length=20), nullable=True),
|
||||||
|
sa.Column("country_iso", sa.String(length=5), nullable=True),
|
||||||
|
sa.Column("latitude", sa.String(length=20), nullable=True),
|
||||||
|
sa.Column("longitude", sa.String(length=20), nullable=True),
|
||||||
|
sa.Column("categories", sqlite.JSON(), nullable=True),
|
||||||
|
sa.Column("background_image_url", sa.String(length=500), nullable=True),
|
||||||
|
sa.Column("social_media_links", sqlite.JSON(), nullable=True),
|
||||||
|
sa.Column("opening_hours_en", sa.Text(), nullable=True),
|
||||||
|
sa.Column("opening_hours_fr", sa.Text(), nullable=True),
|
||||||
|
sa.Column("opening_hours_de", sa.Text(), nullable=True),
|
||||||
|
sa.Column("representative_name", sa.String(length=255), nullable=True),
|
||||||
|
sa.Column("representative_title", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("claimed_by_vendor_id", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("claimed_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("last_synced_at", sa.DateTime(timezone=True), nullable=False),
|
||||||
|
sa.Column("raw_data", sqlite.JSON(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["claimed_by_vendor_id"], ["vendors.id"], ),
|
||||||
|
sa.PrimaryKeyConstraint("id")
|
||||||
|
)
|
||||||
|
op.create_index("idx_vendor_cache_active", "letzshop_vendor_cache", ["is_active"], unique=False)
|
||||||
|
op.create_index("idx_vendor_cache_city", "letzshop_vendor_cache", ["city"], unique=False)
|
||||||
|
op.create_index("idx_vendor_cache_claimed", "letzshop_vendor_cache", ["claimed_by_vendor_id"], unique=False)
|
||||||
|
op.create_index(op.f("ix_letzshop_vendor_cache_claimed_by_vendor_id"), "letzshop_vendor_cache", ["claimed_by_vendor_id"], unique=False)
|
||||||
|
op.create_index(op.f("ix_letzshop_vendor_cache_id"), "letzshop_vendor_cache", ["id"], unique=False)
|
||||||
|
op.create_index(op.f("ix_letzshop_vendor_cache_letzshop_id"), "letzshop_vendor_cache", ["letzshop_id"], unique=True)
|
||||||
|
op.create_index(op.f("ix_letzshop_vendor_cache_slug"), "letzshop_vendor_cache", ["slug"], unique=True)
|
||||||
|
op.drop_constraint("architecture_rules_rule_id_key", "architecture_rules", type_="unique")
|
||||||
|
op.alter_column("capacity_snapshots", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("now()"))
|
||||||
|
op.alter_column("capacity_snapshots", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("now()"))
|
||||||
|
op.create_index(op.f("ix_features_id"), "features", ["id"], unique=False)
|
||||||
|
op.create_index(op.f("ix_features_minimum_tier_id"), "features", ["minimum_tier_id"], unique=False)
|
||||||
|
op.create_index("idx_inv_tx_order", "inventory_transactions", ["order_id"], unique=False)
|
||||||
|
op.alter_column("invoices", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("invoices", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("letzshop_fulfillment_queue", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("letzshop_fulfillment_queue", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("letzshop_sync_logs", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("letzshop_sync_logs", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("media_files", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(),
|
||||||
|
nullable=False,
|
||||||
|
existing_server_default=sa.text("now()"))
|
||||||
|
op.alter_column("media_files", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(),
|
||||||
|
nullable=False)
|
||||||
|
op.alter_column("order_item_exceptions", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("order_item_exceptions", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("order_items", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("order_items", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("orders", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("orders", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.drop_index("ix_password_reset_tokens_customer_id", table_name="password_reset_tokens")
|
||||||
|
op.create_index(op.f("ix_password_reset_tokens_id"), "password_reset_tokens", ["id"], unique=False)
|
||||||
|
op.alter_column("product_media", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(),
|
||||||
|
nullable=False,
|
||||||
|
existing_server_default=sa.text("now()"))
|
||||||
|
op.alter_column("product_media", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(),
|
||||||
|
nullable=False)
|
||||||
|
op.alter_column("products", "is_digital",
|
||||||
|
existing_type=sa.BOOLEAN(),
|
||||||
|
nullable=True,
|
||||||
|
existing_server_default=sa.text("false"))
|
||||||
|
op.alter_column("products", "product_type",
|
||||||
|
existing_type=sa.VARCHAR(length=20),
|
||||||
|
nullable=True,
|
||||||
|
existing_server_default=sa.text("'physical'::character varying"))
|
||||||
|
op.drop_index("idx_product_is_digital", table_name="products")
|
||||||
|
op.create_index(op.f("ix_products_is_digital"), "products", ["is_digital"], unique=False)
|
||||||
|
op.drop_constraint("uq_vendor_email_settings_vendor_id", "vendor_email_settings", type_="unique")
|
||||||
|
op.drop_index("ix_vendor_email_templates_lookup", table_name="vendor_email_templates")
|
||||||
|
op.create_index(op.f("ix_vendor_email_templates_id"), "vendor_email_templates", ["id"], unique=False)
|
||||||
|
op.alter_column("vendor_invoice_settings", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("vendor_invoice_settings", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.drop_constraint("vendor_invoice_settings_vendor_id_key", "vendor_invoice_settings", type_="unique")
|
||||||
|
op.alter_column("vendor_letzshop_credentials", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("vendor_letzshop_credentials", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.drop_constraint("vendor_letzshop_credentials_vendor_id_key", "vendor_letzshop_credentials", type_="unique")
|
||||||
|
op.alter_column("vendor_subscriptions", "created_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.alter_column("vendor_subscriptions", "updated_at",
|
||||||
|
existing_type=postgresql.TIMESTAMP(timezone=True),
|
||||||
|
type_=sa.DateTime(),
|
||||||
|
existing_nullable=False,
|
||||||
|
existing_server_default=sa.text("CURRENT_TIMESTAMP"))
|
||||||
|
op.drop_constraint("vendor_subscriptions_vendor_id_key", "vendor_subscriptions", type_="unique")
|
||||||
|
op.drop_constraint("fk_vendor_subscriptions_tier_id", "vendor_subscriptions", type_="foreignkey")
|
||||||
|
op.create_foreign_key(None, "vendor_subscriptions", "subscription_tiers", ["tier_id"], ["id"])
|
||||||
|
op.alter_column("vendors", "storefront_locale",
|
||||||
|
existing_type=sa.VARCHAR(length=10),
|
||||||
|
comment=None,
|
||||||
|
existing_comment="Currency/number formatting locale (NULL = inherit from platform)",
|
||||||
|
existing_nullable=True)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert the schema changes made by this migration's upgrade().

    Restores the original column comment, timezone-aware timestamp types,
    unique constraints, the named tier FK (with ON DELETE SET NULL), the
    pre-existing indexes, and drops the letzshop_vendor_cache table.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column("vendors", "storefront_locale",
        existing_type=sa.VARCHAR(length=10),
        comment="Currency/number formatting locale (NULL = inherit from platform)",
        existing_nullable=True)
    # BUGFIX: the autogenerated script passed None as the constraint name,
    # which Alembic rejects at runtime ("Constraint must have a name").
    # upgrade() created the FK via op.create_foreign_key(None, ...), so
    # PostgreSQL assigned its default name "<table>_<column>_fkey".
    # NOTE(review): verify this matches the actual name in the database.
    op.drop_constraint("vendor_subscriptions_tier_id_fkey", "vendor_subscriptions", type_="foreignkey")
    op.create_foreign_key("fk_vendor_subscriptions_tier_id", "vendor_subscriptions", "subscription_tiers", ["tier_id"], ["id"], ondelete="SET NULL")
    op.create_unique_constraint("vendor_subscriptions_vendor_id_key", "vendor_subscriptions", ["vendor_id"])
    # Restore timezone-aware timestamps on the audit columns that upgrade()
    # converted to naive sa.DateTime().
    op.alter_column("vendor_subscriptions", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("vendor_subscriptions", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.create_unique_constraint("vendor_letzshop_credentials_vendor_id_key", "vendor_letzshop_credentials", ["vendor_id"])
    op.alter_column("vendor_letzshop_credentials", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("vendor_letzshop_credentials", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.create_unique_constraint("vendor_invoice_settings_vendor_id_key", "vendor_invoice_settings", ["vendor_id"])
    op.alter_column("vendor_invoice_settings", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("vendor_invoice_settings", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.drop_index(op.f("ix_vendor_email_templates_id"), table_name="vendor_email_templates")
    op.create_index("ix_vendor_email_templates_lookup", "vendor_email_templates", ["vendor_id", "template_code", "language"], unique=False)
    op.create_unique_constraint("uq_vendor_email_settings_vendor_id", "vendor_email_settings", ["vendor_id"])
    op.drop_index(op.f("ix_products_is_digital"), table_name="products")
    op.create_index("idx_product_is_digital", "products", ["is_digital"], unique=False)
    op.alter_column("products", "product_type",
        existing_type=sa.VARCHAR(length=20),
        nullable=False,
        existing_server_default=sa.text("'physical'::character varying"))
    op.alter_column("products", "is_digital",
        existing_type=sa.BOOLEAN(),
        nullable=False,
        existing_server_default=sa.text("false"))
    op.alter_column("product_media", "updated_at",
        existing_type=postgresql.TIMESTAMP(),
        nullable=True)
    op.alter_column("product_media", "created_at",
        existing_type=postgresql.TIMESTAMP(),
        nullable=True,
        existing_server_default=sa.text("now()"))
    op.drop_index(op.f("ix_password_reset_tokens_id"), table_name="password_reset_tokens")
    op.create_index("ix_password_reset_tokens_customer_id", "password_reset_tokens", ["customer_id"], unique=False)
    op.alter_column("orders", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("orders", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("order_items", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("order_items", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("order_item_exceptions", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("order_item_exceptions", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("media_files", "updated_at",
        existing_type=postgresql.TIMESTAMP(),
        nullable=True)
    op.alter_column("media_files", "created_at",
        existing_type=postgresql.TIMESTAMP(),
        nullable=True,
        existing_server_default=sa.text("now()"))
    op.alter_column("letzshop_sync_logs", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("letzshop_sync_logs", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("letzshop_fulfillment_queue", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("letzshop_fulfillment_queue", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("invoices", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.alter_column("invoices", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("CURRENT_TIMESTAMP"))
    op.drop_index("idx_inv_tx_order", table_name="inventory_transactions")
    op.drop_index(op.f("ix_features_minimum_tier_id"), table_name="features")
    op.drop_index(op.f("ix_features_id"), table_name="features")
    op.alter_column("capacity_snapshots", "updated_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.alter_column("capacity_snapshots", "created_at",
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.create_unique_constraint("architecture_rules_rule_id_key", "architecture_rules", ["rule_id"])
    # Drop the letzshop_vendor_cache table created by upgrade(): indexes
    # must go first, then the table itself.
    op.drop_index(op.f("ix_letzshop_vendor_cache_slug"), table_name="letzshop_vendor_cache")
    op.drop_index(op.f("ix_letzshop_vendor_cache_letzshop_id"), table_name="letzshop_vendor_cache")
    op.drop_index(op.f("ix_letzshop_vendor_cache_id"), table_name="letzshop_vendor_cache")
    op.drop_index(op.f("ix_letzshop_vendor_cache_claimed_by_vendor_id"), table_name="letzshop_vendor_cache")
    op.drop_index("idx_vendor_cache_claimed", table_name="letzshop_vendor_cache")
    op.drop_index("idx_vendor_cache_city", table_name="letzshop_vendor_cache")
    op.drop_index("idx_vendor_cache_active", table_name="letzshop_vendor_cache")
    op.drop_table("letzshop_vendor_cache")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,59 @@
|
|||||||
|
"""add_letzshop_historical_import_jobs_table
|
||||||
|
|
||||||
|
Revision ID: 204273a59d73
|
||||||
|
Revises: cb88bc9b5f86
|
||||||
|
Create Date: 2025-12-19 05:40:53.463341
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# Removed: from sqlalchemy.dialects import sqlite (using sa.JSON for PostgreSQL)
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision: str = "204273a59d73"                       # this migration's id
down_revision: str | None = "cb88bc9b5f86"           # parent revision in the chain
branch_labels: str | Sequence[str] | None = None     # no named branch
depends_on: str | Sequence[str] | None = None        # no cross-branch dependency
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the letzshop_historical_import_jobs table and its indexes.

    One row per import job; the integer columns (orders_imported,
    orders_skipped, products_matched, ...) are per-job counters, and
    confirmed_stats/declined_stats hold JSON summaries.
    """
    op.create_table("letzshop_historical_import_jobs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("status", sa.String(length=50), nullable=False),
        sa.Column("current_phase", sa.String(length=20), nullable=True),
        sa.Column("current_page", sa.Integer(), nullable=True),
        sa.Column("total_pages", sa.Integer(), nullable=True),
        sa.Column("shipments_fetched", sa.Integer(), nullable=True),
        sa.Column("orders_processed", sa.Integer(), nullable=True),
        sa.Column("orders_imported", sa.Integer(), nullable=True),
        sa.Column("orders_updated", sa.Integer(), nullable=True),
        sa.Column("orders_skipped", sa.Integer(), nullable=True),
        sa.Column("products_matched", sa.Integer(), nullable=True),
        sa.Column("products_not_found", sa.Integer(), nullable=True),
        sa.Column("confirmed_stats", sa.JSON(), nullable=True),
        sa.Column("declined_stats", sa.JSON(), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        # started_at/completed_at are timezone-aware; created_at/updated_at are naive.
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ),
        sa.PrimaryKeyConstraint("id")
    )
    # Composite index for (vendor_id, status) lookups, plus single-column indexes.
    op.create_index("idx_historical_import_vendor", "letzshop_historical_import_jobs", ["vendor_id", "status"], unique=False)
    op.create_index(op.f("ix_letzshop_historical_import_jobs_id"), "letzshop_historical_import_jobs", ["id"], unique=False)
    op.create_index(op.f("ix_letzshop_historical_import_jobs_vendor_id"), "letzshop_historical_import_jobs", ["vendor_id"], unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the letzshop_historical_import_jobs table (indexes first)."""
    op.drop_index(op.f("ix_letzshop_historical_import_jobs_vendor_id"), table_name="letzshop_historical_import_jobs")
    op.drop_index(op.f("ix_letzshop_historical_import_jobs_id"), table_name="letzshop_historical_import_jobs")
    op.drop_index("idx_historical_import_vendor", table_name="letzshop_historical_import_jobs")
    op.drop_table("letzshop_historical_import_jobs")
|
||||||
@@ -0,0 +1,27 @@
|
|||||||
|
"""add_order_date_to_letzshop_orders
|
||||||
|
|
||||||
|
Revision ID: 2362c2723a93
|
||||||
|
Revises: 204273a59d73
|
||||||
|
Create Date: 2025-12-19 08:46:23.731912
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision: str = "2362c2723a93"                       # this migration's id
down_revision: str | None = "204273a59d73"           # parent revision in the chain
branch_labels: str | Sequence[str] | None = None     # no named branch
depends_on: str | Sequence[str] | None = None        # no cross-branch dependency
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add a nullable, timezone-aware order_date column to letzshop_orders."""
    # Add order_date column to letzshop_orders table
    # Nullable so existing rows need no backfill at migration time.
    op.add_column("letzshop_orders", sa.Column("order_date", sa.DateTime(timezone=True), nullable=True))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the order_date column added by upgrade()."""
    op.drop_column("letzshop_orders", "order_date")
|
||||||
@@ -0,0 +1,37 @@
|
|||||||
|
"""add contact fields to vendor
|
||||||
|
|
||||||
|
Revision ID: 28d44d503cac
|
||||||
|
Revises: 9f3a25ea4991
|
||||||
|
Create Date: 2025-12-03 22:26:02.161087
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision: str = "28d44d503cac"                       # this migration's id
down_revision: str | None = "9f3a25ea4991"           # parent revision in the chain
branch_labels: str | Sequence[str] | None = None     # no named branch
depends_on: str | Sequence[str] | None = None        # no cross-branch dependency
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add five nullable contact/identity columns to the vendors table."""
    # Add nullable contact fields to vendor table
    # These allow vendor-specific branding/identity, overriding company defaults
    op.add_column("vendors", sa.Column("contact_email", sa.String(255), nullable=True))
    op.add_column("vendors", sa.Column("contact_phone", sa.String(50), nullable=True))
    op.add_column("vendors", sa.Column("website", sa.String(255), nullable=True))
    op.add_column("vendors", sa.Column("business_address", sa.Text(), nullable=True))
    op.add_column("vendors", sa.Column("tax_number", sa.String(100), nullable=True))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the contact columns added by upgrade() (reverse order of creation)."""
    # Remove contact fields from vendor table
    op.drop_column("vendors", "tax_number")
    op.drop_column("vendors", "business_address")
    op.drop_column("vendors", "website")
    op.drop_column("vendors", "contact_phone")
    op.drop_column("vendors", "contact_email")
|
||||||
@@ -0,0 +1,421 @@
|
|||||||
|
"""add_subscription_billing_tables
|
||||||
|
|
||||||
|
Revision ID: 2953ed10d22c
|
||||||
|
Revises: e1bfb453fbe9
|
||||||
|
Create Date: 2025-12-25 18:29:34.167773
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# Removed: from sqlalchemy.dialects import sqlite (using sa.JSON for PostgreSQL)
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision: str = "2953ed10d22c"                       # this migration's id
down_revision: str | None = "e1bfb453fbe9"           # parent revision in the chain
branch_labels: str | Sequence[str] | None = None     # no named branch
depends_on: str | Sequence[str] | None = None        # no cross-branch dependency
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create subscription/billing tables, extend vendor_subscriptions, and seed data.

    Creates subscription_tiers, addon_products, billing_history,
    vendor_addons and stripe_webhook_events; adds Stripe/scheduling columns
    to vendor_subscriptions; then bulk-inserts the four subscription tiers
    and five add-on products.
    """
    # =========================================================================
    # Create new subscription and billing tables
    # =========================================================================

    # subscription_tiers - Database-driven tier definitions
    op.create_table("subscription_tiers",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=30), nullable=False),
        sa.Column("name", sa.String(length=100), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        # All monetary amounts are stored as integer cents.
        sa.Column("price_monthly_cents", sa.Integer(), nullable=False),
        sa.Column("price_annual_cents", sa.Integer(), nullable=True),
        # NULL limit columns mean "unlimited" in the seed data below.
        sa.Column("orders_per_month", sa.Integer(), nullable=True),
        sa.Column("products_limit", sa.Integer(), nullable=True),
        sa.Column("team_members", sa.Integer(), nullable=True),
        sa.Column("order_history_months", sa.Integer(), nullable=True),
        sa.Column("features", sa.JSON(), nullable=True),
        sa.Column("stripe_product_id", sa.String(length=100), nullable=True),
        sa.Column("stripe_price_monthly_id", sa.String(length=100), nullable=True),
        sa.Column("stripe_price_annual_id", sa.String(length=100), nullable=True),
        sa.Column("display_order", sa.Integer(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("is_public", sa.Boolean(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index(op.f("ix_subscription_tiers_code"), "subscription_tiers", ["code"], unique=True)
    op.create_index(op.f("ix_subscription_tiers_id"), "subscription_tiers", ["id"], unique=False)

    # addon_products - Purchasable add-ons (domains, SSL, email)
    op.create_table("addon_products",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=50), nullable=False),
        sa.Column("name", sa.String(length=100), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("category", sa.String(length=50), nullable=False),
        sa.Column("price_cents", sa.Integer(), nullable=False),
        sa.Column("billing_period", sa.String(length=20), nullable=False),
        sa.Column("quantity_unit", sa.String(length=50), nullable=True),
        sa.Column("quantity_value", sa.Integer(), nullable=True),
        sa.Column("stripe_product_id", sa.String(length=100), nullable=True),
        sa.Column("stripe_price_id", sa.String(length=100), nullable=True),
        sa.Column("display_order", sa.Integer(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index(op.f("ix_addon_products_category"), "addon_products", ["category"], unique=False)
    op.create_index(op.f("ix_addon_products_code"), "addon_products", ["code"], unique=True)
    op.create_index(op.f("ix_addon_products_id"), "addon_products", ["id"], unique=False)

    # billing_history - Invoice and payment history
    op.create_table("billing_history",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("stripe_invoice_id", sa.String(length=100), nullable=True),
        sa.Column("stripe_payment_intent_id", sa.String(length=100), nullable=True),
        sa.Column("invoice_number", sa.String(length=50), nullable=True),
        sa.Column("invoice_date", sa.DateTime(timezone=True), nullable=False),
        sa.Column("due_date", sa.DateTime(timezone=True), nullable=True),
        sa.Column("subtotal_cents", sa.Integer(), nullable=False),
        sa.Column("tax_cents", sa.Integer(), nullable=False),
        sa.Column("total_cents", sa.Integer(), nullable=False),
        sa.Column("amount_paid_cents", sa.Integer(), nullable=False),
        sa.Column("currency", sa.String(length=3), nullable=False),
        sa.Column("status", sa.String(length=20), nullable=False),
        sa.Column("invoice_pdf_url", sa.String(length=500), nullable=True),
        sa.Column("hosted_invoice_url", sa.String(length=500), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("line_items", sa.JSON(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index("idx_billing_status", "billing_history", ["vendor_id", "status"], unique=False)
    op.create_index("idx_billing_vendor_date", "billing_history", ["vendor_id", "invoice_date"], unique=False)
    op.create_index(op.f("ix_billing_history_id"), "billing_history", ["id"], unique=False)
    op.create_index(op.f("ix_billing_history_status"), "billing_history", ["status"], unique=False)
    # stripe_invoice_id is unique: one history row per Stripe invoice.
    op.create_index(op.f("ix_billing_history_stripe_invoice_id"), "billing_history", ["stripe_invoice_id"], unique=True)
    op.create_index(op.f("ix_billing_history_vendor_id"), "billing_history", ["vendor_id"], unique=False)

    # vendor_addons - Add-ons purchased by vendor
    op.create_table("vendor_addons",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("addon_product_id", sa.Integer(), nullable=False),
        sa.Column("status", sa.String(length=20), nullable=False),
        sa.Column("domain_name", sa.String(length=255), nullable=True),
        sa.Column("quantity", sa.Integer(), nullable=False),
        sa.Column("stripe_subscription_item_id", sa.String(length=100), nullable=True),
        sa.Column("period_start", sa.DateTime(timezone=True), nullable=True),
        sa.Column("period_end", sa.DateTime(timezone=True), nullable=True),
        sa.Column("cancelled_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["addon_product_id"], ["addon_products.id"], ),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index("idx_vendor_addon_product", "vendor_addons", ["vendor_id", "addon_product_id"], unique=False)
    op.create_index("idx_vendor_addon_status", "vendor_addons", ["vendor_id", "status"], unique=False)
    op.create_index(op.f("ix_vendor_addons_addon_product_id"), "vendor_addons", ["addon_product_id"], unique=False)
    op.create_index(op.f("ix_vendor_addons_domain_name"), "vendor_addons", ["domain_name"], unique=False)
    op.create_index(op.f("ix_vendor_addons_id"), "vendor_addons", ["id"], unique=False)
    op.create_index(op.f("ix_vendor_addons_status"), "vendor_addons", ["status"], unique=False)
    op.create_index(op.f("ix_vendor_addons_vendor_id"), "vendor_addons", ["vendor_id"], unique=False)

    # stripe_webhook_events - Webhook idempotency tracking
    op.create_table("stripe_webhook_events",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("event_id", sa.String(length=100), nullable=False),
        sa.Column("event_type", sa.String(length=100), nullable=False),
        sa.Column("status", sa.String(length=20), nullable=False),
        sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("payload_encrypted", sa.Text(), nullable=True),
        sa.Column("vendor_id", sa.Integer(), nullable=True),
        sa.Column("subscription_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["subscription_id"], ["vendor_subscriptions.id"], ),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index("idx_webhook_event_type_status", "stripe_webhook_events", ["event_type", "status"], unique=False)
    # event_id is unique: this is the idempotency key for Stripe events.
    op.create_index(op.f("ix_stripe_webhook_events_event_id"), "stripe_webhook_events", ["event_id"], unique=True)
    op.create_index(op.f("ix_stripe_webhook_events_event_type"), "stripe_webhook_events", ["event_type"], unique=False)
    op.create_index(op.f("ix_stripe_webhook_events_id"), "stripe_webhook_events", ["id"], unique=False)
    op.create_index(op.f("ix_stripe_webhook_events_status"), "stripe_webhook_events", ["status"], unique=False)
    op.create_index(op.f("ix_stripe_webhook_events_subscription_id"), "stripe_webhook_events", ["subscription_id"], unique=False)
    op.create_index(op.f("ix_stripe_webhook_events_vendor_id"), "stripe_webhook_events", ["vendor_id"], unique=False)

    # =========================================================================
    # Add new columns to vendor_subscriptions
    # =========================================================================
    op.add_column("vendor_subscriptions", sa.Column("stripe_price_id", sa.String(length=100), nullable=True))
    op.add_column("vendor_subscriptions", sa.Column("stripe_payment_method_id", sa.String(length=100), nullable=True))
    op.add_column("vendor_subscriptions", sa.Column("proration_behavior", sa.String(length=50), nullable=True))
    op.add_column("vendor_subscriptions", sa.Column("scheduled_tier_change", sa.String(length=30), nullable=True))
    op.add_column("vendor_subscriptions", sa.Column("scheduled_change_at", sa.DateTime(timezone=True), nullable=True))
    # server_default="0" lets the NOT NULL column be added to existing rows.
    op.add_column("vendor_subscriptions", sa.Column("payment_retry_count", sa.Integer(), server_default="0", nullable=False))
    op.add_column("vendor_subscriptions", sa.Column("last_payment_error", sa.Text(), nullable=True))

    # =========================================================================
    # Seed subscription tiers
    # =========================================================================
    # NOTE(review): datetime.utcnow() is deprecated since Python 3.12; it is
    # naive UTC, which matches the naive sa.DateTime() columns above.
    now = datetime.utcnow()

    # Lightweight table construct so bulk_insert can target the new table.
    subscription_tiers = sa.table(
        "subscription_tiers",
        sa.column("code", sa.String),
        sa.column("name", sa.String),
        sa.column("description", sa.Text),
        sa.column("price_monthly_cents", sa.Integer),
        sa.column("price_annual_cents", sa.Integer),
        sa.column("orders_per_month", sa.Integer),
        sa.column("products_limit", sa.Integer),
        sa.column("team_members", sa.Integer),
        sa.column("order_history_months", sa.Integer),
        sa.column("features", sa.JSON),
        sa.column("display_order", sa.Integer),
        sa.column("is_active", sa.Boolean),
        sa.column("is_public", sa.Boolean),
        sa.column("created_at", sa.DateTime),
        sa.column("updated_at", sa.DateTime),
    )

    op.bulk_insert(subscription_tiers, [
        {
            "code": "essential",
            "name": "Essential",
            "description": "Perfect for solo vendors getting started with Letzshop",
            "price_monthly_cents": 4900,
            "price_annual_cents": 49000,
            "orders_per_month": 100,
            "products_limit": 200,
            "team_members": 1,
            "order_history_months": 6,
            "features": ["letzshop_sync", "inventory_basic", "invoice_lu", "customer_view"],
            "display_order": 1,
            "is_active": True,
            "is_public": True,
            "created_at": now,
            "updated_at": now,
        },
        {
            "code": "professional",
            "name": "Professional",
            "description": "For active multi-channel vendors shipping EU-wide",
            "price_monthly_cents": 9900,
            "price_annual_cents": 99000,
            "orders_per_month": 500,
            "products_limit": None,
            "team_members": 3,
            "order_history_months": 24,
            "features": [
                "letzshop_sync", "inventory_locations", "inventory_purchase_orders",
                "invoice_lu", "invoice_eu_vat", "customer_view", "customer_export"
            ],
            "display_order": 2,
            "is_active": True,
            "is_public": True,
            "created_at": now,
            "updated_at": now,
        },
        {
            "code": "business",
            "name": "Business",
            "description": "For high-volume vendors with teams and data-driven operations",
            "price_monthly_cents": 19900,
            "price_annual_cents": 199000,
            "orders_per_month": 2000,
            "products_limit": None,
            "team_members": 10,
            "order_history_months": None,
            "features": [
                "letzshop_sync", "inventory_locations", "inventory_purchase_orders",
                "invoice_lu", "invoice_eu_vat", "invoice_bulk", "customer_view",
                "customer_export", "analytics_dashboard", "accounting_export",
                "api_access", "automation_rules", "team_roles"
            ],
            "display_order": 3,
            "is_active": True,
            "is_public": True,
            "created_at": now,
            "updated_at": now,
        },
        {
            "code": "enterprise",
            "name": "Enterprise",
            "description": "Custom solutions for large operations and agencies",
            "price_monthly_cents": 39900,
            "price_annual_cents": None,
            "orders_per_month": None,
            "products_limit": None,
            "team_members": None,
            "order_history_months": None,
            "features": [
                "letzshop_sync", "inventory_locations", "inventory_purchase_orders",
                "invoice_lu", "invoice_eu_vat", "invoice_bulk", "customer_view",
                "customer_export", "analytics_dashboard", "accounting_export",
                "api_access", "automation_rules", "team_roles", "white_label",
                "multi_vendor", "custom_integrations", "sla_guarantee", "dedicated_support"
            ],
            "display_order": 4,
            "is_active": True,
            # Enterprise tier is seeded as non-public (not shown in pricing lists).
            "is_public": False,
            "created_at": now,
            "updated_at": now,
        },
    ])

    # =========================================================================
    # Seed add-on products
    # =========================================================================
    addon_products = sa.table(
        "addon_products",
        sa.column("code", sa.String),
        sa.column("name", sa.String),
        sa.column("description", sa.Text),
        sa.column("category", sa.String),
        sa.column("price_cents", sa.Integer),
        sa.column("billing_period", sa.String),
        sa.column("quantity_unit", sa.String),
        sa.column("quantity_value", sa.Integer),
        sa.column("display_order", sa.Integer),
        sa.column("is_active", sa.Boolean),
        sa.column("created_at", sa.DateTime),
        sa.column("updated_at", sa.DateTime),
    )

    op.bulk_insert(addon_products, [
        {
            "code": "domain",
            "name": "Custom Domain",
            "description": "Connect your own domain with SSL certificate included",
            "category": "domain",
            "price_cents": 1500,
            "billing_period": "annual",
            "quantity_unit": None,
            "quantity_value": None,
            "display_order": 1,
            "is_active": True,
            "created_at": now,
            "updated_at": now,
        },
        {
            "code": "email_5",
            "name": "5 Email Addresses",
            "description": "Professional email addresses on your domain",
            "category": "email",
            "price_cents": 500,
            "billing_period": "monthly",
            "quantity_unit": "emails",
            "quantity_value": 5,
            "display_order": 2,
            "is_active": True,
            "created_at": now,
            "updated_at": now,
        },
        {
            "code": "email_10",
            "name": "10 Email Addresses",
            "description": "Professional email addresses on your domain",
            "category": "email",
            "price_cents": 900,
            "billing_period": "monthly",
            "quantity_unit": "emails",
            "quantity_value": 10,
            "display_order": 3,
            "is_active": True,
            "created_at": now,
            "updated_at": now,
        },
        {
            "code": "email_25",
            "name": "25 Email Addresses",
            "description": "Professional email addresses on your domain",
            "category": "email",
            "price_cents": 1900,
            "billing_period": "monthly",
            "quantity_unit": "emails",
            "quantity_value": 25,
            "display_order": 4,
            "is_active": True,
            "created_at": now,
            "updated_at": now,
        },
        {
            "code": "storage_10gb",
            "name": "Additional Storage (10GB)",
            "description": "Extra storage for product images and files",
            "category": "storage",
            "price_cents": 500,
            "billing_period": "monthly",
            "quantity_unit": "GB",
            "quantity_value": 10,
            "display_order": 5,
            "is_active": True,
            "created_at": now,
            "updated_at": now,
        },
    ])
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Remove new columns from vendor_subscriptions
|
||||||
|
op.drop_column("vendor_subscriptions", "last_payment_error")
|
||||||
|
op.drop_column("vendor_subscriptions", "payment_retry_count")
|
||||||
|
op.drop_column("vendor_subscriptions", "scheduled_change_at")
|
||||||
|
op.drop_column("vendor_subscriptions", "scheduled_tier_change")
|
||||||
|
op.drop_column("vendor_subscriptions", "proration_behavior")
|
||||||
|
op.drop_column("vendor_subscriptions", "stripe_payment_method_id")
|
||||||
|
op.drop_column("vendor_subscriptions", "stripe_price_id")
|
||||||
|
|
||||||
|
# Drop stripe_webhook_events
|
||||||
|
op.drop_index(op.f("ix_stripe_webhook_events_vendor_id"), table_name="stripe_webhook_events")
|
||||||
|
op.drop_index(op.f("ix_stripe_webhook_events_subscription_id"), table_name="stripe_webhook_events")
|
||||||
|
op.drop_index(op.f("ix_stripe_webhook_events_status"), table_name="stripe_webhook_events")
|
||||||
|
op.drop_index(op.f("ix_stripe_webhook_events_id"), table_name="stripe_webhook_events")
|
||||||
|
op.drop_index(op.f("ix_stripe_webhook_events_event_type"), table_name="stripe_webhook_events")
|
||||||
|
op.drop_index(op.f("ix_stripe_webhook_events_event_id"), table_name="stripe_webhook_events")
|
||||||
|
op.drop_index("idx_webhook_event_type_status", table_name="stripe_webhook_events")
|
||||||
|
op.drop_table("stripe_webhook_events")
|
||||||
|
|
||||||
|
# Drop vendor_addons
|
||||||
|
op.drop_index(op.f("ix_vendor_addons_vendor_id"), table_name="vendor_addons")
|
||||||
|
op.drop_index(op.f("ix_vendor_addons_status"), table_name="vendor_addons")
|
||||||
|
op.drop_index(op.f("ix_vendor_addons_id"), table_name="vendor_addons")
|
||||||
|
op.drop_index(op.f("ix_vendor_addons_domain_name"), table_name="vendor_addons")
|
||||||
|
op.drop_index(op.f("ix_vendor_addons_addon_product_id"), table_name="vendor_addons")
|
||||||
|
op.drop_index("idx_vendor_addon_status", table_name="vendor_addons")
|
||||||
|
op.drop_index("idx_vendor_addon_product", table_name="vendor_addons")
|
||||||
|
op.drop_table("vendor_addons")
|
||||||
|
|
||||||
|
# Drop billing_history
|
||||||
|
op.drop_index(op.f("ix_billing_history_vendor_id"), table_name="billing_history")
|
||||||
|
op.drop_index(op.f("ix_billing_history_stripe_invoice_id"), table_name="billing_history")
|
||||||
|
op.drop_index(op.f("ix_billing_history_status"), table_name="billing_history")
|
||||||
|
op.drop_index(op.f("ix_billing_history_id"), table_name="billing_history")
|
||||||
|
op.drop_index("idx_billing_vendor_date", table_name="billing_history")
|
||||||
|
op.drop_index("idx_billing_status", table_name="billing_history")
|
||||||
|
op.drop_table("billing_history")
|
||||||
|
|
||||||
|
# Drop addon_products
|
||||||
|
op.drop_index(op.f("ix_addon_products_id"), table_name="addon_products")
|
||||||
|
op.drop_index(op.f("ix_addon_products_code"), table_name="addon_products")
|
||||||
|
op.drop_index(op.f("ix_addon_products_category"), table_name="addon_products")
|
||||||
|
op.drop_table("addon_products")
|
||||||
|
|
||||||
|
# Drop subscription_tiers
|
||||||
|
op.drop_index(op.f("ix_subscription_tiers_id"), table_name="subscription_tiers")
|
||||||
|
op.drop_index(op.f("ix_subscription_tiers_code"), table_name="subscription_tiers")
|
||||||
|
op.drop_table("subscription_tiers")
|
||||||
@@ -0,0 +1,44 @@
|
|||||||
|
"""add_letzshop_vendor_fields_and_trial_tracking
|
||||||
|
|
||||||
|
Revision ID: 404b3e2d2865
|
||||||
|
Revises: l0a1b2c3d4e5
|
||||||
|
Create Date: 2025-12-27 09:49:44.715243
|
||||||
|
|
||||||
|
Adds:
|
||||||
|
- vendors.letzshop_vendor_id - Link to Letzshop marketplace profile
|
||||||
|
- vendors.letzshop_vendor_slug - Letzshop shop URL slug
|
||||||
|
- vendor_subscriptions.card_collected_at - Track when card was collected for trial
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "404b3e2d2865"
|
||||||
|
down_revision: str | None = "l0a1b2c3d4e5"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Add Letzshop vendor identity fields to vendors table
|
||||||
|
op.add_column("vendors", sa.Column("letzshop_vendor_id", sa.String(length=100), nullable=True))
|
||||||
|
op.add_column("vendors", sa.Column("letzshop_vendor_slug", sa.String(length=200), nullable=True))
|
||||||
|
op.create_index(op.f("ix_vendors_letzshop_vendor_id"), "vendors", ["letzshop_vendor_id"], unique=True)
|
||||||
|
op.create_index(op.f("ix_vendors_letzshop_vendor_slug"), "vendors", ["letzshop_vendor_slug"], unique=False)
|
||||||
|
|
||||||
|
# Add card collection tracking to vendor_subscriptions
|
||||||
|
op.add_column("vendor_subscriptions", sa.Column("card_collected_at", sa.DateTime(timezone=True), nullable=True))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Remove card collection tracking from vendor_subscriptions
|
||||||
|
op.drop_column("vendor_subscriptions", "card_collected_at")
|
||||||
|
|
||||||
|
# Remove Letzshop vendor identity fields from vendors
|
||||||
|
op.drop_index(op.f("ix_vendors_letzshop_vendor_slug"), table_name="vendors")
|
||||||
|
op.drop_index(op.f("ix_vendors_letzshop_vendor_id"), table_name="vendors")
|
||||||
|
op.drop_column("vendors", "letzshop_vendor_slug")
|
||||||
|
op.drop_column("vendors", "letzshop_vendor_id")
|
||||||
@@ -0,0 +1,908 @@
|
|||||||
|
"""Initial migration - all tables
|
||||||
|
|
||||||
|
Revision ID: 4951b2e50581
|
||||||
|
Revises:
|
||||||
|
Create Date: 2025-10-27 22:28:33.137564
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "4951b2e50581"
|
||||||
|
down_revision: str | None = None
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table(
|
||||||
|
"marketplace_products",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("marketplace_product_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("title", sa.String(), nullable=False),
|
||||||
|
sa.Column("description", sa.String(), nullable=True),
|
||||||
|
sa.Column("link", sa.String(), nullable=True),
|
||||||
|
sa.Column("image_link", sa.String(), nullable=True),
|
||||||
|
sa.Column("availability", sa.String(), nullable=True),
|
||||||
|
sa.Column("price", sa.String(), nullable=True),
|
||||||
|
sa.Column("brand", sa.String(), nullable=True),
|
||||||
|
sa.Column("gtin", sa.String(), nullable=True),
|
||||||
|
sa.Column("mpn", sa.String(), nullable=True),
|
||||||
|
sa.Column("condition", sa.String(), nullable=True),
|
||||||
|
sa.Column("adult", sa.String(), nullable=True),
|
||||||
|
sa.Column("multipack", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("is_bundle", sa.String(), nullable=True),
|
||||||
|
sa.Column("age_group", sa.String(), nullable=True),
|
||||||
|
sa.Column("color", sa.String(), nullable=True),
|
||||||
|
sa.Column("gender", sa.String(), nullable=True),
|
||||||
|
sa.Column("material", sa.String(), nullable=True),
|
||||||
|
sa.Column("pattern", sa.String(), nullable=True),
|
||||||
|
sa.Column("size", sa.String(), nullable=True),
|
||||||
|
sa.Column("size_type", sa.String(), nullable=True),
|
||||||
|
sa.Column("size_system", sa.String(), nullable=True),
|
||||||
|
sa.Column("item_group_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("google_product_category", sa.String(), nullable=True),
|
||||||
|
sa.Column("product_type", sa.String(), nullable=True),
|
||||||
|
sa.Column("custom_label_0", sa.String(), nullable=True),
|
||||||
|
sa.Column("custom_label_1", sa.String(), nullable=True),
|
||||||
|
sa.Column("custom_label_2", sa.String(), nullable=True),
|
||||||
|
sa.Column("custom_label_3", sa.String(), nullable=True),
|
||||||
|
sa.Column("custom_label_4", sa.String(), nullable=True),
|
||||||
|
sa.Column("additional_image_link", sa.String(), nullable=True),
|
||||||
|
sa.Column("sale_price", sa.String(), nullable=True),
|
||||||
|
sa.Column("unit_pricing_measure", sa.String(), nullable=True),
|
||||||
|
sa.Column("unit_pricing_base_measure", sa.String(), nullable=True),
|
||||||
|
sa.Column("identifier_exists", sa.String(), nullable=True),
|
||||||
|
sa.Column("shipping", sa.String(), nullable=True),
|
||||||
|
sa.Column("currency", sa.String(), nullable=True),
|
||||||
|
sa.Column("marketplace", sa.String(), nullable=True),
|
||||||
|
sa.Column("vendor_name", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_marketplace_brand",
|
||||||
|
"marketplace_products",
|
||||||
|
["marketplace", "brand"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_marketplace_vendor",
|
||||||
|
"marketplace_products",
|
||||||
|
["marketplace", "vendor_name"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_products_availability"),
|
||||||
|
"marketplace_products",
|
||||||
|
["availability"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_products_brand"),
|
||||||
|
"marketplace_products",
|
||||||
|
["brand"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_products_google_product_category"),
|
||||||
|
"marketplace_products",
|
||||||
|
["google_product_category"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_products_gtin"),
|
||||||
|
"marketplace_products",
|
||||||
|
["gtin"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_products_id"), "marketplace_products", ["id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_products_marketplace"),
|
||||||
|
"marketplace_products",
|
||||||
|
["marketplace"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_products_marketplace_product_id"),
|
||||||
|
"marketplace_products",
|
||||||
|
["marketplace_product_id"],
|
||||||
|
unique=True,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_products_vendor_name"),
|
||||||
|
"marketplace_products",
|
||||||
|
["vendor_name"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"users",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("email", sa.String(), nullable=False),
|
||||||
|
sa.Column("username", sa.String(), nullable=False),
|
||||||
|
sa.Column("first_name", sa.String(), nullable=True),
|
||||||
|
sa.Column("last_name", sa.String(), nullable=True),
|
||||||
|
sa.Column("hashed_password", sa.String(), nullable=False),
|
||||||
|
sa.Column("role", sa.String(), nullable=False),
|
||||||
|
sa.Column("is_active", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("last_login", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
|
||||||
|
op.create_index(op.f("ix_users_id"), "users", ["id"], unique=False)
|
||||||
|
op.create_index(op.f("ix_users_username"), "users", ["username"], unique=True)
|
||||||
|
op.create_table(
|
||||||
|
"admin_audit_logs",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("admin_user_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("action", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("target_type", sa.String(length=50), nullable=False),
|
||||||
|
sa.Column("target_id", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("details", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("ip_address", sa.String(length=45), nullable=True),
|
||||||
|
sa.Column("user_agent", sa.Text(), nullable=True),
|
||||||
|
sa.Column("request_id", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["admin_user_id"],
|
||||||
|
["users.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_audit_logs_action"), "admin_audit_logs", ["action"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_audit_logs_admin_user_id"),
|
||||||
|
"admin_audit_logs",
|
||||||
|
["admin_user_id"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_audit_logs_id"), "admin_audit_logs", ["id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_audit_logs_target_id"),
|
||||||
|
"admin_audit_logs",
|
||||||
|
["target_id"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_audit_logs_target_type"),
|
||||||
|
"admin_audit_logs",
|
||||||
|
["target_type"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"admin_notifications",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("type", sa.String(length=50), nullable=False),
|
||||||
|
sa.Column("priority", sa.String(length=20), nullable=True),
|
||||||
|
sa.Column("title", sa.String(length=200), nullable=False),
|
||||||
|
sa.Column("message", sa.Text(), nullable=False),
|
||||||
|
sa.Column("is_read", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("read_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("read_by_user_id", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("action_required", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("action_url", sa.String(length=500), nullable=True),
|
||||||
|
sa.Column("notification_metadata", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["read_by_user_id"],
|
||||||
|
["users.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_notifications_action_required"),
|
||||||
|
"admin_notifications",
|
||||||
|
["action_required"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_notifications_id"), "admin_notifications", ["id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_notifications_is_read"),
|
||||||
|
"admin_notifications",
|
||||||
|
["is_read"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_notifications_priority"),
|
||||||
|
"admin_notifications",
|
||||||
|
["priority"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_notifications_type"),
|
||||||
|
"admin_notifications",
|
||||||
|
["type"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"admin_sessions",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("admin_user_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("session_token", sa.String(length=255), nullable=False),
|
||||||
|
sa.Column("ip_address", sa.String(length=45), nullable=False),
|
||||||
|
sa.Column("user_agent", sa.Text(), nullable=True),
|
||||||
|
sa.Column("login_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("last_activity_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("logout_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("is_active", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("logout_reason", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["admin_user_id"],
|
||||||
|
["users.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_sessions_admin_user_id"),
|
||||||
|
"admin_sessions",
|
||||||
|
["admin_user_id"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_sessions_id"), "admin_sessions", ["id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_sessions_is_active"),
|
||||||
|
"admin_sessions",
|
||||||
|
["is_active"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_sessions_login_at"), "admin_sessions", ["login_at"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_sessions_session_token"),
|
||||||
|
"admin_sessions",
|
||||||
|
["session_token"],
|
||||||
|
unique=True,
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"admin_settings",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("key", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("value", sa.Text(), nullable=False),
|
||||||
|
sa.Column("value_type", sa.String(length=20), nullable=True),
|
||||||
|
sa.Column("category", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("description", sa.Text(), nullable=True),
|
||||||
|
sa.Column("is_encrypted", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("is_public", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("last_modified_by_user_id", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["last_modified_by_user_id"],
|
||||||
|
["users.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_settings_category"), "admin_settings", ["category"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_settings_id"), "admin_settings", ["id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_admin_settings_key"), "admin_settings", ["key"], unique=True
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"platform_alerts",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("alert_type", sa.String(length=50), nullable=False),
|
||||||
|
sa.Column("severity", sa.String(length=20), nullable=False),
|
||||||
|
sa.Column("title", sa.String(length=200), nullable=False),
|
||||||
|
sa.Column("description", sa.Text(), nullable=True),
|
||||||
|
sa.Column("affected_vendors", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("affected_systems", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("is_resolved", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("resolved_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("resolved_by_user_id", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("resolution_notes", sa.Text(), nullable=True),
|
||||||
|
sa.Column("auto_generated", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("occurrence_count", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("first_occurred_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("last_occurred_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["resolved_by_user_id"],
|
||||||
|
["users.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_platform_alerts_alert_type"),
|
||||||
|
"platform_alerts",
|
||||||
|
["alert_type"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_platform_alerts_id"), "platform_alerts", ["id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_platform_alerts_is_resolved"),
|
||||||
|
"platform_alerts",
|
||||||
|
["is_resolved"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_platform_alerts_severity"),
|
||||||
|
"platform_alerts",
|
||||||
|
["severity"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"vendors",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_code", sa.String(), nullable=False),
|
||||||
|
sa.Column("subdomain", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("name", sa.String(), nullable=False),
|
||||||
|
sa.Column("description", sa.Text(), nullable=True),
|
||||||
|
sa.Column("owner_user_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("contact_email", sa.String(), nullable=True),
|
||||||
|
sa.Column("contact_phone", sa.String(), nullable=True),
|
||||||
|
sa.Column("website", sa.String(), nullable=True),
|
||||||
|
sa.Column("letzshop_csv_url_fr", sa.String(), nullable=True),
|
||||||
|
sa.Column("letzshop_csv_url_en", sa.String(), nullable=True),
|
||||||
|
sa.Column("letzshop_csv_url_de", sa.String(), nullable=True),
|
||||||
|
sa.Column("business_address", sa.Text(), nullable=True),
|
||||||
|
sa.Column("tax_number", sa.String(), nullable=True),
|
||||||
|
sa.Column("is_active", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("is_verified", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["owner_user_id"],
|
||||||
|
["users.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_vendors_id"), "vendors", ["id"], unique=False)
|
||||||
|
op.create_index(op.f("ix_vendors_subdomain"), "vendors", ["subdomain"], unique=True)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_vendors_vendor_code"), "vendors", ["vendor_code"], unique=True
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"customers",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("email", sa.String(length=255), nullable=False),
|
||||||
|
sa.Column("hashed_password", sa.String(length=255), nullable=False),
|
||||||
|
sa.Column("first_name", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("last_name", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("phone", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("customer_number", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("preferences", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("marketing_consent", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("last_order_date", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("total_orders", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("total_spent", sa.Numeric(precision=10, scale=2), nullable=True),
|
||||||
|
sa.Column("is_active", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["vendor_id"],
|
||||||
|
["vendors.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_customers_customer_number"),
|
||||||
|
"customers",
|
||||||
|
["customer_number"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_customers_email"), "customers", ["email"], unique=False)
|
||||||
|
op.create_index(op.f("ix_customers_id"), "customers", ["id"], unique=False)
|
||||||
|
op.create_table(
|
||||||
|
"marketplace_import_jobs",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("user_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("marketplace", sa.String(), nullable=False),
|
||||||
|
sa.Column("source_url", sa.String(), nullable=False),
|
||||||
|
sa.Column("status", sa.String(), nullable=False),
|
||||||
|
sa.Column("imported_count", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("updated_count", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("error_count", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("total_processed", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("error_message", sa.Text(), nullable=True),
|
||||||
|
sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["user_id"],
|
||||||
|
["users.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["vendor_id"],
|
||||||
|
["vendors.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_import_user_marketplace",
|
||||||
|
"marketplace_import_jobs",
|
||||||
|
["user_id", "marketplace"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_import_vendor_created",
|
||||||
|
"marketplace_import_jobs",
|
||||||
|
["vendor_id", "created_at"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_import_vendor_status",
|
||||||
|
"marketplace_import_jobs",
|
||||||
|
["vendor_id", "status"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_import_jobs_id"),
|
||||||
|
"marketplace_import_jobs",
|
||||||
|
["id"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_import_jobs_marketplace"),
|
||||||
|
"marketplace_import_jobs",
|
||||||
|
["marketplace"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_marketplace_import_jobs_vendor_id"),
|
||||||
|
"marketplace_import_jobs",
|
||||||
|
["vendor_id"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"products",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("marketplace_product_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("product_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("price", sa.Float(), nullable=True),
|
||||||
|
sa.Column("sale_price", sa.Float(), nullable=True),
|
||||||
|
sa.Column("currency", sa.String(), nullable=True),
|
||||||
|
sa.Column("availability", sa.String(), nullable=True),
|
||||||
|
sa.Column("condition", sa.String(), nullable=True),
|
||||||
|
sa.Column("is_featured", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("is_active", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("display_order", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("min_quantity", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("max_quantity", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["marketplace_product_id"],
|
||||||
|
["marketplace_products.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["vendor_id"],
|
||||||
|
["vendors.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.UniqueConstraint("vendor_id", "marketplace_product_id", name="uq_product"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_product_active", "products", ["vendor_id", "is_active"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_product_featured", "products", ["vendor_id", "is_featured"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_products_id"), "products", ["id"], unique=False)
|
||||||
|
op.create_table(
|
||||||
|
"roles",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("name", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("permissions", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["vendor_id"],
|
||||||
|
["vendors.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_roles_id"), "roles", ["id"], unique=False)
|
||||||
|
op.create_table(
|
||||||
|
"vendor_domains",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("domain", sa.String(length=255), nullable=False),
|
||||||
|
sa.Column("is_primary", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("is_active", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("ssl_status", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("ssl_verified_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("verification_token", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("is_verified", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("verified_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.UniqueConstraint("vendor_id", "domain", name="uq_vendor_domain"),
|
||||||
|
sa.UniqueConstraint("verification_token"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_domain_active", "vendor_domains", ["domain", "is_active"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_vendor_primary",
|
||||||
|
"vendor_domains",
|
||||||
|
["vendor_id", "is_primary"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_vendor_domains_domain"), "vendor_domains", ["domain"], unique=True
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_vendor_domains_id"), "vendor_domains", ["id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"vendor_themes",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("theme_name", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("is_active", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("colors", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("font_family_heading", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("font_family_body", sa.String(length=100), nullable=True),
|
||||||
|
sa.Column("logo_url", sa.String(length=500), nullable=True),
|
||||||
|
sa.Column("logo_dark_url", sa.String(length=500), nullable=True),
|
||||||
|
sa.Column("favicon_url", sa.String(length=500), nullable=True),
|
||||||
|
sa.Column("banner_url", sa.String(length=500), nullable=True),
|
||||||
|
sa.Column("layout_style", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("header_style", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("product_card_style", sa.String(length=50), nullable=True),
|
||||||
|
sa.Column("custom_css", sa.Text(), nullable=True),
|
||||||
|
sa.Column("social_links", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("meta_title_template", sa.String(length=200), nullable=True),
|
||||||
|
sa.Column("meta_description", sa.Text(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.UniqueConstraint("vendor_id"),
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_vendor_themes_id"), "vendor_themes", ["id"], unique=False)
|
||||||
|
op.create_table(
|
||||||
|
"customer_addresses",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("customer_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("address_type", sa.String(length=50), nullable=False),
|
||||||
|
sa.Column("first_name", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("last_name", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("company", sa.String(length=200), nullable=True),
|
||||||
|
sa.Column("address_line_1", sa.String(length=255), nullable=False),
|
||||||
|
sa.Column("address_line_2", sa.String(length=255), nullable=True),
|
||||||
|
sa.Column("city", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("postal_code", sa.String(length=20), nullable=False),
|
||||||
|
sa.Column("country", sa.String(length=100), nullable=False),
|
||||||
|
sa.Column("is_default", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["customer_id"],
|
||||||
|
["customers.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["vendor_id"],
|
||||||
|
["vendors.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_customer_addresses_id"), "customer_addresses", ["id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"inventory",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("product_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("location", sa.String(), nullable=False),
|
||||||
|
sa.Column("quantity", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("reserved_quantity", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("gtin", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["product_id"],
|
||||||
|
["products.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["vendor_id"],
|
||||||
|
["vendors.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.UniqueConstraint(
|
||||||
|
"product_id", "location", name="uq_inventory_product_location"
|
||||||
|
),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_inventory_product_location",
|
||||||
|
"inventory",
|
||||||
|
["product_id", "location"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
"idx_inventory_vendor_product",
|
||||||
|
"inventory",
|
||||||
|
["vendor_id", "product_id"],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_inventory_gtin"), "inventory", ["gtin"], unique=False)
|
||||||
|
op.create_index(op.f("ix_inventory_id"), "inventory", ["id"], unique=False)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_inventory_location"), "inventory", ["location"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_inventory_product_id"), "inventory", ["product_id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_inventory_vendor_id"), "inventory", ["vendor_id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"vendor_users",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("user_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("role_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("invited_by", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("is_active", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["invited_by"],
|
||||||
|
["users.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["role_id"],
|
||||||
|
["roles.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["user_id"],
|
||||||
|
["users.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["vendor_id"],
|
||||||
|
["vendors.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_vendor_users_id"), "vendor_users", ["id"], unique=False)
|
||||||
|
op.create_table(
|
||||||
|
"orders",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("vendor_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("customer_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("order_number", sa.String(), nullable=False),
|
||||||
|
sa.Column("status", sa.String(), nullable=False),
|
||||||
|
sa.Column("subtotal", sa.Float(), nullable=False),
|
||||||
|
sa.Column("tax_amount", sa.Float(), nullable=True),
|
||||||
|
sa.Column("shipping_amount", sa.Float(), nullable=True),
|
||||||
|
sa.Column("discount_amount", sa.Float(), nullable=True),
|
||||||
|
sa.Column("total_amount", sa.Float(), nullable=False),
|
||||||
|
sa.Column("currency", sa.String(), nullable=True),
|
||||||
|
sa.Column("shipping_address_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("billing_address_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("shipping_method", sa.String(), nullable=True),
|
||||||
|
sa.Column("tracking_number", sa.String(), nullable=True),
|
||||||
|
sa.Column("customer_notes", sa.Text(), nullable=True),
|
||||||
|
sa.Column("internal_notes", sa.Text(), nullable=True),
|
||||||
|
sa.Column("paid_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("shipped_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("delivered_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("cancelled_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["billing_address_id"],
|
||||||
|
["customer_addresses.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["customer_id"],
|
||||||
|
["customers.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["shipping_address_id"],
|
||||||
|
["customer_addresses.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["vendor_id"],
|
||||||
|
["vendors.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_orders_customer_id"), "orders", ["customer_id"], unique=False
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_orders_id"), "orders", ["id"], unique=False)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_orders_order_number"), "orders", ["order_number"], unique=True
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_orders_status"), "orders", ["status"], unique=False)
|
||||||
|
op.create_index(op.f("ix_orders_vendor_id"), "orders", ["vendor_id"], unique=False)
|
||||||
|
op.create_table(
|
||||||
|
"order_items",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("order_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("product_id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("product_name", sa.String(), nullable=False),
|
||||||
|
sa.Column("product_sku", sa.String(), nullable=True),
|
||||||
|
sa.Column("quantity", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("unit_price", sa.Float(), nullable=False),
|
||||||
|
sa.Column("total_price", sa.Float(), nullable=False),
|
||||||
|
sa.Column("inventory_reserved", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("inventory_fulfilled", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["order_id"],
|
||||||
|
["orders.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["product_id"],
|
||||||
|
["products.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index(op.f("ix_order_items_id"), "order_items", ["id"], unique=False)
|
||||||
|
op.create_index(
|
||||||
|
op.f("ix_order_items_order_id"), "order_items", ["order_id"], unique=False
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop plan, child tables first so foreign-key references are removed
    # before the tables they point at. Each entry is:
    #   (table, indexes created with auto-generated names via op.f(),
    #    indexes created with explicit names)
    drop_plan = [
        ("order_items",
         ["ix_order_items_order_id", "ix_order_items_id"],
         []),
        ("orders",
         ["ix_orders_vendor_id", "ix_orders_status", "ix_orders_order_number",
          "ix_orders_id", "ix_orders_customer_id"],
         []),
        ("vendor_users",
         ["ix_vendor_users_id"],
         []),
        ("inventory",
         ["ix_inventory_vendor_id", "ix_inventory_product_id",
          "ix_inventory_location", "ix_inventory_id", "ix_inventory_gtin"],
         ["idx_inventory_vendor_product", "idx_inventory_product_location"]),
        ("customer_addresses",
         ["ix_customer_addresses_id"],
         []),
        ("vendor_themes",
         ["ix_vendor_themes_id"],
         []),
        ("vendor_domains",
         ["ix_vendor_domains_id", "ix_vendor_domains_domain"],
         ["idx_vendor_primary", "idx_domain_active"]),
        ("roles",
         ["ix_roles_id"],
         []),
        ("products",
         ["ix_products_id"],
         ["idx_product_featured", "idx_product_active"]),
        ("marketplace_import_jobs",
         ["ix_marketplace_import_jobs_vendor_id",
          "ix_marketplace_import_jobs_marketplace",
          "ix_marketplace_import_jobs_id"],
         ["idx_import_vendor_status", "idx_import_vendor_created",
          "idx_import_user_marketplace"]),
        ("customers",
         ["ix_customers_id", "ix_customers_email",
          "ix_customers_customer_number"],
         []),
        ("vendors",
         ["ix_vendors_vendor_code", "ix_vendors_subdomain", "ix_vendors_id"],
         []),
        ("platform_alerts",
         ["ix_platform_alerts_severity", "ix_platform_alerts_is_resolved",
          "ix_platform_alerts_id", "ix_platform_alerts_alert_type"],
         []),
        ("admin_settings",
         ["ix_admin_settings_key", "ix_admin_settings_id",
          "ix_admin_settings_category"],
         []),
        ("admin_sessions",
         ["ix_admin_sessions_session_token", "ix_admin_sessions_login_at",
          "ix_admin_sessions_is_active", "ix_admin_sessions_id",
          "ix_admin_sessions_admin_user_id"],
         []),
        ("admin_notifications",
         ["ix_admin_notifications_type", "ix_admin_notifications_priority",
          "ix_admin_notifications_is_read", "ix_admin_notifications_id",
          "ix_admin_notifications_action_required"],
         []),
        ("admin_audit_logs",
         ["ix_admin_audit_logs_target_type", "ix_admin_audit_logs_target_id",
          "ix_admin_audit_logs_id", "ix_admin_audit_logs_admin_user_id",
          "ix_admin_audit_logs_action"],
         []),
        ("users",
         ["ix_users_username", "ix_users_id", "ix_users_email"],
         []),
        ("marketplace_products",
         ["ix_marketplace_products_vendor_name",
          "ix_marketplace_products_marketplace_product_id",
          "ix_marketplace_products_marketplace",
          "ix_marketplace_products_id",
          "ix_marketplace_products_gtin",
          "ix_marketplace_products_google_product_category",
          "ix_marketplace_products_brand",
          "ix_marketplace_products_availability"],
         ["idx_marketplace_vendor", "idx_marketplace_brand"]),
    ]
    for table, auto_named_indexes, explicit_indexes in drop_plan:
        for index_name in auto_named_indexes:
            op.drop_index(op.f(index_name), table_name=table)
        for index_name in explicit_indexes:
            op.drop_index(index_name, table_name=table)
        op.drop_table(table)
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,31 @@
|
|||||||
|
"""add_order_tracking_fields
|
||||||
|
|
||||||
|
Revision ID: 55b92e155566
|
||||||
|
Revises: d2e3f4a5b6c7
|
||||||
|
Create Date: 2025-12-20 18:07:51.144136
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "55b92e155566"
|
||||||
|
down_revision: str | None = "d2e3f4a5b6c7"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add shipment-tracking columns to the orders table."""
    # All three fields are optional: existing rows simply get NULL.
    new_columns = (
        sa.Column("tracking_url", sa.String(length=500), nullable=True),
        sa.Column("shipment_number", sa.String(length=100), nullable=True),
        sa.Column("shipping_carrier", sa.String(length=50), nullable=True),
    )
    for column in new_columns:
        op.add_column("orders", column)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the shipment-tracking columns, in reverse order of addition."""
    for column_name in ("shipping_carrier", "shipment_number", "tracking_url"):
        op.drop_column("orders", column_name)
|
||||||
@@ -0,0 +1,48 @@
|
|||||||
|
"""make_vendor_owner_user_id_nullable_for_company_ownership
|
||||||
|
|
||||||
|
Revision ID: 5818330181a5
|
||||||
|
Revises: d0325d7c0f25
|
||||||
|
Create Date: 2025-12-01 20:30:06.158027
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "5818330181a5"
|
||||||
|
down_revision: str | None = "d0325d7c0f25"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """
    Make vendor.owner_user_id nullable to support company-level ownership.

    Architecture Change:
    - OLD: Each vendor has its own owner (vendor.owner_user_id)
    - NEW: Vendors belong to a company, company has one owner (company.owner_user_id)

    This allows one company owner to manage multiple vendor brands.
    """
    # batch_alter_table is required so the ALTER works on SQLite as well
    # (SQLite rebuilds the table behind the scenes).
    with op.batch_alter_table("vendors", schema=None) as batch_op:
        batch_op.alter_column("owner_user_id", existing_type=sa.INTEGER(), nullable=True)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """
    Revert vendor.owner_user_id to non-nullable.

    WARNING: This will fail if there are vendors without owner_user_id!
    """
    # batch_alter_table is required so the ALTER works on SQLite as well.
    with op.batch_alter_table("vendors", schema=None) as batch_op:
        batch_op.alter_column("owner_user_id", existing_type=sa.INTEGER(), nullable=False)
|
||||||
@@ -0,0 +1,76 @@
|
|||||||
|
"""Ensure content_pages table with all columns
|
||||||
|
|
||||||
|
Revision ID: 72aa309d4007
|
||||||
|
Revises: fef1d20ce8b4
|
||||||
|
Create Date: 2025-11-22 15:16:13.213613
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "72aa309d4007"
|
||||||
|
down_revision: str | None = "fef1d20ce8b4"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    # CMS page table. vendor_id is nullable (presumably for pages not tied
    # to a single vendor — confirm against the model); slug is unique per
    # vendor via uq_vendor_slug. created_by/updated_by are audit references
    # that survive user deletion (SET NULL).
    op.create_table(
        "content_pages",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=True),
        sa.Column("slug", sa.String(length=100), nullable=False),
        sa.Column("title", sa.String(length=200), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        sa.Column("content_format", sa.String(length=20), nullable=True),
        sa.Column("meta_description", sa.String(length=300), nullable=True),
        sa.Column("meta_keywords", sa.String(length=300), nullable=True),
        sa.Column("is_published", sa.Boolean(), nullable=False),
        sa.Column("published_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("display_order", sa.Integer(), nullable=True),
        sa.Column("show_in_footer", sa.Boolean(), nullable=True),
        sa.Column("show_in_header", sa.Boolean(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("created_by", sa.Integer(), nullable=True),
        sa.Column("updated_by", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["created_by"], ["users.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["updated_by"], ["users.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("vendor_id", "slug", name="uq_vendor_slug"),
    )
    # Composite indexes with explicit names.
    for index_name, index_columns in (
        ("idx_slug_published", ["slug", "is_published"]),
        ("idx_vendor_published", ["vendor_id", "is_published"]),
    ):
        op.create_index(index_name, "content_pages", index_columns, unique=False)
    # Single-column indexes with Alembic auto-generated names.
    for index_name, index_columns in (
        ("ix_content_pages_id", ["id"]),
        ("ix_content_pages_slug", ["slug"]),
        ("ix_content_pages_vendor_id", ["vendor_id"]),
    ):
        op.create_index(op.f(index_name), "content_pages", index_columns, unique=False)
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    # Indexes first (auto-named, then explicitly named), then the table.
    for index_name in (
        "ix_content_pages_vendor_id",
        "ix_content_pages_slug",
        "ix_content_pages_id",
    ):
        op.drop_index(op.f(index_name), table_name="content_pages")
    for index_name in ("idx_vendor_published", "idx_slug_published"):
        op.drop_index(index_name, table_name="content_pages")
    op.drop_table("content_pages")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,290 @@
|
|||||||
|
"""add_architecture_quality_tracking_tables
|
||||||
|
|
||||||
|
Revision ID: 7a7ce92593d5
|
||||||
|
Revises: a2064e1dfcd4
|
||||||
|
Create Date: 2025-11-28 09:21:16.545203
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "7a7ce92593d5"
|
||||||
|
down_revision: str | None = "a2064e1dfcd4"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the architecture-quality tracking schema.

    Tables created: architecture_scans, architecture_rules,
    architecture_violations, violation_assignments and violation_comments,
    together with the secondary indexes used for lookups.
    """

    def timestamped(name: str) -> sa.Column:
        # Shared shape for the DB-defaulted timestamp columns: the server
        # fills CURRENT_TIMESTAMP on INSERT, so the app never has to.
        return sa.Column(
            name,
            sa.DateTime(timezone=True),
            server_default=sa.text("CURRENT_TIMESTAMP"),
            nullable=False,
        )

    # Create architecture_scans table
    op.create_table(
        "architecture_scans",
        sa.Column("id", sa.Integer(), nullable=False),
        timestamped("timestamp"),
        sa.Column("total_files", sa.Integer(), nullable=True),
        sa.Column("total_violations", sa.Integer(), nullable=True),
        sa.Column("errors", sa.Integer(), nullable=True),
        sa.Column("warnings", sa.Integer(), nullable=True),
        sa.Column("duration_seconds", sa.Float(), nullable=True),
        sa.Column("triggered_by", sa.String(length=100), nullable=True),
        sa.Column("git_commit_hash", sa.String(length=40), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    for index_name, index_columns, is_unique in (
        ("ix_architecture_scans_id", ["id"], False),
        ("ix_architecture_scans_timestamp", ["timestamp"], False),
    ):
        op.create_index(op.f(index_name), "architecture_scans", index_columns, unique=is_unique)

    # Create architecture_rules table
    op.create_table(
        "architecture_rules",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("rule_id", sa.String(length=20), nullable=False),
        sa.Column("category", sa.String(length=50), nullable=False),
        sa.Column("name", sa.String(length=200), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("severity", sa.String(length=10), nullable=False),
        sa.Column("enabled", sa.Boolean(), nullable=False, server_default="1"),
        sa.Column("custom_config", sa.JSON(), nullable=True),
        timestamped("created_at"),
        timestamped("updated_at"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("rule_id"),
    )
    for index_name, index_columns, is_unique in (
        ("ix_architecture_rules_id", ["id"], False),
        ("ix_architecture_rules_rule_id", ["rule_id"], True),
    ):
        op.create_index(op.f(index_name), "architecture_rules", index_columns, unique=is_unique)

    # Create architecture_violations table
    op.create_table(
        "architecture_violations",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("scan_id", sa.Integer(), nullable=False),
        sa.Column("rule_id", sa.String(length=20), nullable=False),
        sa.Column("rule_name", sa.String(length=200), nullable=False),
        sa.Column("severity", sa.String(length=10), nullable=False),
        sa.Column("file_path", sa.String(length=500), nullable=False),
        sa.Column("line_number", sa.Integer(), nullable=False),
        sa.Column("message", sa.Text(), nullable=False),
        sa.Column("context", sa.Text(), nullable=True),
        sa.Column("suggestion", sa.Text(), nullable=True),
        sa.Column("status", sa.String(length=20), server_default="open", nullable=True),
        sa.Column("assigned_to", sa.Integer(), nullable=True),
        sa.Column("resolved_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("resolved_by", sa.Integer(), nullable=True),
        sa.Column("resolution_note", sa.Text(), nullable=True),
        timestamped("created_at"),
        sa.ForeignKeyConstraint(["assigned_to"], ["users.id"]),
        sa.ForeignKeyConstraint(["resolved_by"], ["users.id"]),
        sa.ForeignKeyConstraint(["scan_id"], ["architecture_scans.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    for index_name, index_columns in (
        ("ix_architecture_violations_file_path", ["file_path"]),
        ("ix_architecture_violations_id", ["id"]),
        ("ix_architecture_violations_rule_id", ["rule_id"]),
        ("ix_architecture_violations_scan_id", ["scan_id"]),
        ("ix_architecture_violations_severity", ["severity"]),
        ("ix_architecture_violations_status", ["status"]),
    ):
        op.create_index(op.f(index_name), "architecture_violations", index_columns, unique=False)

    # Create violation_assignments table
    op.create_table(
        "violation_assignments",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("violation_id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        timestamped("assigned_at"),
        sa.Column("assigned_by", sa.Integer(), nullable=True),
        sa.Column("due_date", sa.DateTime(timezone=True), nullable=True),
        sa.Column("priority", sa.String(length=10), server_default="medium", nullable=True),
        sa.ForeignKeyConstraint(["assigned_by"], ["users.id"]),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
        sa.ForeignKeyConstraint(["violation_id"], ["architecture_violations.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    for index_name, index_columns in (
        ("ix_violation_assignments_id", ["id"]),
        ("ix_violation_assignments_violation_id", ["violation_id"]),
    ):
        op.create_index(op.f(index_name), "violation_assignments", index_columns, unique=False)

    # Create violation_comments table
    op.create_table(
        "violation_comments",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("violation_id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("comment", sa.Text(), nullable=False),
        timestamped("created_at"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
        sa.ForeignKeyConstraint(["violation_id"], ["architecture_violations.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    for index_name, index_columns in (
        ("ix_violation_comments_id", ["id"]),
        ("ix_violation_comments_violation_id", ["violation_id"]),
    ):
        op.create_index(op.f(index_name), "violation_comments", index_columns, unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the architecture-tracking tables created by upgrade().

    Tables are dropped child-first (comments/assignments before the
    violations they reference, violations before rules/scans) so foreign
    key references are released before their targets disappear.
    """
    # (table, indexed columns in original drop order)
    schema = [
        ("violation_comments", ["violation_id", "id"]),
        ("violation_assignments", ["violation_id", "id"]),
        (
            "architecture_violations",
            ["status", "severity", "scan_id", "rule_id", "id", "file_path"],
        ),
        ("architecture_rules", ["rule_id", "id"]),
        ("architecture_scans", ["timestamp", "id"]),
    ]
    for table, indexed_columns in schema:
        for column in indexed_columns:
            op.drop_index(op.f(f"ix_{table}_{column}"), table_name=table)
        op.drop_table(table)
|
||||||
103
alembic/versions_backup/82ea1b4a3ccb_add_test_run_tables.py
Normal file
103
alembic/versions_backup/82ea1b4a3ccb_add_test_run_tables.py
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
"""add_test_run_tables

Revision ID: 82ea1b4a3ccb
Revises: b4c5d6e7f8a9
Create Date: 2025-12-12 22:48:09.501172

Creates the test_collections, test_runs and test_results tables used to
persist pytest collection snapshots and per-test run results.
"""
from collections.abc import Sequence

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "82ea1b4a3ccb"
down_revision: str | None = "b4c5d6e7f8a9"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the pytest reporting schema.

    Three tables: test_collections (suite snapshots), test_runs (one row
    per pytest invocation) and test_results (one row per executed test,
    linked to its run via run_id).
    """

    def _db_timestamp(name: str) -> sa.Column:
        # All three tables stamp rows with the database clock on insert.
        return sa.Column(
            name,
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        )

    # Snapshot of a collected test suite, with counts per category.
    op.create_table(
        "test_collections",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("total_tests", sa.Integer(), nullable=True),
        sa.Column("total_files", sa.Integer(), nullable=True),
        sa.Column("total_classes", sa.Integer(), nullable=True),
        sa.Column("unit_tests", sa.Integer(), nullable=True),
        sa.Column("integration_tests", sa.Integer(), nullable=True),
        sa.Column("performance_tests", sa.Integer(), nullable=True),
        sa.Column("system_tests", sa.Integer(), nullable=True),
        sa.Column("test_files", sa.JSON(), nullable=True),
        _db_timestamp("collected_at"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_test_collections_id"), "test_collections", ["id"], unique=False
    )

    # One row per pytest invocation, with aggregate outcome counters and
    # the git / invocation context it ran under.
    op.create_table(
        "test_runs",
        sa.Column("id", sa.Integer(), nullable=False),
        _db_timestamp("timestamp"),
        sa.Column("total_tests", sa.Integer(), nullable=True),
        sa.Column("passed", sa.Integer(), nullable=True),
        sa.Column("failed", sa.Integer(), nullable=True),
        sa.Column("errors", sa.Integer(), nullable=True),
        sa.Column("skipped", sa.Integer(), nullable=True),
        sa.Column("xfailed", sa.Integer(), nullable=True),
        sa.Column("xpassed", sa.Integer(), nullable=True),
        sa.Column("coverage_percent", sa.Float(), nullable=True),
        sa.Column("duration_seconds", sa.Float(), nullable=True),
        sa.Column("triggered_by", sa.String(length=100), nullable=True),
        sa.Column("git_commit_hash", sa.String(length=40), nullable=True),
        sa.Column("git_branch", sa.String(length=100), nullable=True),
        sa.Column("test_path", sa.String(length=500), nullable=True),
        sa.Column("pytest_args", sa.String(length=500), nullable=True),
        sa.Column("status", sa.String(length=20), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    for column in ("id", "status", "timestamp"):
        op.create_index(
            op.f(f"ix_test_runs_{column}"), "test_runs", [column], unique=False
        )

    # Per-test outcome, attached to its run.
    op.create_table(
        "test_results",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("run_id", sa.Integer(), nullable=False),
        sa.Column("node_id", sa.String(length=500), nullable=False),
        sa.Column("test_name", sa.String(length=200), nullable=False),
        sa.Column("test_file", sa.String(length=300), nullable=False),
        sa.Column("test_class", sa.String(length=200), nullable=True),
        sa.Column("outcome", sa.String(length=20), nullable=False),
        sa.Column("duration_seconds", sa.Float(), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("traceback", sa.Text(), nullable=True),
        sa.Column("markers", sa.JSON(), nullable=True),
        sa.Column("parameters", sa.JSON(), nullable=True),
        _db_timestamp("created_at"),
        sa.ForeignKeyConstraint(["run_id"], ["test_runs.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    for column in ("id", "node_id", "outcome", "run_id"):
        op.create_index(
            op.f(f"ix_test_results_{column}"), "test_results", [column], unique=False
        )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the pytest reporting tables, children before parents.

    test_results carries a foreign key to test_runs, so it goes first.
    """
    schema = [
        ("test_results", ["run_id", "outcome", "node_id", "id"]),
        ("test_runs", ["timestamp", "status", "id"]),
        ("test_collections", ["id"]),
    ]
    for table, indexed_columns in schema:
        for column in indexed_columns:
            op.drop_index(op.f(f"ix_{table}_{column}"), table_name=table)
        op.drop_table(table)
|
||||||
@@ -0,0 +1,45 @@
|
|||||||
|
"""add marketplace import errors table

Revision ID: 91d02647efae
Revises: 987b4ecfa503
Create Date: 2025-12-13 13:13:46.969503

Creates marketplace_import_errors, which records one row per failed row
of a marketplace import job (error type, message, and the raw row data).
"""
from collections.abc import Sequence

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "91d02647efae"
down_revision: str | None = "987b4ecfa503"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the marketplace_import_errors table and its indexes.

    Each row describes one rejected row of an import job; rows are
    deleted together with their parent job (ON DELETE CASCADE).
    """
    op.create_table(
        "marketplace_import_errors",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("import_job_id", sa.Integer(), nullable=False),
        sa.Column("row_number", sa.Integer(), nullable=False),
        sa.Column("identifier", sa.String(), nullable=True),
        sa.Column("error_type", sa.String(length=50), nullable=False),
        sa.Column("error_message", sa.Text(), nullable=False),
        sa.Column("row_data", sa.JSON(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["import_job_id"], ["marketplace_import_jobs.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("id"),
    )

    # Lookup indexes: by parent job, by error category, and the default
    # primary-key index.
    op.create_index(
        "idx_import_error_job_id",
        "marketplace_import_errors",
        ["import_job_id"],
        unique=False,
    )
    op.create_index(
        "idx_import_error_type",
        "marketplace_import_errors",
        ["error_type"],
        unique=False,
    )
    op.create_index(
        op.f("ix_marketplace_import_errors_id"),
        "marketplace_import_errors",
        ["id"],
        unique=False,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the marketplace_import_errors table and its indexes."""
    table = "marketplace_import_errors"
    for index in (
        op.f("ix_marketplace_import_errors_id"),
        "idx_import_error_type",
        "idx_import_error_job_id",
    ):
        op.drop_index(index, table_name=table)
    op.drop_table(table)
|
||||||
@@ -0,0 +1,179 @@
|
|||||||
|
"""add_letzshop_integration_tables

Revision ID: 987b4ecfa503
Revises: 82ea1b4a3ccb
Create Date: 2025-12-13

This migration adds:
- vendor_letzshop_credentials: Per-vendor encrypted API key storage
- letzshop_orders: Track imported orders with external IDs
- letzshop_fulfillment_queue: Queue outbound operations with retry
- letzshop_sync_logs: Audit trail for sync operations
- Adds channel fields to orders table for multi-marketplace support
"""
from collections.abc import Sequence

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "987b4ecfa503"
# Follows the test-run tables migration (82ea1b4a3ccb).
down_revision: str | None = "82ea1b4a3ccb"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the Letzshop marketplace-integration schema.

    Adds channel-tracking columns to ``orders`` and creates four tables:
    vendor_letzshop_credentials, letzshop_orders,
    letzshop_fulfillment_queue and letzshop_sync_logs.
    """
    # Add channel fields to orders table
    # Existing rows default to the "direct" sales channel.
    op.add_column("orders", sa.Column("channel", sa.String(length=50), nullable=True, server_default="direct"))
    op.add_column("orders", sa.Column("external_order_id", sa.String(length=100), nullable=True))
    op.add_column("orders", sa.Column("external_channel_data", sa.JSON(), nullable=True))
    op.create_index(op.f("ix_orders_channel"), "orders", ["channel"], unique=False)
    op.create_index(op.f("ix_orders_external_order_id"), "orders", ["external_order_id"], unique=False)

    # Create vendor_letzshop_credentials table
    # One credentials row per vendor (UNIQUE on vendor_id); the API key is
    # stored encrypted (api_key_encrypted).
    op.create_table("vendor_letzshop_credentials",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("api_key_encrypted", sa.Text(), nullable=False),
        sa.Column("api_endpoint", sa.String(length=255), server_default="https://letzshop.lu/graphql", nullable=True),
        sa.Column("auto_sync_enabled", sa.Boolean(), server_default="0", nullable=True),
        sa.Column("sync_interval_minutes", sa.Integer(), server_default="15", nullable=True),
        sa.Column("last_sync_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("last_sync_status", sa.String(length=50), nullable=True),
        sa.Column("last_sync_error", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("vendor_id")
    )
    op.create_index(op.f("ix_vendor_letzshop_credentials_id"), "vendor_letzshop_credentials", ["id"], unique=False)
    op.create_index(op.f("ix_vendor_letzshop_credentials_vendor_id"), "vendor_letzshop_credentials", ["vendor_id"], unique=True)

    # Create letzshop_orders table
    # Mirrors an imported Letzshop order; local_order_id links it to a
    # local orders row once one exists.
    op.create_table("letzshop_orders",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("letzshop_order_id", sa.String(length=100), nullable=False),
        sa.Column("letzshop_shipment_id", sa.String(length=100), nullable=True),
        sa.Column("letzshop_order_number", sa.String(length=100), nullable=True),
        sa.Column("local_order_id", sa.Integer(), nullable=True),
        sa.Column("letzshop_state", sa.String(length=50), nullable=True),
        sa.Column("customer_email", sa.String(length=255), nullable=True),
        sa.Column("customer_name", sa.String(length=255), nullable=True),
        sa.Column("total_amount", sa.String(length=50), nullable=True),
        sa.Column("currency", sa.String(length=10), server_default="EUR", nullable=True),
        sa.Column("raw_order_data", sa.JSON(), nullable=True),
        sa.Column("inventory_units", sa.JSON(), nullable=True),
        sa.Column("sync_status", sa.String(length=50), server_default="pending", nullable=True),
        sa.Column("last_synced_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("sync_error", sa.Text(), nullable=True),
        sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("rejected_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("tracking_set_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("tracking_number", sa.String(length=100), nullable=True),
        sa.Column("tracking_carrier", sa.String(length=100), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.ForeignKeyConstraint(["local_order_id"], ["orders.id"], ),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index(op.f("ix_letzshop_orders_id"), "letzshop_orders", ["id"], unique=False)
    op.create_index(op.f("ix_letzshop_orders_letzshop_order_id"), "letzshop_orders", ["letzshop_order_id"], unique=False)
    op.create_index(op.f("ix_letzshop_orders_letzshop_shipment_id"), "letzshop_orders", ["letzshop_shipment_id"], unique=False)
    op.create_index(op.f("ix_letzshop_orders_vendor_id"), "letzshop_orders", ["vendor_id"], unique=False)
    op.create_index("idx_letzshop_order_vendor", "letzshop_orders", ["vendor_id", "letzshop_order_id"], unique=False)
    op.create_index("idx_letzshop_order_state", "letzshop_orders", ["vendor_id", "letzshop_state"], unique=False)
    op.create_index("idx_letzshop_order_sync", "letzshop_orders", ["vendor_id", "sync_status"], unique=False)

    # Create letzshop_fulfillment_queue table
    # Outbound operations with retry bookkeeping (attempts / max_attempts /
    # next_retry_at).
    op.create_table("letzshop_fulfillment_queue",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("letzshop_order_id", sa.Integer(), nullable=False),
        sa.Column("operation", sa.String(length=50), nullable=False),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("status", sa.String(length=50), server_default="pending", nullable=True),
        sa.Column("attempts", sa.Integer(), server_default="0", nullable=True),
        sa.Column("max_attempts", sa.Integer(), server_default="3", nullable=True),
        sa.Column("last_attempt_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("next_retry_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("response_data", sa.JSON(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.ForeignKeyConstraint(["letzshop_order_id"], ["letzshop_orders.id"], ),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index(op.f("ix_letzshop_fulfillment_queue_id"), "letzshop_fulfillment_queue", ["id"], unique=False)
    op.create_index(op.f("ix_letzshop_fulfillment_queue_vendor_id"), "letzshop_fulfillment_queue", ["vendor_id"], unique=False)
    op.create_index("idx_fulfillment_queue_status", "letzshop_fulfillment_queue", ["status", "vendor_id"], unique=False)
    op.create_index("idx_fulfillment_queue_retry", "letzshop_fulfillment_queue", ["status", "next_retry_at"], unique=False)

    # Create letzshop_sync_logs table
    # Audit trail: one row per sync operation with processed/succeeded/
    # failed counters.
    op.create_table("letzshop_sync_logs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("operation_type", sa.String(length=50), nullable=False),
        sa.Column("direction", sa.String(length=10), nullable=False),
        sa.Column("status", sa.String(length=50), nullable=False),
        sa.Column("records_processed", sa.Integer(), server_default="0", nullable=True),
        sa.Column("records_succeeded", sa.Integer(), server_default="0", nullable=True),
        sa.Column("records_failed", sa.Integer(), server_default="0", nullable=True),
        sa.Column("error_details", sa.JSON(), nullable=True),
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("duration_seconds", sa.Integer(), nullable=True),
        sa.Column("triggered_by", sa.String(length=100), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index(op.f("ix_letzshop_sync_logs_id"), "letzshop_sync_logs", ["id"], unique=False)
    op.create_index(op.f("ix_letzshop_sync_logs_vendor_id"), "letzshop_sync_logs", ["vendor_id"], unique=False)
    op.create_index("idx_sync_log_vendor_type", "letzshop_sync_logs", ["vendor_id", "operation_type"], unique=False)
    op.create_index("idx_sync_log_vendor_date", "letzshop_sync_logs", ["vendor_id", "started_at"], unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Tear down the Letzshop integration schema.

    Tables are removed dependents-first (sync logs and the fulfillment
    queue before letzshop_orders, which in turn references orders and
    vendors), then the channel columns are stripped from orders.
    """

    def _drop_table(table, index_names):
        # Indexes first, then the table itself.
        for name in index_names:
            op.drop_index(name, table_name=table)
        op.drop_table(table)

    _drop_table(
        "letzshop_sync_logs",
        [
            "idx_sync_log_vendor_date",
            "idx_sync_log_vendor_type",
            op.f("ix_letzshop_sync_logs_vendor_id"),
            op.f("ix_letzshop_sync_logs_id"),
        ],
    )
    _drop_table(
        "letzshop_fulfillment_queue",
        [
            "idx_fulfillment_queue_retry",
            "idx_fulfillment_queue_status",
            op.f("ix_letzshop_fulfillment_queue_vendor_id"),
            op.f("ix_letzshop_fulfillment_queue_id"),
        ],
    )
    _drop_table(
        "letzshop_orders",
        [
            "idx_letzshop_order_sync",
            "idx_letzshop_order_state",
            "idx_letzshop_order_vendor",
            op.f("ix_letzshop_orders_vendor_id"),
            op.f("ix_letzshop_orders_letzshop_shipment_id"),
            op.f("ix_letzshop_orders_letzshop_order_id"),
            op.f("ix_letzshop_orders_id"),
        ],
    )
    _drop_table(
        "vendor_letzshop_credentials",
        [
            op.f("ix_vendor_letzshop_credentials_vendor_id"),
            op.f("ix_vendor_letzshop_credentials_id"),
        ],
    )

    # Finally remove the multi-channel columns added to orders.
    op.drop_index(op.f("ix_orders_external_order_id"), table_name="orders")
    op.drop_index(op.f("ix_orders_channel"), table_name="orders")
    for column in ("external_channel_data", "external_order_id", "channel"):
        op.drop_column("orders", column)
|
||||||
@@ -0,0 +1,60 @@
|
|||||||
|
"""remove_vendor_owner_user_id_column

Revision ID: 9f3a25ea4991
Revises: 5818330181a5
Create Date: 2025-12-02 17:58:45.663338

This migration removes the owner_user_id column from the vendors table.

Architecture Change:
- OLD: Each vendor had its own owner (vendor.owner_user_id)
- NEW: Vendors belong to a company, company has one owner (company.owner_user_id)

The vendor ownership is now determined via the company relationship:
- vendor.company.owner_user_id contains the owner
"""
from collections.abc import Sequence

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "9f3a25ea4991"
down_revision: str | None = "5818330181a5"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Drop vendors.owner_user_id; ownership now lives on the company.

    SQLite batch mode recreates the table without the column, so any
    constraints that referenced it are removed along with it — no
    explicit constraint drops are needed.
    """
    with op.batch_alter_table("vendors", schema=None) as vendors:
        vendors.drop_column("owner_user_id")
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Restore the vendors.owner_user_id column (schema only).

    WARNING: the column comes back empty — the data is NOT restored.
    Repopulate owner_user_id from company.owner_user_id by hand if you
    revert this migration.
    """
    with op.batch_alter_table("vendors", schema=None) as vendors:
        vendors.add_column(sa.Column("owner_user_id", sa.Integer(), nullable=True))
        vendors.create_foreign_key(
            "vendors_owner_user_id_fkey",
            "users",
            ["owner_user_id"],
            ["id"],
        )
|
||||||
67
alembic/versions_backup/a2064e1dfcd4_add_cart_items_table.py
Normal file
67
alembic/versions_backup/a2064e1dfcd4_add_cart_items_table.py
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
"""add cart_items table

Revision ID: a2064e1dfcd4
Revises: f68d8da5315a
Create Date: 2025-11-23 19:52:40.509538

Creates the cart_items table holding per-session cart lines, unique per
(vendor_id, session_id, product_id).
"""

from collections.abc import Sequence

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "a2064e1dfcd4"
down_revision: str | None = "f68d8da5315a"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create cart_items plus its lookup indexes.

    The (vendor_id, session_id, product_id) unique constraint keeps one
    row per product per session cart; price_at_add records the price at
    the moment the item entered the cart.
    """
    op.create_table(
        "cart_items",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("product_id", sa.Integer(), nullable=False),
        sa.Column("session_id", sa.String(length=255), nullable=False),
        sa.Column("quantity", sa.Integer(), nullable=False),
        sa.Column("price_at_add", sa.Float(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(["product_id"], ["products.id"]),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "vendor_id", "session_id", "product_id", name="uq_cart_item"
        ),
    )

    # Lookup indexes: session carts, cleanup by age, and default
    # single-column indexes.
    for name, columns in (
        ("idx_cart_session", ["vendor_id", "session_id"]),
        ("idx_cart_created", ["created_at"]),
        (op.f("ix_cart_items_id"), ["id"]),
        (op.f("ix_cart_items_session_id"), ["session_id"]),
    ):
        op.create_index(name, "cart_items", columns, unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the cart_items table and every index created by upgrade()."""
    for index in (
        op.f("ix_cart_items_session_id"),
        op.f("ix_cart_items_id"),
        "idx_cart_created",
        "idx_cart_session",
    ):
        op.drop_index(index, table_name="cart_items")
    op.drop_table("cart_items")
|
||||||
@@ -0,0 +1,128 @@
|
|||||||
|
"""Add override fields to products table

Revision ID: a3b4c5d6e7f8
Revises: f2b3c4d5e6f7
Create Date: 2025-12-11

This migration:
- Renames 'product_id' to 'vendor_sku' for clarity
- Adds new override fields (brand, images, digital delivery)
- Adds vendor-specific digital fulfillment fields
- Changes relationship from one-to-one to one-to-many (same marketplace product
  can be in multiple vendor catalogs)

The override pattern: NULL value means "inherit from marketplace_product".
Setting a value creates a vendor-specific override.
"""

from collections.abc import Sequence

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "a3b4c5d6e7f8"
down_revision: str | None = "f2b3c4d5e6f7"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Rename products.product_id to vendor_sku and add override columns.

    A NULL in any of the new columns means "inherit the value from the
    marketplace product"; a non-NULL value is a vendor-specific override.
    """
    # SQLite cannot rename a column in place; batch mode rebuilds the table.
    with op.batch_alter_table("products", schema=None) as products:
        products.alter_column(
            "product_id",
            new_column_name="vendor_sku",
        )

    new_columns = [
        # Override fields.
        sa.Column("brand", sa.String(), nullable=True),
        sa.Column("primary_image_url", sa.String(), nullable=True),
        sa.Column("additional_images", sa.JSON(), nullable=True),
        # Digital product overrides.
        sa.Column("download_url", sa.String(), nullable=True),
        sa.Column("license_type", sa.String(50), nullable=True),
        # Vendor-specific digital fulfillment settings.
        sa.Column("fulfillment_email_template", sa.String(), nullable=True),
        # Supplier tracking (for products sourced from CodesWholesale, etc.).
        sa.Column("supplier", sa.String(50), nullable=True),
        sa.Column("supplier_product_id", sa.String(), nullable=True),
        sa.Column("supplier_cost", sa.Float(), nullable=True),
        # Margin/markup tracking.
        sa.Column("margin_percent", sa.Float(), nullable=True),
    ]
    for column in new_columns:
        op.add_column("products", column)

    # Index for vendor_sku lookups within a vendor's catalog.
    op.create_index(
        "idx_product_vendor_sku",
        "products",
        ["vendor_id", "vendor_sku"],
    )
    # Index for supplier queries.
    op.create_index(
        "idx_product_supplier",
        "products",
        ["supplier", "supplier_product_id"],
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert the vendor/supplier product changes.

    Indexes are dropped before the columns they cover, then the columns
    added by upgrade() are removed, and finally the vendor_sku rename is
    reverted (batch mode keeps the rename working on SQLite).
    """
    # Indexes first — they reference columns removed below.
    for index_name in ("idx_product_supplier", "idx_product_vendor_sku"):
        op.drop_index(index_name, table_name="products")

    # Remove the columns added by upgrade(), newest first.
    for column_name in (
        "margin_percent",
        "supplier_cost",
        "supplier_product_id",
        "supplier",
        "fulfillment_email_template",
        "license_type",
        "download_url",
        "additional_images",
        "primary_image_url",
        "brand",
    ):
        op.drop_column("products", column_name)

    # Batch mode is required for SQLite compatibility on column renames.
    with op.batch_alter_table("products", schema=None) as batch_op:
        # Rename vendor_sku back to its original name, product_id.
        batch_op.alter_column("vendor_sku", new_column_name="product_id")
|
||||||
@@ -0,0 +1,31 @@
|
|||||||
|
"""add_letzshop_order_locale_and_country_fields
|
||||||
|
|
||||||
|
Revision ID: a9a86cef6cca
|
||||||
|
Revises: fcfdc02d5138
|
||||||
|
Create Date: 2025-12-17 20:55:41.477848
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "a9a86cef6cca"
|
||||||
|
down_revision: str | None = "fcfdc02d5138"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add customer locale and country ISO columns to letzshop_orders."""
    new_columns = (
        sa.Column("customer_locale", sa.String(length=10), nullable=True),
        sa.Column("shipping_country_iso", sa.String(length=5), nullable=True),
        sa.Column("billing_country_iso", sa.String(length=5), nullable=True),
    )
    for column in new_columns:
        op.add_column("letzshop_orders", column)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the locale/country columns, reversing the upgrade() order."""
    for column_name in ("billing_country_iso", "shipping_country_iso", "customer_locale"):
        op.drop_column("letzshop_orders", column_name)
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
"""add language column to marketplace_import_jobs
|
||||||
|
|
||||||
|
Revision ID: b412e0b49c2e
|
||||||
|
Revises: 91d02647efae
|
||||||
|
Create Date: 2025-12-13 13:35:46.524893
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "b412e0b49c2e"
|
||||||
|
down_revision: str | None = "91d02647efae"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add a NOT NULL language column to marketplace_import_jobs.

    The server_default of "en" backfills existing rows so the NOT NULL
    constraint can be applied in a single step.
    """
    language_column = sa.Column(
        "language",
        sa.String(length=5),
        nullable=False,
        server_default="en",
    )
    op.add_column("marketplace_import_jobs", language_column)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the language column added by upgrade()."""
    op.drop_column("marketplace_import_jobs", "language")
|
||||||
@@ -0,0 +1,132 @@
|
|||||||
|
"""Migrate existing product data to translation tables
|
||||||
|
|
||||||
|
Revision ID: b4c5d6e7f8a9
|
||||||
|
Revises: a3b4c5d6e7f8
|
||||||
|
Create Date: 2025-12-11
|
||||||
|
|
||||||
|
This migration:
|
||||||
|
1. Copies existing title/description from marketplace_products to
|
||||||
|
marketplace_product_translations (default language: 'en')
|
||||||
|
2. Parses existing price strings to numeric values
|
||||||
|
3. Removes the old title/description columns from marketplace_products
|
||||||
|
|
||||||
|
Since we're not live yet, we can safely remove the old columns
|
||||||
|
after migrating the data to the new structure.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "b4c5d6e7f8a9"
|
||||||
|
down_revision: str | None = "a3b4c5d6e7f8"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def parse_price(price_str: str) -> float | None:
|
||||||
|
"""Parse price string like '19.99 EUR' to float."""
|
||||||
|
if not price_str:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Extract numeric value
|
||||||
|
numbers = re.findall(r"[\d.,]+", str(price_str))
|
||||||
|
if numbers:
|
||||||
|
num_str = numbers[0].replace(",", ".")
|
||||||
|
try:
|
||||||
|
return float(num_str)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Move product title/description into the translations table and
    parse string prices into numeric columns.

    Runs in three steps:
      1. Copy title/description into marketplace_product_translations
         under language 'en' for every product that has a title.
      2. Parse the string price/sale_price of every product via
         parse_price() and write the results to price_numeric /
         sale_price_numeric.
      3. Drop the now-redundant title/description columns.

    NOTE(review): assumes price_numeric / sale_price_numeric columns
    already exist on marketplace_products (presumably created by an
    earlier revision) — confirm against the migration chain.
    """
    conn = op.get_bind()

    # Step 1: Migrate existing title/description to translations table
    # Default language is 'en' for existing data
    conn.execute(
        text("""
        INSERT INTO marketplace_product_translations
        (marketplace_product_id, language, title, description, created_at, updated_at)
        SELECT
            id,
            'en',
            title,
            description,
            created_at,
            updated_at
        FROM marketplace_products
        WHERE title IS NOT NULL
        """)
    )

    # Step 2: Parse prices to numeric values
    # Get all marketplace products with prices
    result = conn.execute(
        text("SELECT id, price, sale_price FROM marketplace_products")
    )

    # Parse row-by-row in Python; parse_price handles currency suffixes
    # and separator variants that SQL casts cannot.
    for row in result:
        price_numeric = parse_price(row.price) if row.price else None
        sale_price_numeric = parse_price(row.sale_price) if row.sale_price else None

        # Skip the UPDATE entirely when neither value parsed.
        if price_numeric is not None or sale_price_numeric is not None:
            conn.execute(
                text("""
                UPDATE marketplace_products
                SET price_numeric = :price_numeric,
                    sale_price_numeric = :sale_price_numeric
                WHERE id = :id
                """),
                {
                    "id": row.id,
                    "price_numeric": price_numeric,
                    "sale_price_numeric": sale_price_numeric,
                },
            )

    # Step 3: Since we're not live, remove the old title/description columns
    # from marketplace_products (data is now in translations table)
    op.drop_column("marketplace_products", "title")
    op.drop_column("marketplace_products", "description")
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Restore title/description columns on marketplace_products from the
    'en' rows of the translations table, then delete those rows.

    NOTE(review): the numeric price columns populated by upgrade() are
    intentionally left in place — only the title/description move is
    reversed here.
    """
    # Re-add title and description columns
    op.add_column(
        "marketplace_products",
        sa.Column("title", sa.String(), nullable=True),
    )
    op.add_column(
        "marketplace_products",
        sa.Column("description", sa.String(), nullable=True),
    )

    # Copy data back from translations (only 'en' translations)
    conn = op.get_bind()
    conn.execute(
        text("""
        UPDATE marketplace_products
        SET title = (
            SELECT title FROM marketplace_product_translations
            WHERE marketplace_product_translations.marketplace_product_id = marketplace_products.id
            AND marketplace_product_translations.language = 'en'
        ),
        description = (
            SELECT description FROM marketplace_product_translations
            WHERE marketplace_product_translations.marketplace_product_id = marketplace_products.id
            AND marketplace_product_translations.language = 'en'
        )
        """)
    )

    # Delete the migrated translations
    conn.execute(
        text("DELETE FROM marketplace_product_translations WHERE language = 'en'")
    )
|
||||||
@@ -0,0 +1,41 @@
|
|||||||
|
"""add show_in_legal to content_pages
|
||||||
|
|
||||||
|
Revision ID: ba2c0ce78396
|
||||||
|
Revises: m1b2c3d4e5f6
|
||||||
|
Create Date: 2025-12-28 20:00:24.263518
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "ba2c0ce78396"
|
||||||
|
down_revision: str | None = "m1b2c3d4e5f6"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add the show_in_legal flag to content_pages.

    Pages with this flag set appear in the bottom bar alongside the
    copyright notice (e.g., Privacy Policy, Terms of Service).
    """
    flag_column = sa.Column("show_in_legal", sa.Boolean(), nullable=True, default=False)
    op.add_column("content_pages", flag_column)

    # Backfill existing rows (PostgreSQL uses true/false for boolean),
    # then enable the flag for the standard legal pages.
    backfill_statements = (
        "UPDATE content_pages SET show_in_legal = false WHERE show_in_legal IS NULL",
        "UPDATE content_pages SET show_in_legal = true WHERE slug IN ('privacy', 'terms')",
    )
    for statement in backfill_statements:
        op.execute(statement)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the show_in_legal flag from content_pages."""
    op.drop_column("content_pages", "show_in_legal")
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
"""add_letzshop_credentials_carrier_fields
|
||||||
|
|
||||||
|
Revision ID: c00d2985701f
|
||||||
|
Revises: 55b92e155566
|
||||||
|
Create Date: 2025-12-20 18:49:53.432904
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "c00d2985701f"
|
||||||
|
down_revision: str | None = "55b92e155566"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add carrier settings and a test-mode flag to vendor_letzshop_credentials."""
    carrier_columns = (
        sa.Column("test_mode_enabled", sa.Boolean(), nullable=True, server_default="0"),
        sa.Column("default_carrier", sa.String(length=50), nullable=True),
        sa.Column(
            "carrier_greco_label_url",
            sa.String(length=500),
            nullable=True,
            server_default="https://dispatchweb.fr/Tracky/Home/",
        ),
        sa.Column("carrier_colissimo_label_url", sa.String(length=500), nullable=True),
        sa.Column("carrier_xpresslogistics_label_url", sa.String(length=500), nullable=True),
    )
    for column in carrier_columns:
        op.add_column("vendor_letzshop_credentials", column)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the carrier/test-mode columns, reversing the upgrade() order."""
    for column_name in (
        "carrier_xpresslogistics_label_url",
        "carrier_colissimo_label_url",
        "carrier_greco_label_url",
        "default_carrier",
        "test_mode_enabled",
    ):
        op.drop_column("vendor_letzshop_credentials", column_name)
|
||||||
452
alembic/versions_backup/c1d2e3f4a5b6_unified_order_schema.py
Normal file
452
alembic/versions_backup/c1d2e3f4a5b6_unified_order_schema.py
Normal file
@@ -0,0 +1,452 @@
|
|||||||
|
"""unified_order_schema
|
||||||
|
|
||||||
|
Revision ID: c1d2e3f4a5b6
|
||||||
|
Revises: 2362c2723a93
|
||||||
|
Create Date: 2025-12-19
|
||||||
|
|
||||||
|
This migration implements the unified order schema:
|
||||||
|
- Removes the separate letzshop_orders table
|
||||||
|
- Enhances the orders table with:
|
||||||
|
- Customer/address snapshots (preserved at order time)
|
||||||
|
- External marketplace references
|
||||||
|
- Tracking provider field
|
||||||
|
- Enhances order_items with:
|
||||||
|
- GTIN fields
|
||||||
|
- External item references
|
||||||
|
- Item state for marketplace confirmation flow
|
||||||
|
- Updates letzshop_fulfillment_queue to reference orders table directly
|
||||||
|
|
||||||
|
Design principles:
|
||||||
|
- Single orders table for all channels (direct, letzshop, etc.)
|
||||||
|
- Customer/address data snapshotted at order time
|
||||||
|
- Products must exist in catalog (enforced by FK)
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy import inspect
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "c1d2e3f4a5b6"
|
||||||
|
down_revision: str | None = "2362c2723a93"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def table_exists(table_name: str) -> bool:
    """Return True when *table_name* is present in the current database."""
    inspector = inspect(op.get_bind())
    return table_name in inspector.get_table_names()
|
||||||
|
|
||||||
|
|
||||||
|
def index_exists(index_name: str, table_name: str) -> bool:
    """Return True when *table_name* carries an index named *index_name*.

    Any inspection failure (e.g. the table itself is missing) is treated
    as "index not present" so callers can drop indexes best-effort.
    """
    inspector = inspect(op.get_bind())
    try:
        existing = inspector.get_indexes(table_name)
        return any(entry["name"] == index_name for entry in existing)
    except Exception:
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def safe_drop_index(index_name: str, table_name: str) -> None:
    """Drop *index_name* from *table_name*; a no-op when it is absent."""
    if not index_exists(index_name, table_name):
        return
    op.drop_index(index_name, table_name=table_name)
|
||||||
|
|
||||||
|
|
||||||
|
def safe_drop_table(table_name: str) -> None:
    """Drop *table_name*; a no-op when it does not exist."""
    if not table_exists(table_name):
        return
    op.drop_table(table_name)
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Replace the channel-specific order tables with one unified schema.

    Drop order is FK-driven: dependents (letzshop_fulfillment_queue,
    order_items) go before the tables they reference (letzshop_orders,
    orders). Create order is the reverse: orders first, then its
    dependents. No data is migrated — old rows are discarded.
    """
    # =========================================================================
    # Step 1: Drop old tables that will be replaced (if they exist)
    # =========================================================================

    # Drop letzshop_fulfillment_queue (references letzshop_orders)
    if table_exists("letzshop_fulfillment_queue"):
        safe_drop_index("idx_fulfillment_queue_retry", "letzshop_fulfillment_queue")
        safe_drop_index("idx_fulfillment_queue_status", "letzshop_fulfillment_queue")
        safe_drop_index("ix_letzshop_fulfillment_queue_vendor_id", "letzshop_fulfillment_queue")
        safe_drop_index("ix_letzshop_fulfillment_queue_id", "letzshop_fulfillment_queue")
        op.drop_table("letzshop_fulfillment_queue")

    # Drop letzshop_orders table (replaced by unified orders)
    if table_exists("letzshop_orders"):
        safe_drop_index("idx_letzshop_order_sync", "letzshop_orders")
        safe_drop_index("idx_letzshop_order_state", "letzshop_orders")
        safe_drop_index("idx_letzshop_order_vendor", "letzshop_orders")
        safe_drop_index("ix_letzshop_orders_vendor_id", "letzshop_orders")
        safe_drop_index("ix_letzshop_orders_letzshop_shipment_id", "letzshop_orders")
        safe_drop_index("ix_letzshop_orders_letzshop_order_id", "letzshop_orders")
        safe_drop_index("ix_letzshop_orders_id", "letzshop_orders")
        op.drop_table("letzshop_orders")

    # Drop order_items (references orders)
    if table_exists("order_items"):
        safe_drop_index("ix_order_items_id", "order_items")
        safe_drop_index("ix_order_items_order_id", "order_items")
        op.drop_table("order_items")

    # Drop old orders table
    if table_exists("orders"):
        safe_drop_index("ix_orders_external_order_id", "orders")
        safe_drop_index("ix_orders_channel", "orders")
        safe_drop_index("ix_orders_vendor_id", "orders")
        safe_drop_index("ix_orders_status", "orders")
        safe_drop_index("ix_orders_order_number", "orders")
        safe_drop_index("ix_orders_id", "orders")
        safe_drop_index("ix_orders_customer_id", "orders")
        op.drop_table("orders")

    # =========================================================================
    # Step 2: Create new unified orders table
    # =========================================================================
    op.create_table("orders",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("customer_id", sa.Integer(), nullable=False),
        sa.Column("order_number", sa.String(length=100), nullable=False),

        # Channel/Source
        sa.Column("channel", sa.String(length=50), nullable=False, server_default="direct"),

        # External references (for marketplace orders)
        sa.Column("external_order_id", sa.String(length=100), nullable=True),
        sa.Column("external_shipment_id", sa.String(length=100), nullable=True),
        sa.Column("external_order_number", sa.String(length=100), nullable=True),
        sa.Column("external_data", sa.JSON(), nullable=True),

        # Status
        sa.Column("status", sa.String(length=50), nullable=False, server_default="pending"),

        # Financials
        sa.Column("subtotal", sa.Float(), nullable=True),
        sa.Column("tax_amount", sa.Float(), nullable=True),
        sa.Column("shipping_amount", sa.Float(), nullable=True),
        sa.Column("discount_amount", sa.Float(), nullable=True),
        sa.Column("total_amount", sa.Float(), nullable=False),
        sa.Column("currency", sa.String(length=10), server_default="EUR", nullable=True),

        # Customer snapshot (copied at order time, not a FK lookup)
        sa.Column("customer_first_name", sa.String(length=100), nullable=False),
        sa.Column("customer_last_name", sa.String(length=100), nullable=False),
        sa.Column("customer_email", sa.String(length=255), nullable=False),
        sa.Column("customer_phone", sa.String(length=50), nullable=True),
        sa.Column("customer_locale", sa.String(length=10), nullable=True),

        # Shipping address snapshot
        sa.Column("ship_first_name", sa.String(length=100), nullable=False),
        sa.Column("ship_last_name", sa.String(length=100), nullable=False),
        sa.Column("ship_company", sa.String(length=200), nullable=True),
        sa.Column("ship_address_line_1", sa.String(length=255), nullable=False),
        sa.Column("ship_address_line_2", sa.String(length=255), nullable=True),
        sa.Column("ship_city", sa.String(length=100), nullable=False),
        sa.Column("ship_postal_code", sa.String(length=20), nullable=False),
        sa.Column("ship_country_iso", sa.String(length=5), nullable=False),

        # Billing address snapshot
        sa.Column("bill_first_name", sa.String(length=100), nullable=False),
        sa.Column("bill_last_name", sa.String(length=100), nullable=False),
        sa.Column("bill_company", sa.String(length=200), nullable=True),
        sa.Column("bill_address_line_1", sa.String(length=255), nullable=False),
        sa.Column("bill_address_line_2", sa.String(length=255), nullable=True),
        sa.Column("bill_city", sa.String(length=100), nullable=False),
        sa.Column("bill_postal_code", sa.String(length=20), nullable=False),
        sa.Column("bill_country_iso", sa.String(length=5), nullable=False),

        # Tracking
        sa.Column("shipping_method", sa.String(length=100), nullable=True),
        sa.Column("tracking_number", sa.String(length=100), nullable=True),
        sa.Column("tracking_provider", sa.String(length=100), nullable=True),

        # Notes
        sa.Column("customer_notes", sa.Text(), nullable=True),
        sa.Column("internal_notes", sa.Text(), nullable=True),

        # Timestamps
        sa.Column("order_date", sa.DateTime(timezone=True), nullable=False),
        sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("shipped_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("delivered_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("cancelled_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),

        # Foreign keys
        sa.ForeignKeyConstraint(["customer_id"], ["customers.id"]),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
        sa.PrimaryKeyConstraint("id")
    )

    # Indexes for orders (order_number is the only unique one)
    op.create_index(op.f("ix_orders_id"), "orders", ["id"], unique=False)
    op.create_index(op.f("ix_orders_vendor_id"), "orders", ["vendor_id"], unique=False)
    op.create_index(op.f("ix_orders_customer_id"), "orders", ["customer_id"], unique=False)
    op.create_index(op.f("ix_orders_order_number"), "orders", ["order_number"], unique=True)
    op.create_index(op.f("ix_orders_channel"), "orders", ["channel"], unique=False)
    op.create_index(op.f("ix_orders_status"), "orders", ["status"], unique=False)
    op.create_index(op.f("ix_orders_external_order_id"), "orders", ["external_order_id"], unique=False)
    op.create_index(op.f("ix_orders_external_shipment_id"), "orders", ["external_shipment_id"], unique=False)
    op.create_index("idx_order_vendor_status", "orders", ["vendor_id", "status"], unique=False)
    op.create_index("idx_order_vendor_channel", "orders", ["vendor_id", "channel"], unique=False)
    op.create_index("idx_order_vendor_date", "orders", ["vendor_id", "order_date"], unique=False)

    # =========================================================================
    # Step 3: Create new order_items table
    # =========================================================================
    op.create_table("order_items",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("order_id", sa.Integer(), nullable=False),
        sa.Column("product_id", sa.Integer(), nullable=False),

        # Product snapshot
        sa.Column("product_name", sa.String(length=255), nullable=False),
        sa.Column("product_sku", sa.String(length=100), nullable=True),
        sa.Column("gtin", sa.String(length=50), nullable=True),
        sa.Column("gtin_type", sa.String(length=20), nullable=True),

        # Pricing
        sa.Column("quantity", sa.Integer(), nullable=False),
        sa.Column("unit_price", sa.Float(), nullable=False),
        sa.Column("total_price", sa.Float(), nullable=False),

        # External references (for marketplace items)
        sa.Column("external_item_id", sa.String(length=100), nullable=True),
        sa.Column("external_variant_id", sa.String(length=100), nullable=True),

        # Item state (for marketplace confirmation flow)
        sa.Column("item_state", sa.String(length=50), nullable=True),

        # Inventory tracking
        sa.Column("inventory_reserved", sa.Boolean(), server_default="0", nullable=True),
        sa.Column("inventory_fulfilled", sa.Boolean(), server_default="0", nullable=True),

        # Timestamps
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),

        # Foreign keys — product_id FK enforces "products must exist in catalog"
        sa.ForeignKeyConstraint(["order_id"], ["orders.id"]),
        sa.ForeignKeyConstraint(["product_id"], ["products.id"]),
        sa.PrimaryKeyConstraint("id")
    )

    # Indexes for order_items
    op.create_index(op.f("ix_order_items_id"), "order_items", ["id"], unique=False)
    op.create_index(op.f("ix_order_items_order_id"), "order_items", ["order_id"], unique=False)
    op.create_index(op.f("ix_order_items_product_id"), "order_items", ["product_id"], unique=False)
    op.create_index(op.f("ix_order_items_gtin"), "order_items", ["gtin"], unique=False)

    # =========================================================================
    # Step 4: Create updated letzshop_fulfillment_queue (references orders)
    # =========================================================================
    op.create_table("letzshop_fulfillment_queue",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("order_id", sa.Integer(), nullable=False),

        # Operation type
        sa.Column("operation", sa.String(length=50), nullable=False),

        # Operation payload
        sa.Column("payload", sa.JSON(), nullable=False),

        # Status and retry
        sa.Column("status", sa.String(length=50), server_default="pending", nullable=True),
        sa.Column("attempts", sa.Integer(), server_default="0", nullable=True),
        sa.Column("max_attempts", sa.Integer(), server_default="3", nullable=True),
        sa.Column("last_attempt_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("next_retry_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),

        # Response from Letzshop
        sa.Column("response_data", sa.JSON(), nullable=True),

        # Timestamps
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),

        # Foreign keys — now points at the unified orders table
        sa.ForeignKeyConstraint(["order_id"], ["orders.id"]),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
        sa.PrimaryKeyConstraint("id")
    )

    # Indexes for letzshop_fulfillment_queue
    op.create_index(op.f("ix_letzshop_fulfillment_queue_id"), "letzshop_fulfillment_queue", ["id"], unique=False)
    op.create_index(op.f("ix_letzshop_fulfillment_queue_vendor_id"), "letzshop_fulfillment_queue", ["vendor_id"], unique=False)
    op.create_index(op.f("ix_letzshop_fulfillment_queue_order_id"), "letzshop_fulfillment_queue", ["order_id"], unique=False)
    op.create_index("idx_fulfillment_queue_status", "letzshop_fulfillment_queue", ["status", "vendor_id"], unique=False)
    op.create_index("idx_fulfillment_queue_retry", "letzshop_fulfillment_queue", ["status", "next_retry_at"], unique=False)
    op.create_index("idx_fulfillment_queue_order", "letzshop_fulfillment_queue", ["order_id"], unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert the marketplace order restructuring.

    Drops the new ``letzshop_fulfillment_queue``, ``order_items`` and
    ``orders`` tables (with their indexes) and recreates the previous
    schema, including the old ``letzshop_orders`` table.
    """
    # --- Drop new letzshop_fulfillment_queue --------------------------------
    for index_name in (
        "idx_fulfillment_queue_order",
        "idx_fulfillment_queue_retry",
        "idx_fulfillment_queue_status",
        "ix_letzshop_fulfillment_queue_order_id",
        "ix_letzshop_fulfillment_queue_vendor_id",
        "ix_letzshop_fulfillment_queue_id",
    ):
        safe_drop_index(index_name, "letzshop_fulfillment_queue")
    safe_drop_table("letzshop_fulfillment_queue")

    # --- Drop new order_items -----------------------------------------------
    for index_name in (
        "ix_order_items_gtin",
        "ix_order_items_product_id",
        "ix_order_items_order_id",
        "ix_order_items_id",
    ):
        safe_drop_index(index_name, "order_items")
    safe_drop_table("order_items")

    # --- Drop new orders ----------------------------------------------------
    for index_name in (
        "idx_order_vendor_date",
        "idx_order_vendor_channel",
        "idx_order_vendor_status",
        "ix_orders_external_shipment_id",
        "ix_orders_external_order_id",
        "ix_orders_status",
        "ix_orders_channel",
        "ix_orders_order_number",
        "ix_orders_customer_id",
        "ix_orders_vendor_id",
        "ix_orders_id",
    ):
        safe_drop_index(index_name, "orders")
    safe_drop_table("orders")

    # --- Recreate old orders table ------------------------------------------
    op.create_table(
        "orders",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("customer_id", sa.Integer(), nullable=False),
        sa.Column("order_number", sa.String(), nullable=False),
        sa.Column("channel", sa.String(length=50), nullable=True, server_default="direct"),
        sa.Column("external_order_id", sa.String(length=100), nullable=True),
        sa.Column("external_channel_data", sa.JSON(), nullable=True),
        sa.Column("status", sa.String(), nullable=False),
        sa.Column("subtotal", sa.Float(), nullable=False),
        sa.Column("tax_amount", sa.Float(), nullable=True),
        sa.Column("shipping_amount", sa.Float(), nullable=True),
        sa.Column("discount_amount", sa.Float(), nullable=True),
        sa.Column("total_amount", sa.Float(), nullable=False),
        sa.Column("currency", sa.String(), nullable=True),
        sa.Column("shipping_address_id", sa.Integer(), nullable=False),
        sa.Column("billing_address_id", sa.Integer(), nullable=False),
        sa.Column("shipping_method", sa.String(), nullable=True),
        sa.Column("tracking_number", sa.String(), nullable=True),
        sa.Column("customer_notes", sa.Text(), nullable=True),
        sa.Column("internal_notes", sa.Text(), nullable=True),
        sa.Column("paid_at", sa.DateTime(), nullable=True),
        sa.Column("shipped_at", sa.DateTime(), nullable=True),
        sa.Column("delivered_at", sa.DateTime(), nullable=True),
        sa.Column("cancelled_at", sa.DateTime(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["billing_address_id"], ["customer_addresses.id"]),
        sa.ForeignKeyConstraint(["customer_id"], ["customers.id"]),
        sa.ForeignKeyConstraint(["shipping_address_id"], ["customer_addresses.id"]),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    for index_name, index_columns, is_unique in (
        ("ix_orders_customer_id", ["customer_id"], False),
        ("ix_orders_id", ["id"], False),
        ("ix_orders_order_number", ["order_number"], True),
        ("ix_orders_status", ["status"], False),
        ("ix_orders_vendor_id", ["vendor_id"], False),
        ("ix_orders_channel", ["channel"], False),
        ("ix_orders_external_order_id", ["external_order_id"], False),
    ):
        op.create_index(op.f(index_name), "orders", index_columns, unique=is_unique)

    # --- Recreate old order_items table -------------------------------------
    op.create_table(
        "order_items",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("order_id", sa.Integer(), nullable=False),
        sa.Column("product_id", sa.Integer(), nullable=False),
        sa.Column("product_name", sa.String(), nullable=False),
        sa.Column("product_sku", sa.String(), nullable=True),
        sa.Column("quantity", sa.Integer(), nullable=False),
        sa.Column("unit_price", sa.Float(), nullable=False),
        sa.Column("total_price", sa.Float(), nullable=False),
        sa.Column("inventory_reserved", sa.Boolean(), nullable=True),
        sa.Column("inventory_fulfilled", sa.Boolean(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["order_id"], ["orders.id"]),
        sa.ForeignKeyConstraint(["product_id"], ["products.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_order_items_id"), "order_items", ["id"], unique=False)
    op.create_index(op.f("ix_order_items_order_id"), "order_items", ["order_id"], unique=False)

    # --- Recreate old letzshop_orders table ---------------------------------
    op.create_table(
        "letzshop_orders",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("letzshop_order_id", sa.String(length=100), nullable=False),
        sa.Column("letzshop_shipment_id", sa.String(length=100), nullable=True),
        sa.Column("letzshop_order_number", sa.String(length=100), nullable=True),
        sa.Column("local_order_id", sa.Integer(), nullable=True),
        sa.Column("letzshop_state", sa.String(length=50), nullable=True),
        sa.Column("customer_email", sa.String(length=255), nullable=True),
        sa.Column("customer_name", sa.String(length=255), nullable=True),
        sa.Column("total_amount", sa.String(length=50), nullable=True),
        sa.Column("currency", sa.String(length=10), server_default="EUR", nullable=True),
        sa.Column("customer_locale", sa.String(length=10), nullable=True),
        sa.Column("shipping_country_iso", sa.String(length=5), nullable=True),
        sa.Column("billing_country_iso", sa.String(length=5), nullable=True),
        sa.Column("order_date", sa.DateTime(timezone=True), nullable=True),
        sa.Column("raw_order_data", sa.JSON(), nullable=True),
        sa.Column("inventory_units", sa.JSON(), nullable=True),
        sa.Column("sync_status", sa.String(length=50), server_default="pending", nullable=True),
        sa.Column("last_synced_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("sync_error", sa.Text(), nullable=True),
        sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("rejected_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("tracking_set_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("tracking_number", sa.String(length=100), nullable=True),
        sa.Column("tracking_carrier", sa.String(length=100), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.ForeignKeyConstraint(["local_order_id"], ["orders.id"]),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    for index_name, index_columns in (
        ("ix_letzshop_orders_id", ["id"]),
        ("ix_letzshop_orders_letzshop_order_id", ["letzshop_order_id"]),
        ("ix_letzshop_orders_letzshop_shipment_id", ["letzshop_shipment_id"]),
        ("ix_letzshop_orders_vendor_id", ["vendor_id"]),
    ):
        op.create_index(op.f(index_name), "letzshop_orders", index_columns, unique=False)
    op.create_index("idx_letzshop_order_vendor", "letzshop_orders", ["vendor_id", "letzshop_order_id"], unique=False)
    op.create_index("idx_letzshop_order_state", "letzshop_orders", ["vendor_id", "letzshop_state"], unique=False)
    op.create_index("idx_letzshop_order_sync", "letzshop_orders", ["vendor_id", "sync_status"], unique=False)

    # --- Recreate old letzshop_fulfillment_queue table ----------------------
    op.create_table(
        "letzshop_fulfillment_queue",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("letzshop_order_id", sa.Integer(), nullable=False),
        sa.Column("operation", sa.String(length=50), nullable=False),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("status", sa.String(length=50), server_default="pending", nullable=True),
        sa.Column("attempts", sa.Integer(), server_default="0", nullable=True),
        sa.Column("max_attempts", sa.Integer(), server_default="3", nullable=True),
        sa.Column("last_attempt_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("next_retry_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("response_data", sa.JSON(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        sa.ForeignKeyConstraint(["letzshop_order_id"], ["letzshop_orders.id"]),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_letzshop_fulfillment_queue_id"), "letzshop_fulfillment_queue", ["id"], unique=False)
    op.create_index(op.f("ix_letzshop_fulfillment_queue_vendor_id"), "letzshop_fulfillment_queue", ["vendor_id"], unique=False)
    op.create_index("idx_fulfillment_queue_status", "letzshop_fulfillment_queue", ["status", "vendor_id"], unique=False)
    op.create_index("idx_fulfillment_queue_retry", "letzshop_fulfillment_queue", ["status", "next_retry_at"], unique=False)
|
||||||
@@ -0,0 +1,64 @@
|
|||||||
|
"""add_tax_rate_cost_and_letzshop_settings
|
||||||
|
|
||||||
|
Revision ID: c9e22eadf533
|
||||||
|
Revises: e1f2a3b4c5d6
|
||||||
|
Create Date: 2025-12-20 21:13:30.709696
|
||||||
|
|
||||||
|
Adds:
|
||||||
|
- tax_rate_percent to products and marketplace_products (NOT NULL, default 17)
|
||||||
|
- cost_cents to products (for profit calculation)
|
||||||
|
- Letzshop feed settings to vendors (tax_rate, boost_sort, delivery_method, preorder_days)
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "c9e22eadf533"
|
||||||
|
down_revision: str | None = "e1f2a3b4c5d6"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add tax-rate / cost columns and Letzshop feed settings.

    - marketplace_products: tax_rate_percent (NOT NULL, default 17)
    - products: tax_rate_percent, cost_cents; drops legacy supplier_cost_cents
    - vendors: Letzshop feed settings (tax rate, boost sort, delivery method,
      preorder days)
    """
    # === MARKETPLACE PRODUCTS: Add tax_rate_percent ===
    with op.batch_alter_table("marketplace_products", schema=None) as batch_op:
        batch_op.add_column(sa.Column("tax_rate_percent", sa.Integer(), nullable=False, server_default="17"))

    # === PRODUCTS: Add tax_rate_percent and cost_cents, drop supplier_cost_cents ===
    # Check for the legacy column up front instead of wrapping drop_column in a
    # blanket try/except, which would also swallow unrelated failures (e.g. a
    # failed batch-table rebuild on SQLite).
    inspector = sa.inspect(op.get_bind())
    product_columns = {col["name"] for col in inspector.get_columns("products")}

    with op.batch_alter_table("products", schema=None) as batch_op:
        batch_op.add_column(sa.Column("tax_rate_percent", sa.Integer(), nullable=False, server_default="17"))
        batch_op.add_column(sa.Column("cost_cents", sa.Integer(), nullable=True))
        if "supplier_cost_cents" in product_columns:
            # Old column replaced by cost_cents (data migrated separately if needed).
            batch_op.drop_column("supplier_cost_cents")

    # === VENDORS: Add Letzshop feed settings ===
    with op.batch_alter_table("vendors", schema=None) as batch_op:
        batch_op.add_column(sa.Column("letzshop_default_tax_rate", sa.Integer(), nullable=False, server_default="17"))
        batch_op.add_column(sa.Column("letzshop_boost_sort", sa.String(length=10), nullable=True, server_default="5.0"))
        batch_op.add_column(sa.Column("letzshop_delivery_method", sa.String(length=100), nullable=True, server_default="package_delivery"))
        batch_op.add_column(sa.Column("letzshop_preorder_days", sa.Integer(), nullable=True, server_default="1"))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert tax-rate / cost columns and the Letzshop feed settings."""
    # Vendors: remove the Letzshop feed settings added in upgrade().
    with op.batch_alter_table("vendors", schema=None) as batch_op:
        for column_name in (
            "letzshop_preorder_days",
            "letzshop_delivery_method",
            "letzshop_boost_sort",
            "letzshop_default_tax_rate",
        ):
            batch_op.drop_column(column_name)

    # Products: drop the new columns and restore the legacy cost column.
    with op.batch_alter_table("products", schema=None) as batch_op:
        batch_op.drop_column("cost_cents")
        batch_op.drop_column("tax_rate_percent")
        batch_op.add_column(sa.Column("supplier_cost_cents", sa.Integer(), nullable=True))

    # Marketplace products: drop the new tax rate column.
    with op.batch_alter_table("marketplace_products", schema=None) as batch_op:
        batch_op.drop_column("tax_rate_percent")
|
||||||
@@ -0,0 +1,37 @@
|
|||||||
|
"""add_gtin_columns_to_product_table
|
||||||
|
|
||||||
|
Revision ID: cb88bc9b5f86
|
||||||
|
Revises: a9a86cef6cca
|
||||||
|
Create Date: 2025-12-18 20:54:55.185857
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "cb88bc9b5f86"
|
||||||
|
down_revision: str | None = "a9a86cef6cca"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add GTIN (EAN/UPC barcode) columns to products for order EAN matching.

    ``gtin`` holds the barcode number (e.g. "0889698273022"); ``gtin_type``
    holds the Letzshop format identifier (e.g. "gtin13", "gtin14", "isbn13").
    """
    for column_name, max_length in (("gtin", 50), ("gtin_type", 20)):
        op.add_column("products", sa.Column(column_name, sa.String(length=max_length), nullable=True))

    # Indexes used for EAN lookups during order matching.
    for index_name, index_columns in (
        ("idx_product_gtin", ["gtin"]),
        ("idx_product_vendor_gtin", ["vendor_id", "gtin"]),
    ):
        op.create_index(index_name, "products", index_columns, unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the GTIN columns and their lookup indexes from products."""
    for index_name in ("idx_product_vendor_gtin", "idx_product_gtin"):
        op.drop_index(index_name, table_name="products")
    for column_name in ("gtin_type", "gtin"):
        op.drop_column("products", column_name)
|
||||||
@@ -0,0 +1,77 @@
|
|||||||
|
"""add_companies_table_and_restructure_vendors
|
||||||
|
|
||||||
|
Revision ID: d0325d7c0f25
|
||||||
|
Revises: 0bd9ffaaced1
|
||||||
|
Create Date: 2025-11-30 14:58:17.165142
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "d0325d7c0f25"
|
||||||
|
down_revision: str | None = "0bd9ffaaced1"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the companies table and move vendor contact data onto it."""
    # Companies: the new owner of contact / legal details formerly on vendors.
    op.create_table(
        "companies",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("owner_user_id", sa.Integer(), nullable=False),
        sa.Column("contact_email", sa.String(), nullable=False),
        sa.Column("contact_phone", sa.String(), nullable=True),
        sa.Column("website", sa.String(), nullable=True),
        sa.Column("business_address", sa.Text(), nullable=True),
        sa.Column("tax_number", sa.String(), nullable=True),
        # NOTE(review): string defaults "true"/"false" assume a backend that
        # accepts boolean literals; on SQLite these become text literals —
        # confirm against the deployment database.
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
        sa.Column("is_verified", sa.Boolean(), nullable=False, server_default="false"),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        # NOTE(review): onupdate is a client-side hook and emits no DDL here —
        # updated_at maintenance presumably happens at the ORM layer; verify.
        sa.Column("updated_at", sa.DateTime(), nullable=False, server_default=sa.func.now(), onupdate=sa.func.now()),
        sa.ForeignKeyConstraint(["owner_user_id"], ["users.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    for index_name, index_columns in (
        ("ix_companies_id", ["id"]),
        ("ix_companies_name", ["name"]),
    ):
        op.create_index(op.f(index_name), "companies", index_columns, unique=False)

    # Vendors: link to companies and drop the now-duplicated contact fields.
    # Batch mode rebuilds the table, which SQLite requires for column drops.
    with op.batch_alter_table("vendors", schema=None) as batch_op:
        batch_op.add_column(sa.Column("company_id", sa.Integer(), nullable=True))
        batch_op.create_index(batch_op.f("ix_vendors_company_id"), ["company_id"], unique=False)
        batch_op.create_foreign_key("fk_vendors_company_id", "companies", ["company_id"], ["id"])

        for column_name in (
            "contact_email",
            "contact_phone",
            "website",
            "business_address",
            "tax_number",
        ):
            batch_op.drop_column(column_name)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Restore vendor contact fields and drop the companies table."""
    # Batch mode rebuilds the vendors table, which SQLite requires.
    with op.batch_alter_table("vendors", schema=None) as batch_op:
        # Re-add the contact fields that upgrade() moved to companies.
        for column_name, column_type in (
            ("tax_number", sa.String()),
            ("business_address", sa.Text()),
            ("website", sa.String()),
            ("contact_phone", sa.String()),
            ("contact_email", sa.String()),
        ):
            batch_op.add_column(sa.Column(column_name, column_type, nullable=True))

        # Unlink vendors from companies.
        batch_op.drop_constraint("fk_vendors_company_id", type_="foreignkey")
        batch_op.drop_index(batch_op.f("ix_vendors_company_id"))
        batch_op.drop_column("company_id")

    # Companies table goes last, once nothing references it any more.
    op.drop_index(op.f("ix_companies_name"), table_name="companies")
    op.drop_index(op.f("ix_companies_id"), table_name="companies")
    op.drop_table("companies")
|
||||||
@@ -0,0 +1,179 @@
|
|||||||
|
"""add_order_item_exceptions
|
||||||
|
|
||||||
|
Revision ID: d2e3f4a5b6c7
|
||||||
|
Revises: c1d2e3f4a5b6
|
||||||
|
Create Date: 2025-12-20
|
||||||
|
|
||||||
|
This migration adds the Order Item Exception system:
|
||||||
|
- Adds needs_product_match column to order_items table
|
||||||
|
- Creates order_item_exceptions table for tracking unmatched products
|
||||||
|
|
||||||
|
The exception system allows marketplace orders to be imported even when
|
||||||
|
products are not found by GTIN. Items are linked to a placeholder product
|
||||||
|
and exceptions are tracked for QC resolution.
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy import inspect
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "d2e3f4a5b6c7"
|
||||||
|
down_revision: str | None = "c1d2e3f4a5b6"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def column_exists(table_name: str, column_name: str) -> bool:
    """Return True if *table_name* already has a column named *column_name*."""
    inspector = inspect(op.get_bind())
    return any(col["name"] == column_name for col in inspector.get_columns(table_name))
|
||||||
|
|
||||||
|
|
||||||
|
def table_exists(table_name: str) -> bool:
    """Return True if *table_name* is present in the connected database."""
    inspector = inspect(op.get_bind())
    return table_name in inspector.get_table_names()
|
||||||
|
|
||||||
|
|
||||||
|
def index_exists(index_name: str, table_name: str) -> bool:
    """Check if an index exists on a table.

    Returns False when the table itself is missing (e.g. on a fresh
    database) instead of propagating the inspection error.
    """
    bind = op.get_bind()
    inspector = inspect(bind)
    try:
        indexes = inspector.get_indexes(table_name)
    except sa.exc.NoSuchTableError:
        # Narrowed from a bare ``except Exception`` so real inspection
        # failures (broken connection, dialect bugs) still surface.
        return False
    return any(idx["name"] == index_name for idx in indexes)
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add the Order Item Exception system.

    Adds ``order_items.needs_product_match`` plus its index, and creates the
    ``order_item_exceptions`` table (one row per unmatched order item) with
    its lookup indexes. Each step is guarded so re-running is a no-op.
    """
    # -------------------------------------------------------------------------
    # Step 1: flag column on order_items
    # -------------------------------------------------------------------------
    if not column_exists("order_items", "needs_product_match"):
        op.add_column(
            "order_items",
            sa.Column("needs_product_match", sa.Boolean(), server_default="0", nullable=False),
        )

    if not index_exists("ix_order_items_needs_product_match", "order_items"):
        op.create_index("ix_order_items_needs_product_match", "order_items", ["needs_product_match"])

    # -------------------------------------------------------------------------
    # Step 2: exception tracking table
    # -------------------------------------------------------------------------
    if not table_exists("order_item_exceptions"):
        op.create_table(
            "order_item_exceptions",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("order_item_id", sa.Integer(), nullable=False),
            sa.Column("vendor_id", sa.Integer(), nullable=False),
            # Snapshot of the marketplace item data that failed to match.
            sa.Column("original_gtin", sa.String(length=50), nullable=True),
            sa.Column("original_product_name", sa.String(length=500), nullable=True),
            sa.Column("original_sku", sa.String(length=100), nullable=True),
            sa.Column("exception_type", sa.String(length=50), nullable=False, server_default="product_not_found"),
            sa.Column("status", sa.String(length=50), nullable=False, server_default="pending"),
            # Resolution bookkeeping, filled in during QC.
            sa.Column("resolved_product_id", sa.Integer(), nullable=True),
            sa.Column("resolved_at", sa.DateTime(timezone=True), nullable=True),
            sa.Column("resolved_by", sa.Integer(), nullable=True),
            sa.Column("resolution_notes", sa.Text(), nullable=True),
            sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
            sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
            sa.ForeignKeyConstraint(["order_item_id"], ["order_items.id"], ondelete="CASCADE"),
            sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
            sa.ForeignKeyConstraint(["resolved_product_id"], ["products.id"]),
            sa.ForeignKeyConstraint(["resolved_by"], ["users.id"]),
            sa.PrimaryKeyConstraint("id"),
        )

        # Lookup indexes (table was just created, so none can exist yet).
        for index_name, index_columns in (
            ("ix_order_item_exceptions_id", ["id"]),
            ("ix_order_item_exceptions_vendor_id", ["vendor_id"]),
            ("ix_order_item_exceptions_status", ["status"]),
            ("idx_exception_vendor_status", ["vendor_id", "status"]),
            ("idx_exception_gtin", ["vendor_id", "original_gtin"]),
        ):
            op.create_index(index_name, "order_item_exceptions", index_columns)

        # Unique constraint on order_item_id (one exception per item).
        op.create_index(
            "uq_order_item_exception",
            "order_item_exceptions",
            ["order_item_id"],
            unique=True,
        )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the Order Item Exception system.

    Mirrors upgrade(): drops the order_item_exceptions table and its indexes,
    then removes order_items.needs_product_match. Every drop is guarded with
    the module's existence helpers so a partially-applied upgrade (or a
    repeated downgrade) does not abort on a missing index — upgrade() uses the
    same guards, but the original downgrade only guarded the column index.
    """
    # Drop order_item_exceptions table and its indexes.
    if table_exists("order_item_exceptions"):
        for index_name in (
            "uq_order_item_exception",
            "idx_exception_gtin",
            "idx_exception_vendor_status",
            "ix_order_item_exceptions_status",
            "ix_order_item_exceptions_vendor_id",
            "ix_order_item_exceptions_id",
        ):
            if index_exists(index_name, "order_item_exceptions"):
                op.drop_index(index_name, table_name="order_item_exceptions")
        op.drop_table("order_item_exceptions")

    # Remove needs_product_match column from order_items.
    if column_exists("order_items", "needs_product_match"):
        if index_exists("ix_order_items_needs_product_match", "order_items"):
            op.drop_index("ix_order_items_needs_product_match", table_name="order_items")
        op.drop_column("order_items", "needs_product_match")
|
||||||
@@ -0,0 +1,332 @@
|
|||||||
|
"""add email templates and logs tables
|
||||||
|
|
||||||
|
Revision ID: d7a4a3f06394
|
||||||
|
Revises: 404b3e2d2865
|
||||||
|
Create Date: 2025-12-27 20:48:00.661523
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "d7a4a3f06394"
|
||||||
|
down_revision: str | None = "404b3e2d2865"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the email_templates / email_logs tables and normalize legacy
    constraint and index names across several existing tables.

    Raw-SQL statements use PostgreSQL IF EXISTS / IF NOT EXISTS guards (or
    pg_constraint lookups inside DO blocks) so the migration tolerates
    re-runs and partially-migrated databases.
    """

    def _exec(sql: str) -> None:
        # Shorthand: run one raw SQL statement on the migration connection.
        op.execute(text(sql))

    # --- email_templates -------------------------------------------------
    op.create_table(
        "email_templates",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=100), nullable=False),
        sa.Column("language", sa.String(length=5), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("category", sa.String(length=50), nullable=False),
        sa.Column("subject", sa.String(length=500), nullable=False),
        sa.Column("body_html", sa.Text(), nullable=False),
        sa.Column("body_text", sa.Text(), nullable=True),
        sa.Column("variables", sa.Text(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_email_templates_category"), "email_templates", ["category"], unique=False)
    op.create_index(op.f("ix_email_templates_code"), "email_templates", ["code"], unique=False)
    op.create_index(op.f("ix_email_templates_id"), "email_templates", ["id"], unique=False)

    # --- email_logs ------------------------------------------------------
    op.create_table(
        "email_logs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("template_code", sa.String(length=100), nullable=True),
        sa.Column("template_id", sa.Integer(), nullable=True),
        sa.Column("recipient_email", sa.String(length=255), nullable=False),
        sa.Column("recipient_name", sa.String(length=255), nullable=True),
        sa.Column("subject", sa.String(length=500), nullable=False),
        sa.Column("body_html", sa.Text(), nullable=True),
        sa.Column("body_text", sa.Text(), nullable=True),
        sa.Column("from_email", sa.String(length=255), nullable=False),
        sa.Column("from_name", sa.String(length=255), nullable=True),
        sa.Column("reply_to", sa.String(length=255), nullable=True),
        sa.Column("status", sa.String(length=20), nullable=False),
        sa.Column("sent_at", sa.DateTime(), nullable=True),
        sa.Column("delivered_at", sa.DateTime(), nullable=True),
        sa.Column("opened_at", sa.DateTime(), nullable=True),
        sa.Column("clicked_at", sa.DateTime(), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("retry_count", sa.Integer(), nullable=False),
        sa.Column("provider", sa.String(length=50), nullable=True),
        sa.Column("provider_message_id", sa.String(length=255), nullable=True),
        sa.Column("vendor_id", sa.Integer(), nullable=True),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("related_type", sa.String(length=50), nullable=True),
        sa.Column("related_id", sa.Integer(), nullable=True),
        sa.Column("extra_data", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["template_id"], ["email_templates.id"]),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    for log_column in ("id", "provider_message_id", "recipient_email",
                       "status", "template_code", "user_id", "vendor_id"):
        op.create_index(op.f(f"ix_email_logs_{log_column}"), "email_logs", [log_column], unique=False)

    # --- application_logs: timestamps become NOT NULL --------------------
    for ts_column in ("created_at", "updated_at"):
        op.alter_column("application_logs", ts_column, existing_type=sa.DATETIME(), nullable=False)

    # --- capacity_snapshots ----------------------------------------------
    # NOTE(review): the old ix_capacity_snapshots_date is recreated
    # non-unique here, while downgrade() restores it as UNIQUE — presumably
    # matching the pre-migration state; confirm against the prior schema.
    _exec("DROP INDEX IF EXISTS ix_capacity_snapshots_date")
    _exec("CREATE INDEX IF NOT EXISTS ix_capacity_snapshots_date ON capacity_snapshots (snapshot_date)")
    _exec("CREATE UNIQUE INDEX IF NOT EXISTS ix_capacity_snapshots_snapshot_date ON capacity_snapshots (snapshot_date)")

    # --- cart_items: timestamps become NOT NULL --------------------------
    for ts_column in ("created_at", "updated_at"):
        op.alter_column("cart_items", ts_column, existing_type=sa.DATETIME(), nullable=False)

    # --- customer_addresses: fix mis-pluralized index name ---------------
    _exec("DROP INDEX IF EXISTS ix_customers_addresses_id")
    _exec("CREATE INDEX IF NOT EXISTS ix_customer_addresses_id ON customer_addresses (id)")

    # --- inventory: warehouse/bin_location replace the old location key --
    op.alter_column("inventory", "warehouse", existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column("inventory", "bin_location", existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column("inventory", "location", existing_type=sa.VARCHAR(), nullable=True)
    _exec("DROP INDEX IF EXISTS idx_inventory_product_location")
    _exec("ALTER TABLE inventory DROP CONSTRAINT IF EXISTS uq_inventory_product_location")
    _exec("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_inventory_product_warehouse_bin') THEN
                ALTER TABLE inventory ADD CONSTRAINT uq_inventory_product_warehouse_bin UNIQUE (product_id, warehouse, bin_location);
            END IF;
        END $$;
    """)

    # --- marketplace import errors / translations ------------------------
    _exec("CREATE INDEX IF NOT EXISTS ix_marketplace_import_errors_import_job_id ON marketplace_import_errors (import_job_id)")
    _exec("CREATE INDEX IF NOT EXISTS ix_marketplace_product_translations_id ON marketplace_product_translations (id)")

    # --- marketplace_products: flags become nullable ---------------------
    op.alter_column("marketplace_products", "is_digital", existing_type=sa.BOOLEAN(), nullable=True)
    op.alter_column("marketplace_products", "is_active", existing_type=sa.BOOLEAN(), nullable=True)

    # Replace the legacy idx_mp_* indexes with ix_marketplace_products_*.
    for legacy_index in ("idx_mp_is_active", "idx_mp_platform", "idx_mp_sku"):
        _exec(f"DROP INDEX IF EXISTS {legacy_index}")
    for mp_column in ("is_active", "is_digital", "mpn", "platform", "sku"):
        _exec(f"CREATE INDEX IF NOT EXISTS ix_marketplace_products_{mp_column} ON marketplace_products ({mp_column})")

    # --- order_item_exceptions: unique index becomes a named constraint --
    _exec("DROP INDEX IF EXISTS uq_order_item_exception")
    _exec("CREATE INDEX IF NOT EXISTS ix_order_item_exceptions_original_gtin ON order_item_exceptions (original_gtin)")
    _exec("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_order_item_exceptions_order_item_id') THEN
                ALTER TABLE order_item_exceptions ADD CONSTRAINT uq_order_item_exceptions_order_item_id UNIQUE (order_item_id);
            END IF;
        END $$;
    """)

    # --- order_items -----------------------------------------------------
    op.alter_column("order_items", "needs_product_match", existing_type=sa.BOOLEAN(), nullable=True)
    _exec("DROP INDEX IF EXISTS ix_order_items_gtin")
    _exec("DROP INDEX IF EXISTS ix_order_items_product_id")

    # --- product_translations --------------------------------------------
    _exec("CREATE INDEX IF NOT EXISTS ix_product_translations_id ON product_translations (id)")

    # --- products: rename indexes, swap unique constraint ----------------
    for legacy_index in ("idx_product_active", "idx_product_featured",
                         "idx_product_gtin", "idx_product_vendor_gtin"):
        _exec(f"DROP INDEX IF EXISTS {legacy_index}")
    _exec("ALTER TABLE products DROP CONSTRAINT IF EXISTS uq_product")
    _exec("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_vendor_marketplace_product') THEN
                ALTER TABLE products ADD CONSTRAINT uq_vendor_marketplace_product UNIQUE (vendor_id, marketplace_product_id);
            END IF;
        END $$;
    """)
    _exec("CREATE INDEX IF NOT EXISTS idx_product_vendor_active ON products (vendor_id, is_active)")
    _exec("CREATE INDEX IF NOT EXISTS idx_product_vendor_featured ON products (vendor_id, is_featured)")
    _exec("CREATE INDEX IF NOT EXISTS ix_products_gtin ON products (gtin)")
    _exec("CREATE INDEX IF NOT EXISTS ix_products_vendor_sku ON products (vendor_sku)")

    # --- vendor_domains: fix mis-pluralized index names ------------------
    _exec("DROP INDEX IF EXISTS ix_vendors_domains_domain")
    _exec("DROP INDEX IF EXISTS ix_vendors_domains_id")
    _exec("CREATE UNIQUE INDEX IF NOT EXISTS ix_vendor_domains_domain ON vendor_domains (domain)")
    _exec("CREATE INDEX IF NOT EXISTS ix_vendor_domains_id ON vendor_domains (id)")

    # --- vendor_subscriptions: NOT NULL retry count + missing FK ---------
    op.alter_column("vendor_subscriptions", "payment_retry_count", existing_type=sa.INTEGER(), nullable=False)
    _exec("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_vendor_subscriptions_tier_id') THEN
                ALTER TABLE vendor_subscriptions ADD CONSTRAINT fk_vendor_subscriptions_tier_id
                    FOREIGN KEY (tier_id) REFERENCES subscription_tiers(id);
            END IF;
        END $$;
    """)

    # --- vendor_themes ---------------------------------------------------
    _exec("DROP INDEX IF EXISTS ix_vendors_themes_id")
    _exec("CREATE INDEX IF NOT EXISTS ix_vendor_themes_id ON vendor_themes (id)")

    # --- vendor_users ----------------------------------------------------
    _exec("DROP INDEX IF EXISTS ix_vendors_users_id")
    _exec("DROP INDEX IF EXISTS ix_vendors_users_invitation_token")
    _exec("CREATE INDEX IF NOT EXISTS ix_vendor_users_id ON vendor_users (id)")
    _exec("CREATE INDEX IF NOT EXISTS ix_vendor_users_invitation_token ON vendor_users (invitation_token)")

    # --- vendors: every vendor must belong to a company ------------------
    op.alter_column("vendors", "company_id", existing_type=sa.INTEGER(), nullable=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Reverse upgrade(): restore the legacy index/constraint names and
    nullability, then drop the email tables.

    Raw-SQL statements carry the same IF EXISTS / IF NOT EXISTS guards as
    upgrade(), so the rollback also tolerates re-runs.
    """

    def _exec(sql: str) -> None:
        # Shorthand: run one raw SQL statement on the migration connection.
        op.execute(text(sql))

    # --- vendors ---------------------------------------------------------
    op.alter_column("vendors", "company_id", existing_type=sa.INTEGER(), nullable=True)

    # --- vendor_users: restore mis-pluralized names ----------------------
    _exec("DROP INDEX IF EXISTS ix_vendor_users_invitation_token")
    _exec("DROP INDEX IF EXISTS ix_vendor_users_id")
    _exec("CREATE INDEX IF NOT EXISTS ix_vendors_users_invitation_token ON vendor_users (invitation_token)")
    _exec("CREATE INDEX IF NOT EXISTS ix_vendors_users_id ON vendor_users (id)")

    # --- vendor_themes ---------------------------------------------------
    _exec("DROP INDEX IF EXISTS ix_vendor_themes_id")
    _exec("CREATE INDEX IF NOT EXISTS ix_vendors_themes_id ON vendor_themes (id)")

    # --- vendor_subscriptions --------------------------------------------
    _exec("ALTER TABLE vendor_subscriptions DROP CONSTRAINT IF EXISTS fk_vendor_subscriptions_tier_id")
    op.alter_column("vendor_subscriptions", "payment_retry_count", existing_type=sa.INTEGER(), nullable=True)

    # --- vendor_domains --------------------------------------------------
    _exec("DROP INDEX IF EXISTS ix_vendor_domains_id")
    _exec("DROP INDEX IF EXISTS ix_vendor_domains_domain")
    _exec("CREATE INDEX IF NOT EXISTS ix_vendors_domains_id ON vendor_domains (id)")
    _exec("CREATE UNIQUE INDEX IF NOT EXISTS ix_vendors_domains_domain ON vendor_domains (domain)")

    # --- products: restore uq_product and the old idx_product_* indexes --
    _exec("ALTER TABLE products DROP CONSTRAINT IF EXISTS uq_vendor_marketplace_product")
    _exec("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_product') THEN
                ALTER TABLE products ADD CONSTRAINT uq_product UNIQUE (vendor_id, marketplace_product_id);
            END IF;
        END $$;
    """)
    for new_index in ("ix_products_vendor_sku", "ix_products_gtin",
                      "idx_product_vendor_featured", "idx_product_vendor_active"):
        _exec(f"DROP INDEX IF EXISTS {new_index}")
    _exec("CREATE INDEX IF NOT EXISTS idx_product_vendor_gtin ON products (vendor_id, gtin)")
    _exec("CREATE INDEX IF NOT EXISTS idx_product_gtin ON products (gtin)")
    _exec("CREATE INDEX IF NOT EXISTS idx_product_featured ON products (vendor_id, is_featured)")
    _exec("CREATE INDEX IF NOT EXISTS idx_product_active ON products (vendor_id, is_active)")

    # --- product_translations --------------------------------------------
    _exec("DROP INDEX IF EXISTS ix_product_translations_id")

    # --- order_items -----------------------------------------------------
    _exec("CREATE INDEX IF NOT EXISTS ix_order_items_product_id ON order_items (product_id)")
    _exec("CREATE INDEX IF NOT EXISTS ix_order_items_gtin ON order_items (gtin)")
    op.alter_column("order_items", "needs_product_match", existing_type=sa.BOOLEAN(), nullable=False)

    # --- order_item_exceptions: constraint back to a unique index --------
    _exec("ALTER TABLE order_item_exceptions DROP CONSTRAINT IF EXISTS uq_order_item_exceptions_order_item_id")
    _exec("DROP INDEX IF EXISTS ix_order_item_exceptions_original_gtin")
    _exec("CREATE UNIQUE INDEX IF NOT EXISTS uq_order_item_exception ON order_item_exceptions (order_item_id)")

    # --- marketplace_products: restore legacy idx_mp_* indexes -----------
    for mp_column in ("sku", "platform", "mpn", "is_digital", "is_active"):
        _exec(f"DROP INDEX IF EXISTS ix_marketplace_products_{mp_column}")
    _exec("CREATE INDEX IF NOT EXISTS idx_mp_sku ON marketplace_products (sku)")
    _exec("CREATE INDEX IF NOT EXISTS idx_mp_platform ON marketplace_products (platform)")
    _exec("CREATE INDEX IF NOT EXISTS idx_mp_is_active ON marketplace_products (is_active)")

    op.alter_column("marketplace_products", "is_active", existing_type=sa.BOOLEAN(), nullable=False)
    op.alter_column("marketplace_products", "is_digital", existing_type=sa.BOOLEAN(), nullable=False)

    # --- marketplace import/translation indexes --------------------------
    _exec("DROP INDEX IF EXISTS ix_marketplace_product_translations_id")
    _exec("DROP INDEX IF EXISTS ix_marketplace_import_errors_import_job_id")

    # --- inventory: restore product/location uniqueness ------------------
    _exec("ALTER TABLE inventory DROP CONSTRAINT IF EXISTS uq_inventory_product_warehouse_bin")
    _exec("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_inventory_product_location') THEN
                ALTER TABLE inventory ADD CONSTRAINT uq_inventory_product_location UNIQUE (product_id, location);
            END IF;
        END $$;
    """)
    _exec("CREATE INDEX IF NOT EXISTS idx_inventory_product_location ON inventory (product_id, location)")
    op.alter_column("inventory", "location", existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column("inventory", "bin_location", existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column("inventory", "warehouse", existing_type=sa.VARCHAR(), nullable=True)

    # --- customer_addresses ----------------------------------------------
    _exec("DROP INDEX IF EXISTS ix_customer_addresses_id")
    _exec("CREATE INDEX IF NOT EXISTS ix_customers_addresses_id ON customer_addresses (id)")

    # --- cart_items: timestamps nullable again ---------------------------
    op.alter_column("cart_items", "updated_at", existing_type=sa.DATETIME(), nullable=True)
    op.alter_column("cart_items", "created_at", existing_type=sa.DATETIME(), nullable=True)

    # --- capacity_snapshots ----------------------------------------------
    _exec("DROP INDEX IF EXISTS ix_capacity_snapshots_snapshot_date")
    _exec("DROP INDEX IF EXISTS ix_capacity_snapshots_date")
    _exec("CREATE UNIQUE INDEX IF NOT EXISTS ix_capacity_snapshots_date ON capacity_snapshots (snapshot_date)")

    # --- application_logs: timestamps nullable again ---------------------
    op.alter_column("application_logs", "updated_at", existing_type=sa.DATETIME(), nullable=True)
    op.alter_column("application_logs", "created_at", existing_type=sa.DATETIME(), nullable=True)

    # --- drop the email tables created by upgrade() ----------------------
    for log_column in ("vendor_id", "user_id", "template_code", "status",
                       "recipient_email", "provider_message_id", "id"):
        op.drop_index(op.f(f"ix_email_logs_{log_column}"), table_name="email_logs")
    op.drop_table("email_logs")
    for tpl_column in ("id", "code", "category"):
        op.drop_index(op.f(f"ix_email_templates_{tpl_column}"), table_name="email_templates")
    op.drop_table("email_templates")
|
||||||
@@ -0,0 +1,205 @@
|
|||||||
|
"""Add product type and digital fields to marketplace_products
|
||||||
|
|
||||||
|
Revision ID: e1a2b3c4d5e6
|
||||||
|
Revises: 28d44d503cac
|
||||||
|
Create Date: 2025-12-11
|
||||||
|
|
||||||
|
This migration adds support for:
|
||||||
|
- Product type classification (physical, digital, service, subscription)
|
||||||
|
- Digital product fields (delivery method, platform, region restrictions)
|
||||||
|
- Numeric price fields for filtering/sorting
|
||||||
|
- Additional images as JSON array
|
||||||
|
- Source URL tracking
|
||||||
|
- Flexible attributes JSON column
|
||||||
|
- Active status flag
|
||||||
|
|
||||||
|
It also renames 'product_type' to 'product_type_raw' to preserve the original
|
||||||
|
Google Shopping feed value while using 'product_type' for the new enum.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision: str = "e1a2b3c4d5e6"  # this migration's unique id
down_revision: str | None = "28d44d503cac"  # parent revision in the chain
branch_labels: str | Sequence[str] | None = None  # no named branch
depends_on: str | Sequence[str] | None = None  # no cross-branch dependency
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add product-type classification, digital-product metadata, numeric
    price columns, JSON attribute/image columns and supporting indexes to
    marketplace_products.

    The raw Google Shopping feed value is preserved under
    ``product_type_raw``; the new classification column is
    ``product_type_enum``.
    """
    # Preserve the original feed value before introducing the enum column.
    op.alter_column(
        "marketplace_products",
        "product_type",
        new_column_name="product_type_raw",
    )

    # All new columns, in the order they are added.
    new_columns = [
        # classification
        sa.Column("product_type_enum", sa.String(20), nullable=False, server_default="physical"),
        sa.Column("is_digital", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        # digital-product specifics
        sa.Column("digital_delivery_method", sa.String(20), nullable=True),
        sa.Column("platform", sa.String(50), nullable=True),
        sa.Column("region_restrictions", sa.JSON(), nullable=True),
        sa.Column("license_type", sa.String(50), nullable=True),
        # source tracking
        sa.Column("source_url", sa.String(), nullable=True),
        # numeric prices for filtering/sorting
        sa.Column("price_numeric", sa.Float(), nullable=True),
        sa.Column("sale_price_numeric", sa.Float(), nullable=True),
        # flexible JSON payloads
        sa.Column("attributes", sa.JSON(), nullable=True),
        # complements the existing additional_image_link column
        sa.Column("additional_images", sa.JSON(), nullable=True),
        # active flag
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default=sa.text("true")),
        # internal reference and physical-product fields
        sa.Column("sku", sa.String(), nullable=True),
        sa.Column("weight", sa.Float(), nullable=True),
        sa.Column("weight_unit", sa.String(10), nullable=True, server_default="kg"),
        sa.Column("dimensions", sa.JSON(), nullable=True),
        # normalized category hierarchy
        sa.Column("category_path", sa.String(), nullable=True),
    ]
    for column in new_columns:
        op.add_column("marketplace_products", column)

    # Indexes over the new columns (all non-unique).
    for index_name, index_columns in (
        ("idx_mp_product_type", ["product_type_enum", "is_digital"]),
        ("idx_mp_is_active", ["is_active"]),
        ("idx_mp_platform", ["platform"]),
        ("idx_mp_sku", ["sku"]),
        ("idx_mp_gtin_marketplace", ["gtin", "marketplace"]),
    ):
        op.create_index(index_name, "marketplace_products", index_columns)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop everything upgrade() added and restore the original
    product_type column name."""
    # Indexes first.
    for index_name in (
        "idx_mp_gtin_marketplace",
        "idx_mp_sku",
        "idx_mp_platform",
        "idx_mp_is_active",
        "idx_mp_product_type",
    ):
        op.drop_index(index_name, table_name="marketplace_products")

    # Then the columns, in reverse order of addition.
    for column_name in (
        "category_path",
        "dimensions",
        "weight_unit",
        "weight",
        "sku",
        "is_active",
        "additional_images",
        "attributes",
        "sale_price_numeric",
        "price_numeric",
        "source_url",
        "license_type",
        "region_restrictions",
        "platform",
        "digital_delivery_method",
        "is_digital",
        "product_type_enum",
    ):
        op.drop_column("marketplace_products", column_name)

    # Finally give the raw feed value its original name back.
    op.alter_column(
        "marketplace_products",
        "product_type_raw",
        new_column_name="product_type",
    )
|
||||||
@@ -0,0 +1,90 @@
|
|||||||
|
"""add_warehouse_and_bin_location_to_inventory
|
||||||
|
|
||||||
|
Revision ID: e1bfb453fbe9
|
||||||
|
Revises: j8e9f0a1b2c3
|
||||||
|
Create Date: 2025-12-25 12:21:24.006548
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision: str = "e1bfb453fbe9"  # this migration's unique id
down_revision: str | None = "j8e9f0a1b2c3"  # parent revision in the chain
branch_labels: str | Sequence[str] | None = None  # no named branch
depends_on: str | Sequence[str] | None = None  # no cross-branch dependency
|
||||||
|
|
||||||
|
|
||||||
|
def get_column_names(conn, table_name: str) -> set:
    """Return the set of column names on *table_name* (PostgreSQL, public schema)."""
    query = text(
        "SELECT column_name FROM information_schema.columns "
        "WHERE table_name = :table AND table_schema = 'public'"
    )
    rows = conn.execute(query, {"table": table_name}).fetchall()
    return {entry[0] for entry in rows}
|
||||||
|
|
||||||
|
|
||||||
|
def get_index_names(conn, table_name: str) -> set:
    """Return the set of index names on *table_name* (PostgreSQL, public schema)."""
    query = text(
        "SELECT indexname FROM pg_indexes "
        "WHERE tablename = :table AND schemaname = 'public'"
    )
    rows = conn.execute(query, {"table": table_name}).fetchall()
    return {entry[0] for entry in rows}
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Split the old single inventory "location" into warehouse +
    bin_location, backfilling existing rows. Idempotent: every column add
    and index create is guarded by a catalog check."""
    conn = op.get_bind()

    # Only add the columns that are not already present.
    present_columns = get_column_names(conn, "inventory")
    if "warehouse" not in present_columns:
        op.add_column(
            "inventory",
            sa.Column("warehouse", sa.String(), nullable=False, server_default="strassen"),
        )
    if "bin_location" not in present_columns:
        op.add_column(
            "inventory",
            sa.Column("bin_location", sa.String(), nullable=False, server_default=""),
        )

    # Backfill: the old "location" value becomes the bin location and every
    # affected row is assigned the default warehouse.
    conn.execute(text("""
        UPDATE inventory
        SET bin_location = COALESCE(location, 'UNKNOWN'),
            warehouse = 'strassen'
        WHERE bin_location IS NULL OR bin_location = ''
    """))

    # Create supporting indexes only where missing.
    present_indexes = get_index_names(conn, "inventory")
    if "idx_inventory_warehouse_bin" not in present_indexes:
        op.create_index("idx_inventory_warehouse_bin", "inventory", ["warehouse", "bin_location"], unique=False)
    if "ix_inventory_bin_location" not in present_indexes:
        op.create_index(op.f("ix_inventory_bin_location"), "inventory", ["bin_location"], unique=False)
    if "ix_inventory_warehouse" not in present_indexes:
        op.create_index(op.f("ix_inventory_warehouse"), "inventory", ["warehouse"], unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the warehouse/bin_location columns and their indexes,
    checking the catalog first so a partial upgrade can still roll back."""
    bind = op.get_bind()

    # Drop only the indexes that actually exist; the ix_* names were created
    # through op.f(), so they are dropped the same way.
    present_indexes = get_index_names(bind, "inventory")
    for index_name, use_naming in (
        ("ix_inventory_warehouse", True),
        ("ix_inventory_bin_location", True),
        ("idx_inventory_warehouse_bin", False),
    ):
        if index_name in present_indexes:
            op.drop_index(op.f(index_name) if use_naming else index_name, table_name="inventory")

    # Drop only the columns that actually exist.
    present_columns = get_column_names(bind, "inventory")
    for column_name in ("bin_location", "warehouse"):
        if column_name in present_columns:
            op.drop_column("inventory", column_name)
|
||||||
@@ -0,0 +1,223 @@
|
|||||||
|
"""convert_prices_to_integer_cents
|
||||||
|
|
||||||
|
Revision ID: e1f2a3b4c5d6
|
||||||
|
Revises: c00d2985701f
|
||||||
|
Create Date: 2025-12-20 21:30:00.000000
|
||||||
|
|
||||||
|
Converts all price/amount columns from Float to Integer cents.
|
||||||
|
This follows e-commerce best practices (Stripe, PayPal, Shopify) for
|
||||||
|
precise monetary calculations.
|
||||||
|
|
||||||
|
Example: €105.91 is stored as 10591 (integer cents)
|
||||||
|
|
||||||
|
Affected tables:
|
||||||
|
- products: price, sale_price, supplier_cost, margin_percent
|
||||||
|
- orders: subtotal, tax_amount, shipping_amount, discount_amount, total_amount
|
||||||
|
- order_items: unit_price, total_price
|
||||||
|
- cart_items: price_at_add
|
||||||
|
- marketplace_products: price_numeric, sale_price_numeric
|
||||||
|
|
||||||
|
See docs/architecture/money-handling.md for full documentation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision: str = "e1f2a3b4c5d6"  # unique id of this migration
down_revision: str | None = "c00d2985701f"  # parent revision in the chain
branch_labels: str | Sequence[str] | None = None  # no named branches
depends_on: str | Sequence[str] | None = None  # no cross-branch dependencies
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Convert every Float money column to an Integer cents column.

    For each table the pattern is: add the new ``*_cents`` column as
    nullable, copy data over with raw SQL (``ROUND(value * 100)``), then
    drop the old Float column and, where the model requires it, flip the
    new column to NOT NULL.  NOTE(review): ``ROUND`` on a float can be off
    by one cent for values that are not exactly representable — assumed
    acceptable for this data set; confirm if exact reconciliation matters.
    """
    # SQLite requires batch mode for column alterations
    # Strategy: Add new _cents columns, migrate data, drop old columns

    # === PRODUCTS TABLE ===
    with op.batch_alter_table("products", schema=None) as batch_op:
        # Add new cents columns (nullable for now so existing rows are legal)
        batch_op.add_column(sa.Column("price_cents", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("sale_price_cents", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("supplier_cost_cents", sa.Integer(), nullable=True))
        # margin_percent is a percentage, not money; stored as percent * 100
        batch_op.add_column(sa.Column("margin_percent_x100", sa.Integer(), nullable=True))

    # Migrate data for products.  price defaults to 0 when NULL; the other
    # three stay NULL when the source value is NULL.
    op.execute("UPDATE products SET price_cents = ROUND(COALESCE(price, 0) * 100)")
    op.execute("UPDATE products SET sale_price_cents = ROUND(sale_price * 100) WHERE sale_price IS NOT NULL")
    op.execute("UPDATE products SET supplier_cost_cents = ROUND(supplier_cost * 100) WHERE supplier_cost IS NOT NULL")
    op.execute("UPDATE products SET margin_percent_x100 = ROUND(margin_percent * 100) WHERE margin_percent IS NOT NULL")

    # Drop old columns
    with op.batch_alter_table("products", schema=None) as batch_op:
        batch_op.drop_column("price")
        batch_op.drop_column("sale_price")
        batch_op.drop_column("supplier_cost")
        batch_op.drop_column("margin_percent")

    # === ORDERS TABLE ===
    with op.batch_alter_table("orders", schema=None) as batch_op:
        batch_op.add_column(sa.Column("subtotal_cents", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("tax_amount_cents", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("shipping_amount_cents", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("discount_amount_cents", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("total_amount_cents", sa.Integer(), nullable=True))

    # Migrate data for orders — all amounts default to 0 when NULL
    op.execute("UPDATE orders SET subtotal_cents = ROUND(COALESCE(subtotal, 0) * 100)")
    op.execute("UPDATE orders SET tax_amount_cents = ROUND(COALESCE(tax_amount, 0) * 100)")
    op.execute("UPDATE orders SET shipping_amount_cents = ROUND(COALESCE(shipping_amount, 0) * 100)")
    op.execute("UPDATE orders SET discount_amount_cents = ROUND(COALESCE(discount_amount, 0) * 100)")
    op.execute("UPDATE orders SET total_amount_cents = ROUND(COALESCE(total_amount, 0) * 100)")

    # Make total_amount_cents NOT NULL after migration (safe: COALESCE above
    # guarantees every row has a value by now)
    with op.batch_alter_table("orders", schema=None) as batch_op:
        batch_op.drop_column("subtotal")
        batch_op.drop_column("tax_amount")
        batch_op.drop_column("shipping_amount")
        batch_op.drop_column("discount_amount")
        batch_op.drop_column("total_amount")
        # Alter total_amount_cents to be NOT NULL
        batch_op.alter_column("total_amount_cents",
                              existing_type=sa.Integer(),
                              nullable=False)

    # === ORDER_ITEMS TABLE ===
    with op.batch_alter_table("order_items", schema=None) as batch_op:
        batch_op.add_column(sa.Column("unit_price_cents", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("total_price_cents", sa.Integer(), nullable=True))

    # Migrate data for order_items
    op.execute("UPDATE order_items SET unit_price_cents = ROUND(COALESCE(unit_price, 0) * 100)")
    op.execute("UPDATE order_items SET total_price_cents = ROUND(COALESCE(total_price, 0) * 100)")

    with op.batch_alter_table("order_items", schema=None) as batch_op:
        batch_op.drop_column("unit_price")
        batch_op.drop_column("total_price")
        # Both prices are required on a line item
        batch_op.alter_column("unit_price_cents",
                              existing_type=sa.Integer(),
                              nullable=False)
        batch_op.alter_column("total_price_cents",
                              existing_type=sa.Integer(),
                              nullable=False)

    # === CART_ITEMS TABLE ===
    with op.batch_alter_table("cart_items", schema=None) as batch_op:
        batch_op.add_column(sa.Column("price_at_add_cents", sa.Integer(), nullable=True))

    # Migrate data for cart_items
    op.execute("UPDATE cart_items SET price_at_add_cents = ROUND(COALESCE(price_at_add, 0) * 100)")

    with op.batch_alter_table("cart_items", schema=None) as batch_op:
        batch_op.drop_column("price_at_add")
        batch_op.alter_column("price_at_add_cents",
                              existing_type=sa.Integer(),
                              nullable=False)

    # === MARKETPLACE_PRODUCTS TABLE ===
    with op.batch_alter_table("marketplace_products", schema=None) as batch_op:
        batch_op.add_column(sa.Column("price_cents", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("sale_price_cents", sa.Integer(), nullable=True))
        # weight piggybacks on this migration: Float kg -> Integer grams
        batch_op.add_column(sa.Column("weight_grams", sa.Integer(), nullable=True))

    # Migrate data for marketplace_products (NULLs stay NULL — these columns
    # remain nullable)
    op.execute("UPDATE marketplace_products SET price_cents = ROUND(price_numeric * 100) WHERE price_numeric IS NOT NULL")
    op.execute("UPDATE marketplace_products SET sale_price_cents = ROUND(sale_price_numeric * 100) WHERE sale_price_numeric IS NOT NULL")
    op.execute("UPDATE marketplace_products SET weight_grams = ROUND(weight * 1000) WHERE weight IS NOT NULL")

    with op.batch_alter_table("marketplace_products", schema=None) as batch_op:
        batch_op.drop_column("price_numeric")
        batch_op.drop_column("sale_price_numeric")
        batch_op.drop_column("weight")
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Restore the Float money columns from the Integer cents columns.

    Mirrors upgrade() in reverse table order: add the old Float column
    back, recompute values as ``cents / 100.0``, drop the cents column,
    and restore NOT NULL where upgrade() enforced it.  NOTE(review):
    values that were rounded during upgrade are not recovered exactly.
    """
    # === MARKETPLACE_PRODUCTS TABLE ===
    with op.batch_alter_table("marketplace_products", schema=None) as batch_op:
        batch_op.add_column(sa.Column("price_numeric", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("sale_price_numeric", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("weight", sa.Float(), nullable=True))

    # NULL cents stay NULL — these columns were nullable before upgrade too
    op.execute("UPDATE marketplace_products SET price_numeric = price_cents / 100.0 WHERE price_cents IS NOT NULL")
    op.execute("UPDATE marketplace_products SET sale_price_numeric = sale_price_cents / 100.0 WHERE sale_price_cents IS NOT NULL")
    op.execute("UPDATE marketplace_products SET weight = weight_grams / 1000.0 WHERE weight_grams IS NOT NULL")

    with op.batch_alter_table("marketplace_products", schema=None) as batch_op:
        batch_op.drop_column("price_cents")
        batch_op.drop_column("sale_price_cents")
        batch_op.drop_column("weight_grams")

    # === CART_ITEMS TABLE ===
    with op.batch_alter_table("cart_items", schema=None) as batch_op:
        batch_op.add_column(sa.Column("price_at_add", sa.Float(), nullable=True))

    # No WHERE guard needed: price_at_add_cents is NOT NULL after upgrade
    op.execute("UPDATE cart_items SET price_at_add = price_at_add_cents / 100.0")

    with op.batch_alter_table("cart_items", schema=None) as batch_op:
        batch_op.drop_column("price_at_add_cents")
        batch_op.alter_column("price_at_add",
                              existing_type=sa.Float(),
                              nullable=False)

    # === ORDER_ITEMS TABLE ===
    with op.batch_alter_table("order_items", schema=None) as batch_op:
        batch_op.add_column(sa.Column("unit_price", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("total_price", sa.Float(), nullable=True))

    op.execute("UPDATE order_items SET unit_price = unit_price_cents / 100.0")
    op.execute("UPDATE order_items SET total_price = total_price_cents / 100.0")

    with op.batch_alter_table("order_items", schema=None) as batch_op:
        batch_op.drop_column("unit_price_cents")
        batch_op.drop_column("total_price_cents")
        batch_op.alter_column("unit_price",
                              existing_type=sa.Float(),
                              nullable=False)
        batch_op.alter_column("total_price",
                              existing_type=sa.Float(),
                              nullable=False)

    # === ORDERS TABLE ===
    with op.batch_alter_table("orders", schema=None) as batch_op:
        batch_op.add_column(sa.Column("subtotal", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("tax_amount", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("shipping_amount", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("discount_amount", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("total_amount", sa.Float(), nullable=True))

    op.execute("UPDATE orders SET subtotal = subtotal_cents / 100.0")
    op.execute("UPDATE orders SET tax_amount = tax_amount_cents / 100.0")
    op.execute("UPDATE orders SET shipping_amount = shipping_amount_cents / 100.0")
    op.execute("UPDATE orders SET discount_amount = discount_amount_cents / 100.0")
    op.execute("UPDATE orders SET total_amount = total_amount_cents / 100.0")

    with op.batch_alter_table("orders", schema=None) as batch_op:
        batch_op.drop_column("subtotal_cents")
        batch_op.drop_column("tax_amount_cents")
        batch_op.drop_column("shipping_amount_cents")
        batch_op.drop_column("discount_amount_cents")
        batch_op.drop_column("total_amount_cents")
        # total_amount was the only NOT NULL Float before the upgrade
        batch_op.alter_column("total_amount",
                              existing_type=sa.Float(),
                              nullable=False)

    # === PRODUCTS TABLE ===
    with op.batch_alter_table("products", schema=None) as batch_op:
        batch_op.add_column(sa.Column("price", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("sale_price", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("supplier_cost", sa.Float(), nullable=True))
        batch_op.add_column(sa.Column("margin_percent", sa.Float(), nullable=True))

    op.execute("UPDATE products SET price = price_cents / 100.0 WHERE price_cents IS NOT NULL")
    op.execute("UPDATE products SET sale_price = sale_price_cents / 100.0 WHERE sale_price_cents IS NOT NULL")
    op.execute("UPDATE products SET supplier_cost = supplier_cost_cents / 100.0 WHERE supplier_cost_cents IS NOT NULL")
    op.execute("UPDATE products SET margin_percent = margin_percent_x100 / 100.0 WHERE margin_percent_x100 IS NOT NULL")

    with op.batch_alter_table("products", schema=None) as batch_op:
        batch_op.drop_column("price_cents")
        batch_op.drop_column("sale_price_cents")
        batch_op.drop_column("supplier_cost_cents")
        batch_op.drop_column("margin_percent_x100")
|
||||||
339
alembic/versions_backup/e3f4a5b6c7d8_add_messaging_tables.py
Normal file
339
alembic/versions_backup/e3f4a5b6c7d8_add_messaging_tables.py
Normal file
@@ -0,0 +1,339 @@
|
|||||||
|
"""add_messaging_tables
|
||||||
|
|
||||||
|
Revision ID: e3f4a5b6c7d8
|
||||||
|
Revises: c9e22eadf533
|
||||||
|
Create Date: 2025-12-21
|
||||||
|
|
||||||
|
This migration adds the messaging system tables:
|
||||||
|
- conversations: Threaded conversation threads
|
||||||
|
- conversation_participants: Links users/customers to conversations
|
||||||
|
- messages: Individual messages within conversations
|
||||||
|
- message_attachments: File attachments for messages
|
||||||
|
|
||||||
|
Supports three communication channels:
|
||||||
|
- Admin <-> Vendor
|
||||||
|
- Vendor <-> Customer
|
||||||
|
- Admin <-> Customer
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy import inspect
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision: str = "e3f4a5b6c7d8"  # unique id of this migration
down_revision: str | None = "c9e22eadf533"  # parent revision in the chain
branch_labels: str | Sequence[str] | None = None  # no named branches
depends_on: str | Sequence[str] | None = None  # no cross-branch dependencies
|
||||||
|
|
||||||
|
|
||||||
|
def table_exists(table_name: str) -> bool:
    """Return True when *table_name* is already present in the database."""
    connection = op.get_bind()
    return table_name in inspect(connection).get_table_names()
|
||||||
|
|
||||||
|
|
||||||
|
def index_exists(index_name: str, table_name: str) -> bool:
    """Return True when *table_name* carries an index named *index_name*.

    Reflection failures (e.g. the table itself does not exist) are
    treated as "index not present" so this can be used as a safe guard.
    """
    inspector = inspect(op.get_bind())
    try:
        existing = {idx["name"] for idx in inspector.get_indexes(table_name)}
    except Exception:  # best-effort: missing table or unreflectable backend
        return False
    return index_name in existing
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the four messaging tables, skipping any that already exist.

    Each CREATE is guarded by table_exists() so the migration can be
    re-run against a partially migrated database.  All created_at /
    updated_at columns default server-side to CURRENT_TIMESTAMP.
    """
    # =========================================================================
    # Step 1: Create conversations table
    # =========================================================================
    if not table_exists("conversations"):
        op.create_table(
            "conversations",
            sa.Column("id", sa.Integer(), nullable=False),
            # Which pair of roles this thread connects
            sa.Column(
                "conversation_type",
                sa.Enum(
                    "admin_vendor",
                    "vendor_customer",
                    "admin_customer",
                    name="conversationtype",
                ),
                nullable=False,
            ),
            sa.Column("subject", sa.String(length=500), nullable=False),
            # Nullable: admin<->customer threads have no vendor
            sa.Column("vendor_id", sa.Integer(), nullable=True),
            sa.Column("is_closed", sa.Boolean(), nullable=False, server_default="0"),
            sa.Column("closed_at", sa.DateTime(), nullable=True),
            # Polymorphic "who closed it": role enum plus an id in that role's table
            sa.Column(
                "closed_by_type",
                sa.Enum("admin", "vendor", "customer", name="participanttype"),
                nullable=True,
            ),
            sa.Column("closed_by_id", sa.Integer(), nullable=True),
            # Denormalized activity fields for cheap inbox sorting
            # (presumably maintained by the application layer — TODO confirm)
            sa.Column("last_message_at", sa.DateTime(), nullable=True),
            sa.Column("message_count", sa.Integer(), nullable=False, server_default="0"),
            sa.Column(
                "created_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.Column(
                "updated_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.ForeignKeyConstraint(
                ["vendor_id"],
                ["vendors.id"],
            ),
            sa.PrimaryKeyConstraint("id"),
        )
        op.create_index(
            op.f("ix_conversations_id"), "conversations", ["id"], unique=False
        )
        op.create_index(
            op.f("ix_conversations_conversation_type"),
            "conversations",
            ["conversation_type"],
            unique=False,
        )
        op.create_index(
            op.f("ix_conversations_vendor_id"),
            "conversations",
            ["vendor_id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_conversations_last_message_at"),
            "conversations",
            ["last_message_at"],
            unique=False,
        )
        # Composite index: "all threads of this type for this vendor"
        op.create_index(
            "ix_conversations_type_vendor",
            "conversations",
            ["conversation_type", "vendor_id"],
            unique=False,
        )

    # =========================================================================
    # Step 2: Create conversation_participants table
    # =========================================================================
    if not table_exists("conversation_participants"):
        op.create_table(
            "conversation_participants",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("conversation_id", sa.Integer(), nullable=False),
            # Polymorphic participant: role enum + id within that role's table
            sa.Column(
                "participant_type",
                sa.Enum("admin", "vendor", "customer", name="participanttype"),
                nullable=False,
            ),
            sa.Column("participant_id", sa.Integer(), nullable=False),
            sa.Column("vendor_id", sa.Integer(), nullable=True),
            # Per-participant read/notification state
            sa.Column("unread_count", sa.Integer(), nullable=False, server_default="0"),
            sa.Column("last_read_at", sa.DateTime(), nullable=True),
            sa.Column(
                "email_notifications", sa.Boolean(), nullable=False, server_default="1"
            ),
            sa.Column("muted", sa.Boolean(), nullable=False, server_default="0"),
            sa.Column(
                "created_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.Column(
                "updated_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            # Participants disappear with their conversation
            sa.ForeignKeyConstraint(
                ["conversation_id"],
                ["conversations.id"],
                ondelete="CASCADE",
            ),
            sa.ForeignKeyConstraint(
                ["vendor_id"],
                ["vendors.id"],
            ),
            sa.PrimaryKeyConstraint("id"),
            # A given participant may appear at most once per conversation
            sa.UniqueConstraint(
                "conversation_id",
                "participant_type",
                "participant_id",
                name="uq_conversation_participant",
            ),
        )
        op.create_index(
            op.f("ix_conversation_participants_id"),
            "conversation_participants",
            ["id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_conversation_participants_conversation_id"),
            "conversation_participants",
            ["conversation_id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_conversation_participants_participant_id"),
            "conversation_participants",
            ["participant_id"],
            unique=False,
        )
        # Composite index: "all conversations this participant belongs to"
        op.create_index(
            "ix_participant_lookup",
            "conversation_participants",
            ["participant_type", "participant_id"],
            unique=False,
        )

    # =========================================================================
    # Step 3: Create messages table
    # =========================================================================
    if not table_exists("messages"):
        op.create_table(
            "messages",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("conversation_id", sa.Integer(), nullable=False),
            # Polymorphic sender, same scheme as participants
            sa.Column(
                "sender_type",
                sa.Enum("admin", "vendor", "customer", name="participanttype"),
                nullable=False,
            ),
            sa.Column("sender_id", sa.Integer(), nullable=False),
            sa.Column("content", sa.Text(), nullable=False),
            sa.Column(
                "is_system_message", sa.Boolean(), nullable=False, server_default="0"
            ),
            # Soft delete: row is kept, flagged, and attributed
            sa.Column("is_deleted", sa.Boolean(), nullable=False, server_default="0"),
            sa.Column("deleted_at", sa.DateTime(), nullable=True),
            sa.Column(
                "deleted_by_type",
                sa.Enum("admin", "vendor", "customer", name="participanttype"),
                nullable=True,
            ),
            sa.Column("deleted_by_id", sa.Integer(), nullable=True),
            sa.Column(
                "created_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.Column(
                "updated_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.ForeignKeyConstraint(
                ["conversation_id"],
                ["conversations.id"],
                ondelete="CASCADE",
            ),
            sa.PrimaryKeyConstraint("id"),
        )
        op.create_index(op.f("ix_messages_id"), "messages", ["id"], unique=False)
        op.create_index(
            op.f("ix_messages_conversation_id"),
            "messages",
            ["conversation_id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_messages_sender_id"), "messages", ["sender_id"], unique=False
        )
        # Composite index for paging a thread in chronological order
        op.create_index(
            "ix_messages_conversation_created",
            "messages",
            ["conversation_id", "created_at"],
            unique=False,
        )

    # =========================================================================
    # Step 4: Create message_attachments table
    # =========================================================================
    if not table_exists("message_attachments"):
        op.create_table(
            "message_attachments",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("message_id", sa.Integer(), nullable=False),
            # Stored name vs. the name the uploader used
            sa.Column("filename", sa.String(length=255), nullable=False),
            sa.Column("original_filename", sa.String(length=255), nullable=False),
            sa.Column("file_path", sa.String(length=1000), nullable=False),
            sa.Column("file_size", sa.Integer(), nullable=False),
            sa.Column("mime_type", sa.String(length=100), nullable=False),
            # Image-only metadata; NULL for non-image attachments
            sa.Column("is_image", sa.Boolean(), nullable=False, server_default="0"),
            sa.Column("image_width", sa.Integer(), nullable=True),
            sa.Column("image_height", sa.Integer(), nullable=True),
            sa.Column("thumbnail_path", sa.String(length=1000), nullable=True),
            sa.Column(
                "created_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.Column(
                "updated_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.ForeignKeyConstraint(
                ["message_id"],
                ["messages.id"],
                ondelete="CASCADE",
            ),
            sa.PrimaryKeyConstraint("id"),
        )
        op.create_index(
            op.f("ix_message_attachments_id"),
            "message_attachments",
            ["id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_message_attachments_message_id"),
            "message_attachments",
            ["message_id"],
            unique=False,
        )

    # =========================================================================
    # Step 5: Add platform setting for attachment size limit
    # =========================================================================
    # Note: This will be added via seed script or manually
    # Key: message_attachment_max_size_mb
    # Value: 10
    # Category: messaging
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the messaging tables created by upgrade().

    Tables are removed child-first so no foreign key ever dangles, and
    each drop is guarded by an existence check for re-runnability.
    """
    for table in (
        "message_attachments",
        "messages",
        "conversation_participants",
        "conversations",
    ):
        if table_exists(table):
            op.drop_table(table)

    # Note: Enum types are not dropped automatically
    # They can be manually dropped with:
    # op.execute("DROP TYPE IF EXISTS conversationtype")
    # op.execute("DROP TYPE IF EXISTS participanttype")
|
||||||
@@ -0,0 +1,147 @@
|
|||||||
|
"""Create translation tables for multi-language support
|
||||||
|
|
||||||
|
Revision ID: f2b3c4d5e6f7
|
||||||
|
Revises: e1a2b3c4d5e6
|
||||||
|
Create Date: 2025-12-11
|
||||||
|
|
||||||
|
This migration creates:
|
||||||
|
- marketplace_product_translations: Localized content from marketplace sources
|
||||||
|
- product_translations: Vendor-specific localized overrides
|
||||||
|
|
||||||
|
The translation tables support multi-language product information with
|
||||||
|
language fallback capabilities. Fields in product_translations can be
|
||||||
|
NULL to inherit from marketplace_product_translations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision: str = "f2b3c4d5e6f7"  # unique id of this migration
down_revision: str | None = "e1a2b3c4d5e6"  # parent revision in the chain
branch_labels: str | Sequence[str] | None = None  # no named branches
depends_on: str | Sequence[str] | None = None  # no cross-branch dependencies
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the two translation tables and their lookup indexes.

    marketplace_product_translations holds the source-provided localized
    content; product_translations holds vendor overrides whose nullable
    fields mean "inherit from the marketplace translation".
    """
    # Create marketplace_product_translations table
    # Note: Unique constraint is included in create_table for SQLite compatibility
    op.create_table(
        "marketplace_product_translations",
        sa.Column("id", sa.Integer(), primary_key=True),
        # Rows disappear with their marketplace product
        sa.Column(
            "marketplace_product_id",
            sa.Integer(),
            sa.ForeignKey("marketplace_products.id", ondelete="CASCADE"),
            nullable=False,
        ),
        # Language tag, e.g. "de" or "de-DE" (5 chars max)
        sa.Column("language", sa.String(5), nullable=False),
        # Localized content
        sa.Column("title", sa.String(), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("short_description", sa.String(500), nullable=True),
        # SEO fields (lengths match common search-snippet limits)
        sa.Column("meta_title", sa.String(70), nullable=True),
        sa.Column("meta_description", sa.String(160), nullable=True),
        sa.Column("url_slug", sa.String(255), nullable=True),
        # Source tracking: which import produced this row
        sa.Column("source_import_id", sa.Integer(), nullable=True),
        sa.Column("source_file", sa.String(), nullable=True),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        # Unique constraint included in table creation for SQLite:
        # one translation per product per language
        sa.UniqueConstraint(
            "marketplace_product_id",
            "language",
            name="uq_marketplace_product_translation",
        ),
    )

    # Create indexes for marketplace_product_translations
    op.create_index(
        "idx_mpt_marketplace_product_id",
        "marketplace_product_translations",
        ["marketplace_product_id"],
    )
    op.create_index(
        "idx_mpt_language",
        "marketplace_product_translations",
        ["language"],
    )

    # Create product_translations table
    # Note: Unique constraint is included in create_table for SQLite compatibility
    op.create_table(
        "product_translations",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column(
            "product_id",
            sa.Integer(),
            sa.ForeignKey("products.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("language", sa.String(5), nullable=False),
        # Overridable localized content (NULL = inherit from marketplace)
        sa.Column("title", sa.String(), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("short_description", sa.String(500), nullable=True),
        # SEO overrides
        sa.Column("meta_title", sa.String(70), nullable=True),
        sa.Column("meta_description", sa.String(160), nullable=True),
        sa.Column("url_slug", sa.String(255), nullable=True),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        # Unique constraint included in table creation for SQLite
        sa.UniqueConstraint("product_id", "language", name="uq_product_translation"),
    )

    # Create indexes for product_translations
    op.create_index(
        "idx_pt_product_id",
        "product_translations",
        ["product_id"],
    )
    # Composite index for the common (product, language) lookup
    op.create_index(
        "idx_pt_product_language",
        "product_translations",
        ["product_id", "language"],
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop both translation tables and their secondary indexes."""
    # product_translations goes first, mirroring the reverse of upgrade().
    for idx in ("idx_pt_product_language", "idx_pt_product_id"):
        op.drop_index(idx, table_name="product_translations")
    op.drop_table("product_translations")

    for idx in ("idx_mpt_language", "idx_mpt_marketplace_product_id"):
        op.drop_index(idx, table_name="marketplace_product_translations")
    op.drop_table("marketplace_product_translations")
|
||||||
@@ -0,0 +1,95 @@
|
|||||||
|
"""add_validator_type_to_code_quality
|
||||||
|
|
||||||
|
Revision ID: f4a5b6c7d8e9
|
||||||
|
Revises: e3f4a5b6c7d8
|
||||||
|
Create Date: 2025-12-21
|
||||||
|
|
||||||
|
This migration adds validator_type column to architecture scans and violations
|
||||||
|
to support multiple validator types (architecture, security, performance).
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "f4a5b6c7d8e9"
|
||||||
|
down_revision: str | None = "e3f4a5b6c7d8"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add an indexed ``validator_type`` column to the architecture tables.

    Every existing row defaults to "architecture", so the migration is
    safe to run on populated databases. The same column-plus-index
    treatment is applied to each table, in the original statement order.
    """
    affected_tables = (
        "architecture_scans",
        "architecture_violations",
        "architecture_rules",
    )
    for table_name in affected_tables:
        op.add_column(
            table_name,
            sa.Column(
                "validator_type",
                sa.String(length=20),
                nullable=False,
                server_default="architecture",
            ),
        )
        op.create_index(
            op.f(f"ix_{table_name}_validator_type"),
            table_name,
            ["validator_type"],
            unique=False,
        )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the ``validator_type`` index and column from each table.

    Keeps the original ordering: all indexes are dropped first, then all
    columns, walking the tables in reverse of the upgrade order.
    """
    tables_in_drop_order = (
        "architecture_rules",
        "architecture_violations",
        "architecture_scans",
    )
    for table_name in tables_in_drop_order:
        op.drop_index(
            op.f(f"ix_{table_name}_validator_type"),
            table_name=table_name,
        )
    for table_name in tables_in_drop_order:
        op.drop_column(table_name, "validator_type")
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
"""add template field to content pages for landing page designs
|
||||||
|
|
||||||
|
Revision ID: f68d8da5315a
|
||||||
|
Revises: 72aa309d4007
|
||||||
|
Create Date: 2025-11-22 23:51:40.694983
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "f68d8da5315a"
|
||||||
|
down_revision: str | None = "72aa309d4007"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add the ``template`` column used to select a landing-page design."""
    # The server_default keeps already-existing rows valid under the
    # NOT NULL constraint without a separate backfill step.
    template_column = sa.Column(
        "template",
        sa.String(length=50),
        nullable=False,
        server_default="default",
    )
    op.add_column("content_pages", template_column)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert the landing-page template support on ``content_pages``."""
    op.drop_column("content_pages", "template")
|
||||||
148
alembic/versions_backup/fa7d4d10e358_add_rbac_enhancements.py
Normal file
148
alembic/versions_backup/fa7d4d10e358_add_rbac_enhancements.py
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
"""add_rbac_enhancements
|
||||||
|
|
||||||
|
Revision ID: fa7d4d10e358
|
||||||
|
Revises: 4951b2e50581
|
||||||
|
Create Date: 2025-11-13 16:51:25.010057
|
||||||
|
|
||||||
|
SQLite-compatible version
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "fa7d4d10e358"
|
||||||
|
down_revision: str | None = "4951b2e50581"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    """Upgrade database schema for RBAC enhancements.

    SQLite-compatible version using batch operations for table modifications.
    Statement order matters: batch_alter_table recreates the table on SQLite,
    so the data backfills (op.execute) are deliberately placed after the
    structural changes they depend on.
    """

    # ========================================================================
    # User table changes
    # ========================================================================
    with op.batch_alter_table("users", schema=None) as batch_op:
        # NOTE(review): server_default="false" stores the literal string
        # 'false' on SQLite (no native boolean) — confirm the ORM coerces it.
        batch_op.add_column(
            sa.Column(
                "is_email_verified",
                sa.Boolean(),
                nullable=False,
                server_default="false",
            )
        )

    # Set existing active users as verified
    op.execute("UPDATE users SET is_email_verified = TRUE WHERE is_active = TRUE")

    # ========================================================================
    # VendorUser table changes (requires table recreation for SQLite)
    # ========================================================================
    with op.batch_alter_table("vendor_users", schema=None) as batch_op:
        # Add new columns
        batch_op.add_column(
            sa.Column(
                "user_type",
                sa.String(length=20),
                nullable=False,
                server_default="member",
            )
        )
        batch_op.add_column(
            sa.Column("invitation_token", sa.String(length=100), nullable=True)
        )
        batch_op.add_column(
            sa.Column("invitation_sent_at", sa.DateTime(), nullable=True)
        )
        batch_op.add_column(
            sa.Column("invitation_accepted_at", sa.DateTime(), nullable=True)
        )

        # Create index on invitation_token
        batch_op.create_index("idx_vendor_users_invitation_token", ["invitation_token"])

        # Modify role_id to be nullable (this recreates the table in SQLite)
        batch_op.alter_column("role_id", existing_type=sa.Integer(), nullable=True)

        # Change is_active default (this recreates the table in SQLite)
        batch_op.alter_column(
            "is_active", existing_type=sa.Boolean(), server_default="false"
        )

    # Set owners correctly (after table modifications)
    # SQLite-compatible UPDATE with subquery
    # NOTE(review): row-value IN requires SQLite >= 3.15 — confirm the
    # minimum supported SQLite version.
    op.execute(
        """
        UPDATE vendor_users
        SET user_type = 'owner'
        WHERE (vendor_id, user_id) IN (
            SELECT id, owner_user_id
            FROM vendors
        )
        """
    )

    # Set existing owners as active
    op.execute(
        """
        UPDATE vendor_users
        SET is_active = TRUE
        WHERE user_type = 'owner'
        """
    )

    # ========================================================================
    # Role table changes
    # ========================================================================
    with op.batch_alter_table("roles", schema=None) as batch_op:
        # Create index on vendor_id and name
        batch_op.create_index("idx_roles_vendor_name", ["vendor_id", "name"])

    # Note: JSONB conversion only for PostgreSQL
    # SQLite stores JSON as TEXT by default, no conversion needed
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
    """Downgrade database schema.

    SQLite-compatible version using batch operations. Reverses the upgrade
    in the opposite order: roles, then vendor_users, then users.
    """

    # ========================================================================
    # Role table changes
    # ========================================================================
    with op.batch_alter_table("roles", schema=None) as batch_op:
        batch_op.drop_index("idx_roles_vendor_name")

    # ========================================================================
    # VendorUser table changes
    # ========================================================================
    with op.batch_alter_table("vendor_users", schema=None) as batch_op:
        # Revert is_active default
        batch_op.alter_column(
            "is_active", existing_type=sa.Boolean(), server_default="true"
        )

        # Revert role_id to NOT NULL
        # Note: This might fail if there are NULL values
        batch_op.alter_column("role_id", existing_type=sa.Integer(), nullable=False)

        # Drop indexes and columns
        batch_op.drop_index("idx_vendor_users_invitation_token")
        batch_op.drop_column("invitation_accepted_at")
        batch_op.drop_column("invitation_sent_at")
        batch_op.drop_column("invitation_token")
        batch_op.drop_column("user_type")

    # ========================================================================
    # User table changes
    # ========================================================================
    with op.batch_alter_table("users", schema=None) as batch_op:
        batch_op.drop_column("is_email_verified")
|
||||||
@@ -0,0 +1,84 @@
|
|||||||
|
"""add_language_settings_to_vendor_user_customer
|
||||||
|
|
||||||
|
Revision ID: fcfdc02d5138
|
||||||
|
Revises: b412e0b49c2e
|
||||||
|
Create Date: 2025-12-13 20:08:27.120863
|
||||||
|
|
||||||
|
This migration adds language preference fields to support multi-language UI:
|
||||||
|
- Vendor: default_language, dashboard_language, storefront_language
|
||||||
|
- User: preferred_language
|
||||||
|
- Customer: preferred_language
|
||||||
|
|
||||||
|
Supported languages: en (English), fr (French), de (German), lb (Luxembourgish)
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "fcfdc02d5138"
|
||||||
|
down_revision: str | None = "b412e0b49c2e"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add language-preference columns to vendors, users and customers.

    Vendor columns are NOT NULL with a French default so existing rows
    stay valid; the per-account columns are nullable, where NULL means
    "fall back to the contextual default".
    """
    # ------------------------------------------------------------------
    # Vendor language settings: content default, dashboard UI language,
    # and the storefront's default language — all the same column shape.
    # ------------------------------------------------------------------
    vendor_language_columns = (
        "default_language",
        "dashboard_language",
        "storefront_language",
    )
    for column_name in vendor_language_columns:
        op.add_column(
            "vendors",
            sa.Column(column_name, sa.String(5), nullable=False, server_default="fr"),
        )

    # JSON array of languages the vendor has enabled on the storefront.
    op.add_column(
        "vendors",
        sa.Column(
            "storefront_languages",
            sa.JSON,
            nullable=False,
            server_default='["fr", "de", "en"]'
        )
    )

    # ------------------------------------------------------------------
    # Per-account preferred language (users, then customers).
    # ------------------------------------------------------------------
    for account_table in ("users", "customers"):
        op.add_column(
            account_table,
            sa.Column("preferred_language", sa.String(5), nullable=True)
        )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the language-preference columns in reverse creation order."""
    columns_to_drop = (
        ("customers", "preferred_language"),
        ("users", "preferred_language"),
        ("vendors", "storefront_languages"),
        ("vendors", "storefront_language"),
        ("vendors", "dashboard_language"),
        ("vendors", "default_language"),
    )
    for table_name, column_name in columns_to_drop:
        op.drop_column(table_name, column_name)
|
||||||
@@ -0,0 +1,45 @@
|
|||||||
|
"""Add content_pages table for CMS
|
||||||
|
|
||||||
|
Revision ID: fef1d20ce8b4
|
||||||
|
Revises: fa7d4d10e358
|
||||||
|
Create Date: 2025-11-22 13:41:18.069674
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "fef1d20ce8b4"
|
||||||
|
down_revision: str | None = "fa7d4d10e358"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Replace hand-named indexes with Alembic-conventional ones.

    NOTE(review): despite the revision message ("Add content_pages table"),
    the visible commands only swap index names on ``roles`` and
    ``vendor_users`` — confirm the content_pages DDL lives elsewhere.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("idx_roles_vendor_name", table_name="roles")
    op.drop_index("idx_vendor_users_invitation_token", table_name="vendor_users")
    # Recreate under the op.f() naming convention used by autogenerate.
    op.create_index(
        op.f("ix_vendor_users_invitation_token"),
        "vendor_users",
        ["invitation_token"],
        unique=False,
    )
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Restore the original hand-named indexes removed by upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_vendor_users_invitation_token"), table_name="vendor_users")
    op.create_index(
        "idx_vendor_users_invitation_token",
        "vendor_users",
        ["invitation_token"],
        unique=False,
    )
    op.create_index(
        "idx_roles_vendor_name", "roles", ["vendor_id", "name"], unique=False
    )
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,83 @@
|
|||||||
|
"""add_scan_status_fields
|
||||||
|
|
||||||
|
Add background task status fields to architecture_scans table
|
||||||
|
for harmonized background task architecture.
|
||||||
|
|
||||||
|
Revision ID: g5b6c7d8e9f0
|
||||||
|
Revises: f4a5b6c7d8e9
|
||||||
|
Create Date: 2024-12-21
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "g5b6c7d8e9f0"
|
||||||
|
down_revision: str | None = "f4a5b6c7d8e9"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add background-task lifecycle fields to ``architecture_scans``.

    Pre-existing rows are marked 'completed' via the server default and
    have their started_at/completed_at backfilled from the scan timestamp.
    """
    # Add status field with default 'completed' for existing records
    # New records will use 'pending' as default
    op.add_column(
        "architecture_scans",
        sa.Column(
            "status",
            sa.String(length=30),
            nullable=False,
            server_default="completed",  # Existing scans are already completed
        ),
    )
    op.create_index(
        op.f("ix_architecture_scans_status"), "architecture_scans", ["status"]
    )

    # Nullable bookkeeping columns, added in the original order:
    # start/end of the run, failure detail, and the current progress step.
    lifecycle_columns = (
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("progress_message", sa.String(length=255), nullable=True),
    )
    for column in lifecycle_columns:
        op.add_column("architecture_scans", column)

    # Backfill window columns for pre-existing records via raw SQL for
    # efficiency (PostgreSQL syntax: string concat and ::interval cast).
    op.execute(
        """
        UPDATE architecture_scans
        SET started_at = timestamp,
            completed_at = timestamp + (COALESCE(duration_seconds, 0) || ' seconds')::interval
        WHERE started_at IS NULL
        """
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the background-task status fields from architecture_scans."""
    op.drop_index(op.f("ix_architecture_scans_status"), table_name="architecture_scans")
    # Columns are dropped in reverse of the order they were added.
    for column_name in (
        "progress_message",
        "error_message",
        "completed_at",
        "started_at",
        "status",
    ):
        op.drop_column("architecture_scans", column_name)
|
||||||
199
alembic/versions_backup/h6c7d8e9f0a1_add_invoice_tables.py
Normal file
199
alembic/versions_backup/h6c7d8e9f0a1_add_invoice_tables.py
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
"""Add invoice tables
|
||||||
|
|
||||||
|
Revision ID: h6c7d8e9f0a1
|
||||||
|
Revises: g5b6c7d8e9f0
|
||||||
|
Create Date: 2025-12-24
|
||||||
|
|
||||||
|
This migration adds:
|
||||||
|
- vendor_invoice_settings: Per-vendor invoice configuration
|
||||||
|
- invoices: Invoice records with seller/buyer snapshots
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "h6c7d8e9f0a1"
|
||||||
|
down_revision: str | None = "g5b6c7d8e9f0"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the invoicing tables.

    - ``vendor_invoice_settings``: one row per vendor (unique vendor_id)
      holding company details, VAT/OSS registration, invoice numbering,
      bank details and defaults.
    - ``invoices``: invoice records with JSON snapshots of seller/buyer
      data so a rendered invoice is immutable even if the vendor's
      settings change later.
    """
    # Create vendor_invoice_settings table
    op.create_table(
        "vendor_invoice_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        # Company details
        sa.Column("company_name", sa.String(length=255), nullable=False),
        sa.Column("company_address", sa.String(length=255), nullable=True),
        sa.Column("company_city", sa.String(length=100), nullable=True),
        sa.Column("company_postal_code", sa.String(length=20), nullable=True),
        sa.Column(
            "company_country", sa.String(length=2), server_default="LU", nullable=False
        ),
        # VAT information
        sa.Column("vat_number", sa.String(length=50), nullable=True),
        sa.Column("is_vat_registered", sa.Boolean(), server_default="1", nullable=False),
        # OSS
        sa.Column("is_oss_registered", sa.Boolean(), server_default="0", nullable=False),
        sa.Column("oss_registration_country", sa.String(length=2), nullable=True),
        # Invoice numbering (prefix + zero-padded counter)
        sa.Column(
            "invoice_prefix", sa.String(length=20), server_default="INV", nullable=False
        ),
        sa.Column("invoice_next_number", sa.Integer(), server_default="1", nullable=False),
        sa.Column(
            "invoice_number_padding", sa.Integer(), server_default="5", nullable=False
        ),
        # Payment information
        sa.Column("payment_terms", sa.Text(), nullable=True),
        sa.Column("bank_name", sa.String(length=255), nullable=True),
        sa.Column("bank_iban", sa.String(length=50), nullable=True),
        sa.Column("bank_bic", sa.String(length=20), nullable=True),
        # Footer
        sa.Column("footer_text", sa.Text(), nullable=True),
        # Default VAT rate (17.00 — presumably the Luxembourg standard
        # rate, matching the "LU" country default; confirm)
        sa.Column(
            "default_vat_rate", sa.Numeric(precision=5, scale=2), server_default="17.00", nullable=False
        ),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("vendor_id"),
    )
    op.create_index(
        op.f("ix_vendor_invoice_settings_id"),
        "vendor_invoice_settings",
        ["id"],
        unique=False,
    )
    # unique=True enforces the one-settings-row-per-vendor invariant.
    op.create_index(
        op.f("ix_vendor_invoice_settings_vendor_id"),
        "vendor_invoice_settings",
        ["vendor_id"],
        unique=True,
    )

    # Create invoices table
    op.create_table(
        "invoices",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        # Nullable: an invoice may exist without an originating order.
        sa.Column("order_id", sa.Integer(), nullable=True),
        # Invoice identification
        sa.Column("invoice_number", sa.String(length=50), nullable=False),
        sa.Column("invoice_date", sa.DateTime(timezone=True), nullable=False),
        # Status
        sa.Column(
            "status", sa.String(length=20), server_default="draft", nullable=False
        ),
        # Snapshots (JSON) — frozen copies taken at issue time.
        sa.Column("seller_details", sa.JSON(), nullable=False),
        sa.Column("buyer_details", sa.JSON(), nullable=False),
        sa.Column("line_items", sa.JSON(), nullable=False),
        # VAT information
        sa.Column(
            "vat_regime", sa.String(length=20), server_default="domestic", nullable=False
        ),
        sa.Column("destination_country", sa.String(length=2), nullable=True),
        sa.Column("vat_rate", sa.Numeric(precision=5, scale=2), nullable=False),
        sa.Column("vat_rate_label", sa.String(length=50), nullable=True),
        # Amounts (in cents, avoiding float rounding issues)
        sa.Column("currency", sa.String(length=3), server_default="EUR", nullable=False),
        sa.Column("subtotal_cents", sa.Integer(), nullable=False),
        sa.Column("vat_amount_cents", sa.Integer(), nullable=False),
        sa.Column("total_cents", sa.Integer(), nullable=False),
        # Payment info
        sa.Column("payment_terms", sa.Text(), nullable=True),
        sa.Column("bank_details", sa.JSON(), nullable=True),
        sa.Column("footer_text", sa.Text(), nullable=True),
        # PDF
        sa.Column("pdf_generated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("pdf_path", sa.String(length=500), nullable=True),
        # Notes
        sa.Column("notes", sa.Text(), nullable=True),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
        ),
        sa.ForeignKeyConstraint(
            ["order_id"],
            ["orders.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_invoices_id"), "invoices", ["id"], unique=False)
    op.create_index(op.f("ix_invoices_vendor_id"), "invoices", ["vendor_id"], unique=False)
    op.create_index(op.f("ix_invoices_order_id"), "invoices", ["order_id"], unique=False)
    # Invoice numbers must be unique per vendor (not globally).
    op.create_index(
        "idx_invoice_vendor_number",
        "invoices",
        ["vendor_id", "invoice_number"],
        unique=True,
    )
    op.create_index(
        "idx_invoice_vendor_date",
        "invoices",
        ["vendor_id", "invoice_date"],
        unique=False,
    )
    op.create_index(
        "idx_invoice_status",
        "invoices",
        ["vendor_id", "status"],
        unique=False,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the invoicing tables, indexes first, in reverse order."""
    # invoices table: hand-named composite indexes, then op.f()-named
    # single-column indexes, then the table itself.
    for index_name in (
        "idx_invoice_status",
        "idx_invoice_vendor_date",
        "idx_invoice_vendor_number",
    ):
        op.drop_index(index_name, table_name="invoices")
    for column_suffix in ("order_id", "vendor_id", "id"):
        op.drop_index(op.f(f"ix_invoices_{column_suffix}"), table_name="invoices")
    op.drop_table("invoices")

    # vendor_invoice_settings table
    for column_suffix in ("vendor_id", "id"):
        op.drop_index(
            op.f(f"ix_vendor_invoice_settings_{column_suffix}"),
            table_name="vendor_invoice_settings",
        )
    op.drop_table("vendor_invoice_settings")
|
||||||
149
alembic/versions_backup/i7d8e9f0a1b2_add_vendor_subscriptions.py
Normal file
149
alembic/versions_backup/i7d8e9f0a1b2_add_vendor_subscriptions.py
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
"""Add vendor subscriptions table
|
||||||
|
|
||||||
|
Revision ID: i7d8e9f0a1b2
|
||||||
|
Revises: h6c7d8e9f0a1
|
||||||
|
Create Date: 2025-12-24
|
||||||
|
|
||||||
|
This migration adds:
|
||||||
|
- vendor_subscriptions: Per-vendor subscription tracking with tier limits
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "i7d8e9f0a1b2"
|
||||||
|
down_revision: str | None = "h6c7d8e9f0a1"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create ``vendor_subscriptions``: one subscription row per vendor.

    Tracks tier, lifecycle status, billing period, per-period usage
    counters, optional per-vendor limit overrides, and (future) Stripe
    identifiers. vendor_id is unique — a vendor has at most one row.
    """
    # Create vendor_subscriptions table
    op.create_table(
        "vendor_subscriptions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        # Tier and status (string-valued; defaults: essential tier, trial)
        sa.Column(
            "tier", sa.String(length=20), server_default="essential", nullable=False
        ),
        sa.Column(
            "status", sa.String(length=20), server_default="trial", nullable=False
        ),
        # Billing period
        sa.Column("period_start", sa.DateTime(timezone=True), nullable=False),
        sa.Column("period_end", sa.DateTime(timezone=True), nullable=False),
        sa.Column("is_annual", sa.Boolean(), server_default="0", nullable=False),
        # Trial
        sa.Column("trial_ends_at", sa.DateTime(timezone=True), nullable=True),
        # Usage counters, reset per billing period
        sa.Column("orders_this_period", sa.Integer(), server_default="0", nullable=False),
        sa.Column("orders_limit_reached_at", sa.DateTime(timezone=True), nullable=True),
        # Custom overrides (NULL = use the tier's built-in limits)
        sa.Column("custom_orders_limit", sa.Integer(), nullable=True),
        sa.Column("custom_products_limit", sa.Integer(), nullable=True),
        sa.Column("custom_team_limit", sa.Integer(), nullable=True),
        # Payment (future Stripe integration)
        sa.Column("stripe_customer_id", sa.String(length=100), nullable=True),
        sa.Column("stripe_subscription_id", sa.String(length=100), nullable=True),
        # Cancellation
        sa.Column("cancelled_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("cancellation_reason", sa.Text(), nullable=True),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("vendor_id"),
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_id"),
        "vendor_subscriptions",
        ["id"],
        unique=False,
    )
    # unique=True mirrors the table-level UniqueConstraint on vendor_id.
    op.create_index(
        op.f("ix_vendor_subscriptions_vendor_id"),
        "vendor_subscriptions",
        ["vendor_id"],
        unique=True,
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_tier"),
        "vendor_subscriptions",
        ["tier"],
        unique=False,
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_status"),
        "vendor_subscriptions",
        ["status"],
        unique=False,
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_stripe_customer_id"),
        "vendor_subscriptions",
        ["stripe_customer_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_stripe_subscription_id"),
        "vendor_subscriptions",
        ["stripe_subscription_id"],
        unique=False,
    )
    # Composite indexes for the common dashboard queries.
    op.create_index(
        "idx_subscription_vendor_status",
        "vendor_subscriptions",
        ["vendor_id", "status"],
        unique=False,
    )
    op.create_index(
        "idx_subscription_period",
        "vendor_subscriptions",
        ["period_start", "period_end"],
        unique=False,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Tear down the vendor_subscriptions table and every index from upgrade().

    Indexes are removed first — composite ones by their literal names, then
    the auto-named single-column ones via Alembic's ``op.f`` naming — and the
    table itself last.
    """
    table = "vendor_subscriptions"

    # Composite indexes were created with explicit names.
    for composite in ("idx_subscription_period", "idx_subscription_vendor_status"):
        op.drop_index(composite, table_name=table)

    # Single-column indexes follow the ix_<table>_<column> convention.
    for column in (
        "stripe_subscription_id",
        "stripe_customer_id",
        "status",
        "tier",
        "vendor_id",
        "id",
    ):
        op.drop_index(op.f(f"ix_vendor_subscriptions_{column}"), table_name=table)

    op.drop_table(table)
|
||||||
@@ -0,0 +1,262 @@
|
|||||||
|
"""Populate product fields from marketplace for independence refactor
|
||||||
|
|
||||||
|
Revision ID: j8e9f0a1b2c3
|
||||||
|
Revises: i7d8e9f0a1b2
|
||||||
|
Create Date: 2025-12-24
|
||||||
|
|
||||||
|
This migration populates NULL fields on products and product_translations
|
||||||
|
with values from their linked marketplace products. This is part of the
|
||||||
|
"product independence" refactor where products become standalone entities
|
||||||
|
instead of inheriting from marketplace products via NULL fallback.
|
||||||
|
|
||||||
|
After this migration:
|
||||||
|
- All Product fields will have actual values (no NULL inheritance)
|
||||||
|
- All ProductTranslation records will exist with actual values
|
||||||
|
- The marketplace_product_id FK is kept for "view original source" feature
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "j8e9f0a1b2c3"
|
||||||
|
down_revision: str | None = "i7d8e9f0a1b2"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Populate NULL product fields with marketplace product values."""
    # Raw-SQL connection for the data backfill.
    connection = op.get_bind()

    # =========================================================================
    # STEP 1: Populate Product fields from MarketplaceProduct
    # =========================================================================

    # Scalar fields copied straight from the linked marketplace product.
    # Pairs are (products column, marketplace_products column); the primary
    # image lives under a different name ("image_link") on the marketplace side.
    field_map = (
        ("price_cents", "price_cents"),
        ("sale_price_cents", "sale_price_cents"),
        ("brand", "brand"),
        ("condition", "condition"),
        ("availability", "availability"),
        ("primary_image_url", "image_link"),
        ("additional_images", "additional_images"),
    )
    for target, source in field_map:
        connection.execute(text(f"""
            UPDATE products
            SET {target} = (
                SELECT mp.{source}
                FROM marketplace_products mp
                WHERE mp.id = products.marketplace_product_id
            )
            WHERE {target} IS NULL
            AND marketplace_product_id IS NOT NULL
        """))

    # Currency needs special handling: fall back to 'EUR' when the marketplace
    # product itself has no currency set.
    connection.execute(text("""
        UPDATE products
        SET currency = COALESCE(
            (SELECT mp.currency FROM marketplace_products mp WHERE mp.id = products.marketplace_product_id),
            'EUR'
        )
        WHERE currency IS NULL
        AND marketplace_product_id IS NOT NULL
    """))

    # =========================================================================
    # STEP 2: Create missing ProductTranslation records from
    # MarketplaceProductTranslation — one row per language the marketplace
    # product has and the product does not.
    # =========================================================================
    connection.execute(text("""
        INSERT INTO product_translations (product_id, language, title, description, short_description,
                                          meta_title, meta_description, url_slug, created_at, updated_at)
        SELECT
            p.id,
            mpt.language,
            mpt.title,
            mpt.description,
            mpt.short_description,
            mpt.meta_title,
            mpt.meta_description,
            mpt.url_slug,
            CURRENT_TIMESTAMP,
            CURRENT_TIMESTAMP
        FROM products p
        JOIN marketplace_products mp ON mp.id = p.marketplace_product_id
        JOIN marketplace_product_translations mpt ON mpt.marketplace_product_id = mp.id
        WHERE NOT EXISTS (
            SELECT 1 FROM product_translations pt
            WHERE pt.product_id = p.id AND pt.language = mpt.language
        )
    """))

    # =========================================================================
    # STEP 3: Update existing ProductTranslation NULL fields with marketplace
    # values, matched on both product and language.
    # =========================================================================
    for column in (
        "title",
        "description",
        "short_description",
        "meta_title",
        "meta_description",
        "url_slug",
    ):
        connection.execute(text(f"""
            UPDATE product_translations
            SET {column} = (
                SELECT mpt.{column}
                FROM products p
                JOIN marketplace_products mp ON mp.id = p.marketplace_product_id
                JOIN marketplace_product_translations mpt ON mpt.marketplace_product_id = mp.id
                    AND mpt.language = product_translations.language
                WHERE p.id = product_translations.product_id
            )
            WHERE {column} IS NULL
        """))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Intentionally a no-op.

    The upgrade copied data into products/product_translations; the source
    marketplace rows were never modified. The populated fields are not reset
    to NULL on downgrade because:
    1. Doing so would discard any vendor customizations made after migration.
    2. The model code may still work with populated fields.
    """
|
||||||
@@ -0,0 +1,69 @@
|
|||||||
|
"""Add tier_id FK to vendor_subscriptions
|
||||||
|
|
||||||
|
Revision ID: k9f0a1b2c3d4
|
||||||
|
Revises: 2953ed10d22c
|
||||||
|
Create Date: 2025-12-26
|
||||||
|
|
||||||
|
Adds tier_id column to vendor_subscriptions table with FK to subscription_tiers.
|
||||||
|
Backfills tier_id based on existing tier (code) values.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = "k9f0a1b2c3d4"
|
||||||
|
down_revision = "2953ed10d22c"
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add vendor_subscriptions.tier_id (FK -> subscription_tiers) and backfill it."""
    table = "vendor_subscriptions"

    # Batch mode so the ALTERs also work on SQLite.
    with op.batch_alter_table(table, schema=None) as batch_op:
        # Nullable so pre-existing rows can be backfilled below.
        batch_op.add_column(sa.Column("tier_id", sa.Integer(), nullable=True))
        batch_op.create_index(
            "ix_vendor_subscriptions_tier_id",
            ["tier_id"],
            unique=False,
        )
        batch_op.create_foreign_key(
            "fk_vendor_subscriptions_tier_id",
            "subscription_tiers",
            ["tier_id"],
            ["id"],
            ondelete="SET NULL",
        )

    # Backfill: link every existing subscription whose tier (code) value
    # matches a row in subscription_tiers.
    backfill_sql = """
        UPDATE vendor_subscriptions
        SET tier_id = (
            SELECT id FROM subscription_tiers
            WHERE subscription_tiers.code = vendor_subscriptions.tier
        )
        WHERE EXISTS (
            SELECT 1 FROM subscription_tiers
            WHERE subscription_tiers.code = vendor_subscriptions.tier
        )
    """
    op.execute(backfill_sql)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the tier_id column (and its index) from vendor_subscriptions.

    Under SQLite batch mode the index must be dropped explicitly before the
    column; otherwise the table rebuild would try to recreate the index on a
    new table that no longer has the column.
    """
    with op.batch_alter_table("vendor_subscriptions", schema=None) as batch_op:
        batch_op.drop_index("ix_vendor_subscriptions_tier_id")
        # The FK constraint is removed together with the column.
        batch_op.drop_column("tier_id")
|
||||||
@@ -0,0 +1,65 @@
|
|||||||
|
"""Add capacity_snapshots table
|
||||||
|
|
||||||
|
Revision ID: l0a1b2c3d4e5
|
||||||
|
Revises: k9f0a1b2c3d4
|
||||||
|
Create Date: 2025-12-26
|
||||||
|
|
||||||
|
Adds table for tracking daily platform capacity metrics for growth forecasting.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = "l0a1b2c3d4e5"
|
||||||
|
down_revision = "k9f0a1b2c3d4"
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the capacity_snapshots table and its id/date indexes."""

    def zero_counter(name: str) -> sa.Column:
        # Non-null integer metric that starts at zero.
        return sa.Column(name, sa.Integer(), nullable=False, server_default="0")

    op.create_table(
        "capacity_snapshots",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("snapshot_date", sa.DateTime(timezone=True), nullable=False),
        # Vendor metrics
        zero_counter("total_vendors"),
        zero_counter("active_vendors"),
        zero_counter("trial_vendors"),
        # Subscription metrics
        zero_counter("total_subscriptions"),
        zero_counter("active_subscriptions"),
        # Resource metrics
        zero_counter("total_products"),
        zero_counter("total_orders_month"),
        zero_counter("total_team_members"),
        # Storage metrics
        sa.Column("storage_used_gb", sa.Numeric(10, 2), nullable=False, server_default="0"),
        sa.Column("db_size_mb", sa.Numeric(10, 2), nullable=False, server_default="0"),
        # Capacity metrics (nullable: unknown until computed)
        sa.Column("theoretical_products_limit", sa.Integer(), nullable=True),
        sa.Column("theoretical_orders_limit", sa.Integer(), nullable=True),
        sa.Column("theoretical_team_limit", sa.Integer(), nullable=True),
        # Tier distribution
        sa.Column("tier_distribution", sa.JSON(), nullable=True),
        # Performance metrics
        sa.Column("avg_response_ms", sa.Integer(), nullable=True),
        sa.Column("peak_cpu_percent", sa.Numeric(5, 2), nullable=True),
        sa.Column("peak_memory_percent", sa.Numeric(5, 2), nullable=True),
        # Timestamps
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        # Primary key
        sa.PrimaryKeyConstraint("id"),
    )

    # One snapshot per day — hence the unique index on snapshot_date.
    op.create_index("ix_capacity_snapshots_id", "capacity_snapshots", ["id"], unique=False)
    op.create_index("ix_capacity_snapshots_date", "capacity_snapshots", ["snapshot_date"], unique=True)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove capacity_snapshots: both indexes first, then the table."""
    for index_name in ("ix_capacity_snapshots_date", "ix_capacity_snapshots_id"):
        op.drop_index(index_name, table_name="capacity_snapshots")
    op.drop_table("capacity_snapshots")
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
"""add vendor onboarding table
|
||||||
|
|
||||||
|
Revision ID: m1b2c3d4e5f6
|
||||||
|
Revises: d7a4a3f06394
|
||||||
|
Create Date: 2025-12-27 22:00:00.000000
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "m1b2c3d4e5f6"
|
||||||
|
down_revision: str | None = "d7a4a3f06394"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the vendor_onboarding table plus its lookup indexes."""

    def flag(name: str) -> sa.Column:
        # Boolean progress marker, defaulting to false on the server side.
        return sa.Column(name, sa.Boolean(), nullable=False, server_default=sa.text("false"))

    def stamp(name: str) -> sa.Column:
        # Optional timezone-aware timestamp.
        return sa.Column(name, sa.DateTime(timezone=True), nullable=True)

    op.create_table(
        "vendor_onboarding",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        # Overall status
        sa.Column("status", sa.String(length=20), nullable=False, server_default="not_started"),
        sa.Column("current_step", sa.String(length=30), nullable=False, server_default="company_profile"),
        # Step 1: Company Profile
        flag("step_company_profile_completed"),
        stamp("step_company_profile_completed_at"),
        sa.Column("step_company_profile_data", sa.JSON(), nullable=True),
        # Step 2: Letzshop API Configuration
        flag("step_letzshop_api_completed"),
        stamp("step_letzshop_api_completed_at"),
        flag("step_letzshop_api_connection_verified"),
        # Step 3: Product Import
        flag("step_product_import_completed"),
        stamp("step_product_import_completed_at"),
        flag("step_product_import_csv_url_set"),
        # Step 4: Order Sync
        flag("step_order_sync_completed"),
        stamp("step_order_sync_completed_at"),
        sa.Column("step_order_sync_job_id", sa.Integer(), nullable=True),
        # Completion tracking
        stamp("started_at"),
        stamp("completed_at"),
        # Admin override
        flag("skipped_by_admin"),
        stamp("skipped_at"),
        sa.Column("skipped_reason", sa.Text(), nullable=True),
        sa.Column("skipped_by_user_id", sa.Integer(), nullable=True),
        # Timestamps (no timezone, unlike the per-step timestamps above)
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        # Constraints
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["skipped_by_user_id"], ["users.id"]),
        sa.PrimaryKeyConstraint("id"),
    )

    op.create_index(op.f("ix_vendor_onboarding_id"), "vendor_onboarding", ["id"], unique=False)
    # Exactly one onboarding record per vendor.
    op.create_index(op.f("ix_vendor_onboarding_vendor_id"), "vendor_onboarding", ["vendor_id"], unique=True)
    op.create_index(op.f("ix_vendor_onboarding_status"), "vendor_onboarding", ["status"], unique=False)
    op.create_index("idx_onboarding_vendor_status", "vendor_onboarding", ["vendor_id", "status"], unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop vendor_onboarding and every index created in upgrade()."""
    # Composite index first, then the auto-named single-column ones.
    op.drop_index("idx_onboarding_vendor_status", table_name="vendor_onboarding")
    for column in ("status", "vendor_id", "id"):
        op.drop_index(op.f(f"ix_vendor_onboarding_{column}"), table_name="vendor_onboarding")
    op.drop_table("vendor_onboarding")
|
||||||
@@ -0,0 +1,179 @@
|
|||||||
|
# app/modules/billing/migrations/versions/billing_001_merchant_subscriptions_and_feature_limits.py
|
||||||
|
"""
|
||||||
|
Merchant subscriptions and feature limits migration.
|
||||||
|
|
||||||
|
Creates:
|
||||||
|
- merchant_subscriptions table (replaces store_subscriptions)
|
||||||
|
- tier_feature_limits table (replaces hardcoded limit columns)
|
||||||
|
- merchant_feature_overrides table (replaces custom_*_limit columns)
|
||||||
|
|
||||||
|
Drops:
|
||||||
|
- store_subscriptions table
|
||||||
|
- features table
|
||||||
|
|
||||||
|
Alters:
|
||||||
|
- subscription_tiers: removes limit columns and features JSON
|
||||||
|
|
||||||
|
Revision ID: billing_001
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# Revision identifiers
|
||||||
|
revision = "billing_001"
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = ("billing",)
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Migrate billing from the store-level to the merchant-level schema.

    Order matters here: the three new tables are created first, then the
    legacy store_subscriptions/features tables are dropped, and finally the
    dependent tables (subscription_tiers, stripe_webhook_events,
    billing_history) are altered to point at the new schema.
    """
    # ========================================================================
    # Create merchant_subscriptions table
    # ========================================================================
    op.create_table(
        "merchant_subscriptions",
        sa.Column("id", sa.Integer(), primary_key=True, index=True),
        sa.Column("merchant_id", sa.Integer(), sa.ForeignKey("merchants.id", ondelete="CASCADE"), nullable=False, index=True),
        sa.Column("platform_id", sa.Integer(), sa.ForeignKey("platforms.id", ondelete="CASCADE"), nullable=False, index=True),
        # SET NULL keeps the subscription row alive if its tier is deleted.
        sa.Column("tier_id", sa.Integer(), sa.ForeignKey("subscription_tiers.id", ondelete="SET NULL"), nullable=True, index=True),
        sa.Column("status", sa.String(20), nullable=False, server_default="trial", index=True),
        sa.Column("is_annual", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("period_start", sa.DateTime(timezone=True), nullable=False),
        sa.Column("period_end", sa.DateTime(timezone=True), nullable=False),
        sa.Column("trial_ends_at", sa.DateTime(timezone=True), nullable=True),
        # Stripe linkage — nullable until the merchant is actually billed.
        sa.Column("stripe_customer_id", sa.String(100), nullable=True, index=True),
        sa.Column("stripe_subscription_id", sa.String(100), nullable=True, index=True),
        sa.Column("stripe_payment_method_id", sa.String(100), nullable=True),
        sa.Column("payment_retry_count", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("last_payment_error", sa.Text(), nullable=True),
        sa.Column("cancelled_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("cancellation_reason", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        # At most one subscription per merchant per platform.
        sa.UniqueConstraint("merchant_id", "platform_id", name="uq_merchant_platform_subscription"),
    )
    op.create_index("idx_merchant_sub_status", "merchant_subscriptions", ["merchant_id", "status"])
    op.create_index("idx_merchant_sub_platform", "merchant_subscriptions", ["platform_id", "status"])

    # ========================================================================
    # Create tier_feature_limits table (replaces hardcoded limit columns)
    # ========================================================================
    op.create_table(
        "tier_feature_limits",
        sa.Column("id", sa.Integer(), primary_key=True, index=True),
        sa.Column("tier_id", sa.Integer(), sa.ForeignKey("subscription_tiers.id", ondelete="CASCADE"), nullable=False, index=True),
        sa.Column("feature_code", sa.String(80), nullable=False, index=True),
        # limit_value is nullable — presumably NULL means "unlimited";
        # TODO(review): confirm against the billing service layer.
        sa.Column("limit_value", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.UniqueConstraint("tier_id", "feature_code", name="uq_tier_feature_code"),
    )
    op.create_index("idx_tier_feature_lookup", "tier_feature_limits", ["tier_id", "feature_code"])

    # ========================================================================
    # Create merchant_feature_overrides table (replaces custom_*_limit columns)
    # ========================================================================
    op.create_table(
        "merchant_feature_overrides",
        sa.Column("id", sa.Integer(), primary_key=True, index=True),
        sa.Column("merchant_id", sa.Integer(), sa.ForeignKey("merchants.id", ondelete="CASCADE"), nullable=False, index=True),
        sa.Column("platform_id", sa.Integer(), sa.ForeignKey("platforms.id", ondelete="CASCADE"), nullable=False, index=True),
        sa.Column("feature_code", sa.String(80), nullable=False, index=True),
        sa.Column("limit_value", sa.Integer(), nullable=True),
        sa.Column("is_enabled", sa.Boolean(), nullable=False, server_default="1"),
        # Free-text audit note for why the override was granted.
        sa.Column("reason", sa.String(255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.UniqueConstraint("merchant_id", "platform_id", "feature_code", name="uq_merchant_platform_feature"),
    )
    op.create_index("idx_merchant_override_lookup", "merchant_feature_overrides", ["merchant_id", "platform_id", "feature_code"])

    # ========================================================================
    # Drop legacy tables (their data is NOT migrated here)
    # ========================================================================
    op.drop_table("store_subscriptions")
    op.drop_table("features")

    # ========================================================================
    # Remove legacy columns from subscription_tiers — limits now live in
    # tier_feature_limits, and the features JSON is superseded by feature codes
    # ========================================================================
    with op.batch_alter_table("subscription_tiers") as batch_op:
        batch_op.drop_column("orders_per_month")
        batch_op.drop_column("products_limit")
        batch_op.drop_column("team_members")
        batch_op.drop_column("order_history_months")
        batch_op.drop_column("cms_pages_limit")
        batch_op.drop_column("cms_custom_pages_limit")
        batch_op.drop_column("features")

    # ========================================================================
    # Update stripe_webhook_events FK to merchant_subscriptions
    # ========================================================================
    with op.batch_alter_table("stripe_webhook_events") as batch_op:
        batch_op.drop_column("subscription_id")
        batch_op.add_column(
            sa.Column("merchant_subscription_id", sa.Integer(),
                      sa.ForeignKey("merchant_subscriptions.id"), nullable=True, index=True)
        )

    # ========================================================================
    # Add merchant_id to billing_history
    # ========================================================================
    with op.batch_alter_table("billing_history") as batch_op:
        batch_op.add_column(
            sa.Column("merchant_id", sa.Integer(),
                      sa.ForeignKey("merchants.id"), nullable=True, index=True)
        )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Reverse the merchant-level billing migration.

    Restores the legacy schema shape only: the recreated store_subscriptions
    and features tables come back empty, and subscription_tiers gets its old
    columns back with NULL values — the data dropped by upgrade() is gone.
    Operations run in the reverse order of upgrade().
    """
    # Remove merchant_id from billing_history
    with op.batch_alter_table("billing_history") as batch_op:
        batch_op.drop_column("merchant_id")

    # Restore subscription_id on stripe_webhook_events (points back at the
    # legacy store_subscriptions table recreated below)
    with op.batch_alter_table("stripe_webhook_events") as batch_op:
        batch_op.drop_column("merchant_subscription_id")
        batch_op.add_column(
            sa.Column("subscription_id", sa.Integer(),
                      sa.ForeignKey("store_subscriptions.id"), nullable=True, index=True)
        )

    # Restore columns on subscription_tiers (all nullable — original values
    # are not recoverable)
    with op.batch_alter_table("subscription_tiers") as batch_op:
        batch_op.add_column(sa.Column("orders_per_month", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("products_limit", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("team_members", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("order_history_months", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("cms_pages_limit", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("cms_custom_pages_limit", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("features", sa.JSON(), nullable=True))

    # Recreate features table (empty)
    op.create_table(
        "features",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("code", sa.String(50), unique=True, nullable=False),
        sa.Column("name", sa.String(100), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("category", sa.String(50), nullable=False),
        sa.Column("is_active", sa.Boolean(), server_default="1"),
    )

    # Recreate store_subscriptions table (empty; minimal legacy shape)
    op.create_table(
        "store_subscriptions",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("store_id", sa.Integer(), sa.ForeignKey("stores.id"), unique=True, nullable=False),
        sa.Column("tier", sa.String(20), nullable=False, server_default="essential"),
        sa.Column("status", sa.String(20), nullable=False, server_default="trial"),
        sa.Column("period_start", sa.DateTime(timezone=True), nullable=False),
        sa.Column("period_end", sa.DateTime(timezone=True), nullable=False),
    )

    # Drop new tables (override/limit tables before the subscription table
    # they reference via subscription_tiers/merchants FKs)
    op.drop_table("merchant_feature_overrides")
    op.drop_table("tier_feature_limits")
    op.drop_table("merchant_subscriptions")
|
||||||
@@ -0,0 +1,650 @@
|
|||||||
|
"""add loyalty module tables
|
||||||
|
|
||||||
|
Revision ID: 0fb5d6d6ff97
|
||||||
|
Revises: zd3n4o5p6q7r8
|
||||||
|
Create Date: 2026-01-28 22:55:34.074321
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql, sqlite
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "0fb5d6d6ff97"
|
||||||
|
down_revision: str | None = "zd3n4o5p6q7r8"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the loyalty-module schema and align existing tables.

    Creates five new tables — loyalty_programs, loyalty_cards, staff_pins,
    apple_device_registrations, loyalty_transactions — with their indexes,
    then applies autogenerated comment/type/index alignments to the existing
    admin_menu_configs, admin_platforms, content_pages, platform_modules,
    platforms, subscription_tiers, users and store_platforms tables.
    """
    # ### commands auto generated by Alembic - please adjust! ###

    # --- loyalty_programs: one program per store; stamp/points configuration,
    # card branding, and Google/Apple Wallet identifiers.
    # NOTE(review): points_rewards uses sqlite.JSON() while the alter_column
    # calls below use postgresql.* types — autogen dialect mix; confirm the
    # intended target backend.
    op.create_table("loyalty_programs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("store_id", sa.Integer(), nullable=False),
        sa.Column("loyalty_type", sa.String(length=20), nullable=False),
        sa.Column("stamps_target", sa.Integer(), nullable=False, comment="Number of stamps needed for reward"),
        sa.Column("stamps_reward_description", sa.String(length=255), nullable=False, comment="Description of stamp reward"),
        sa.Column("stamps_reward_value_cents", sa.Integer(), nullable=True, comment="Value of stamp reward in cents (for analytics)"),
        sa.Column("points_per_euro", sa.Integer(), nullable=False, comment="Points earned per euro spent"),
        sa.Column("points_rewards", sqlite.JSON(), nullable=False, comment="List of point rewards: [{id, name, points_required, description}]"),
        sa.Column("cooldown_minutes", sa.Integer(), nullable=False, comment="Minutes between stamps for same card"),
        sa.Column("max_daily_stamps", sa.Integer(), nullable=False, comment="Maximum stamps per card per day"),
        sa.Column("require_staff_pin", sa.Boolean(), nullable=False, comment="Require staff PIN for stamp/points operations"),
        sa.Column("card_name", sa.String(length=100), nullable=True, comment="Display name for loyalty card"),
        sa.Column("card_color", sa.String(length=7), nullable=False, comment="Primary color for card (hex)"),
        sa.Column("card_secondary_color", sa.String(length=7), nullable=True, comment="Secondary color for card (hex)"),
        sa.Column("logo_url", sa.String(length=500), nullable=True, comment="URL to store logo for card"),
        sa.Column("hero_image_url", sa.String(length=500), nullable=True, comment="URL to hero image for card"),
        sa.Column("google_issuer_id", sa.String(length=100), nullable=True, comment="Google Wallet Issuer ID"),
        sa.Column("google_class_id", sa.String(length=200), nullable=True, comment="Google Wallet Loyalty Class ID"),
        sa.Column("apple_pass_type_id", sa.String(length=100), nullable=True, comment="Apple Wallet Pass Type ID"),
        sa.Column("terms_text", sa.Text(), nullable=True, comment="Loyalty program terms and conditions"),
        sa.Column("privacy_url", sa.String(length=500), nullable=True, comment="URL to privacy policy"),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("activated_at", sa.DateTime(timezone=True), nullable=True, comment="When program was first activated"),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["store_id"], ["stores.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index("idx_loyalty_program_store_active", "loyalty_programs", ["store_id", "is_active"], unique=False)
    op.create_index(op.f("ix_loyalty_programs_id"), "loyalty_programs", ["id"], unique=False)
    op.create_index(op.f("ix_loyalty_programs_is_active"), "loyalty_programs", ["is_active"], unique=False)
    # unique=True: enforces at most one loyalty program per store
    op.create_index(op.f("ix_loyalty_programs_store_id"), "loyalty_programs", ["store_id"], unique=True)

    # --- loyalty_cards: per-customer card on a program; stamp/points balances
    # and wallet object references. store_id is denormalized (see comments).
    op.create_table("loyalty_cards",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("customer_id", sa.Integer(), nullable=False),
        sa.Column("program_id", sa.Integer(), nullable=False),
        sa.Column("store_id", sa.Integer(), nullable=False, comment="Denormalized for query performance"),
        sa.Column("card_number", sa.String(length=20), nullable=False, comment="Human-readable card number"),
        sa.Column("qr_code_data", sa.String(length=50), nullable=False, comment="Data encoded in QR code for scanning"),
        sa.Column("stamp_count", sa.Integer(), nullable=False, comment="Current stamps toward next reward"),
        sa.Column("total_stamps_earned", sa.Integer(), nullable=False, comment="Lifetime stamps earned"),
        sa.Column("stamps_redeemed", sa.Integer(), nullable=False, comment="Total rewards redeemed (stamps reset on redemption)"),
        sa.Column("points_balance", sa.Integer(), nullable=False, comment="Current available points"),
        sa.Column("total_points_earned", sa.Integer(), nullable=False, comment="Lifetime points earned"),
        sa.Column("points_redeemed", sa.Integer(), nullable=False, comment="Lifetime points redeemed"),
        sa.Column("google_object_id", sa.String(length=200), nullable=True, comment="Google Wallet Loyalty Object ID"),
        sa.Column("google_object_jwt", sa.String(length=2000), nullable=True, comment="JWT for Google Wallet 'Add to Wallet' button"),
        sa.Column("apple_serial_number", sa.String(length=100), nullable=True, comment="Apple Wallet pass serial number"),
        sa.Column("apple_auth_token", sa.String(length=100), nullable=True, comment="Apple Wallet authentication token for updates"),
        sa.Column("last_stamp_at", sa.DateTime(timezone=True), nullable=True, comment="Last stamp added (for cooldown)"),
        sa.Column("last_points_at", sa.DateTime(timezone=True), nullable=True, comment="Last points earned"),
        sa.Column("last_redemption_at", sa.DateTime(timezone=True), nullable=True, comment="Last reward redemption"),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["customer_id"], ["customers.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["program_id"], ["loyalty_programs.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["store_id"], ["stores.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id")
    )
    # unique=True: one card per customer per program
    op.create_index("idx_loyalty_card_customer_program", "loyalty_cards", ["customer_id", "program_id"], unique=True)
    op.create_index("idx_loyalty_card_store_active", "loyalty_cards", ["store_id", "is_active"], unique=False)
    op.create_index(op.f("ix_loyalty_cards_apple_serial_number"), "loyalty_cards", ["apple_serial_number"], unique=True)
    op.create_index(op.f("ix_loyalty_cards_card_number"), "loyalty_cards", ["card_number"], unique=True)
    op.create_index(op.f("ix_loyalty_cards_customer_id"), "loyalty_cards", ["customer_id"], unique=False)
    op.create_index(op.f("ix_loyalty_cards_google_object_id"), "loyalty_cards", ["google_object_id"], unique=False)
    op.create_index(op.f("ix_loyalty_cards_id"), "loyalty_cards", ["id"], unique=False)
    op.create_index(op.f("ix_loyalty_cards_is_active"), "loyalty_cards", ["is_active"], unique=False)
    op.create_index(op.f("ix_loyalty_cards_program_id"), "loyalty_cards", ["program_id"], unique=False)
    op.create_index(op.f("ix_loyalty_cards_qr_code_data"), "loyalty_cards", ["qr_code_data"], unique=True)
    op.create_index(op.f("ix_loyalty_cards_store_id"), "loyalty_cards", ["store_id"], unique=False)

    # --- staff_pins: hashed staff PINs per program, with lockout tracking.
    op.create_table("staff_pins",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("program_id", sa.Integer(), nullable=False),
        sa.Column("store_id", sa.Integer(), nullable=False, comment="Denormalized for query performance"),
        sa.Column("name", sa.String(length=100), nullable=False, comment="Staff member name"),
        sa.Column("staff_id", sa.String(length=50), nullable=True, comment="Optional staff ID/employee number"),
        sa.Column("pin_hash", sa.String(length=255), nullable=False, comment="bcrypt hash of PIN"),
        sa.Column("failed_attempts", sa.Integer(), nullable=False, comment="Consecutive failed PIN attempts"),
        sa.Column("locked_until", sa.DateTime(timezone=True), nullable=True, comment="Lockout expires at this time"),
        sa.Column("last_used_at", sa.DateTime(timezone=True), nullable=True, comment="Last successful use of PIN"),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["program_id"], ["loyalty_programs.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["store_id"], ["stores.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index("idx_staff_pin_program_active", "staff_pins", ["program_id", "is_active"], unique=False)
    op.create_index("idx_staff_pin_store_active", "staff_pins", ["store_id", "is_active"], unique=False)
    op.create_index(op.f("ix_staff_pins_id"), "staff_pins", ["id"], unique=False)
    op.create_index(op.f("ix_staff_pins_is_active"), "staff_pins", ["is_active"], unique=False)
    op.create_index(op.f("ix_staff_pins_program_id"), "staff_pins", ["program_id"], unique=False)
    op.create_index(op.f("ix_staff_pins_staff_id"), "staff_pins", ["staff_id"], unique=False)
    op.create_index(op.f("ix_staff_pins_store_id"), "staff_pins", ["store_id"], unique=False)

    # --- apple_device_registrations: APNs push registration per device+card.
    op.create_table("apple_device_registrations",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("card_id", sa.Integer(), nullable=False),
        sa.Column("device_library_identifier", sa.String(length=100), nullable=False, comment="Unique identifier for the device/library"),
        sa.Column("push_token", sa.String(length=100), nullable=False, comment="APNs push token for this device"),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["card_id"], ["loyalty_cards.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id")
    )
    # unique=True: one registration per (device, card) pair
    op.create_index("idx_apple_device_card", "apple_device_registrations", ["device_library_identifier", "card_id"], unique=True)
    op.create_index(op.f("ix_apple_device_registrations_card_id"), "apple_device_registrations", ["card_id"], unique=False)
    op.create_index(op.f("ix_apple_device_registrations_device_library_identifier"), "apple_device_registrations", ["device_library_identifier"], unique=False)
    op.create_index(op.f("ix_apple_device_registrations_id"), "apple_device_registrations", ["id"], unique=False)

    # --- loyalty_transactions: append-style audit log of stamp/points changes.
    # staff_pin FK uses SET NULL so history survives staff PIN deletion.
    op.create_table("loyalty_transactions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("card_id", sa.Integer(), nullable=False),
        sa.Column("store_id", sa.Integer(), nullable=False, comment="Denormalized for query performance"),
        sa.Column("staff_pin_id", sa.Integer(), nullable=True, comment="Staff PIN used for this operation"),
        sa.Column("transaction_type", sa.String(length=30), nullable=False),
        sa.Column("stamps_delta", sa.Integer(), nullable=False, comment="Change in stamps (+1 for earn, -N for redeem)"),
        sa.Column("points_delta", sa.Integer(), nullable=False, comment="Change in points (+N for earn, -N for redeem)"),
        sa.Column("stamps_balance_after", sa.Integer(), nullable=True, comment="Stamp count after this transaction"),
        sa.Column("points_balance_after", sa.Integer(), nullable=True, comment="Points balance after this transaction"),
        sa.Column("purchase_amount_cents", sa.Integer(), nullable=True, comment="Purchase amount in cents (for points calculation)"),
        sa.Column("order_reference", sa.String(length=100), nullable=True, comment="Reference to order that triggered points"),
        sa.Column("reward_id", sa.String(length=50), nullable=True, comment="ID of redeemed reward (from program.points_rewards)"),
        sa.Column("reward_description", sa.String(length=255), nullable=True, comment="Description of redeemed reward"),
        sa.Column("ip_address", sa.String(length=45), nullable=True, comment="IP address of requester (IPv4 or IPv6)"),
        sa.Column("user_agent", sa.String(length=500), nullable=True, comment="User agent string"),
        sa.Column("notes", sa.Text(), nullable=True, comment="Additional notes (e.g., reason for adjustment)"),
        sa.Column("transaction_at", sa.DateTime(timezone=True), nullable=False, comment="When the transaction occurred (may differ from created_at)"),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["card_id"], ["loyalty_cards.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["staff_pin_id"], ["staff_pins.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["store_id"], ["stores.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id")
    )
    op.create_index("idx_loyalty_tx_card_type", "loyalty_transactions", ["card_id", "transaction_type"], unique=False)
    op.create_index("idx_loyalty_tx_type_date", "loyalty_transactions", ["transaction_type", "transaction_at"], unique=False)
    op.create_index("idx_loyalty_tx_store_date", "loyalty_transactions", ["store_id", "transaction_at"], unique=False)
    op.create_index(op.f("ix_loyalty_transactions_card_id"), "loyalty_transactions", ["card_id"], unique=False)
    op.create_index(op.f("ix_loyalty_transactions_id"), "loyalty_transactions", ["id"], unique=False)
    op.create_index(op.f("ix_loyalty_transactions_order_reference"), "loyalty_transactions", ["order_reference"], unique=False)
    op.create_index(op.f("ix_loyalty_transactions_staff_pin_id"), "loyalty_transactions", ["staff_pin_id"], unique=False)
    op.create_index(op.f("ix_loyalty_transactions_transaction_at"), "loyalty_transactions", ["transaction_at"], unique=False)
    op.create_index(op.f("ix_loyalty_transactions_transaction_type"), "loyalty_transactions", ["transaction_type"], unique=False)
    op.create_index(op.f("ix_loyalty_transactions_store_id"), "loyalty_transactions", ["store_id"], unique=False)

    # --- admin_menu_configs: autogen comment/type alignment and index rename
    # (hand-named idx_* indexes replaced by Alembic-convention ix_* names).
    op.alter_column("admin_menu_configs", "platform_id",
        existing_type=sa.INTEGER(),
        comment="Platform scope - applies to users/stores of this platform",
        existing_comment="Platform scope - applies to all platform admins of this platform",
        existing_nullable=True)
    op.alter_column("admin_menu_configs", "user_id",
        existing_type=sa.INTEGER(),
        comment="User scope - applies to this specific super admin (admin frontend only)",
        existing_comment="User scope - applies to this specific super admin",
        existing_nullable=True)
    op.alter_column("admin_menu_configs", "created_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.alter_column("admin_menu_configs", "updated_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.drop_index("idx_admin_menu_configs_frontend_type", table_name="admin_menu_configs")
    op.drop_index("idx_admin_menu_configs_menu_item_id", table_name="admin_menu_configs")
    op.drop_index("idx_admin_menu_configs_platform_id", table_name="admin_menu_configs")
    op.drop_index("idx_admin_menu_configs_user_id", table_name="admin_menu_configs")
    op.create_index(op.f("ix_admin_menu_configs_frontend_type"), "admin_menu_configs", ["frontend_type"], unique=False)
    op.create_index(op.f("ix_admin_menu_configs_id"), "admin_menu_configs", ["id"], unique=False)
    op.create_index(op.f("ix_admin_menu_configs_menu_item_id"), "admin_menu_configs", ["menu_item_id"], unique=False)
    op.create_index(op.f("ix_admin_menu_configs_platform_id"), "admin_menu_configs", ["platform_id"], unique=False)
    op.create_index(op.f("ix_admin_menu_configs_user_id"), "admin_menu_configs", ["user_id"], unique=False)

    # --- admin_platforms: timestamp type alignment and index rename.
    op.alter_column("admin_platforms", "created_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.alter_column("admin_platforms", "updated_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.drop_index("idx_admin_platforms_platform_id", table_name="admin_platforms")
    op.drop_index("idx_admin_platforms_user_id", table_name="admin_platforms")
    op.create_index(op.f("ix_admin_platforms_id"), "admin_platforms", ["id"], unique=False)
    op.create_index(op.f("ix_admin_platforms_platform_id"), "admin_platforms", ["platform_id"], unique=False)
    op.create_index(op.f("ix_admin_platforms_user_id"), "admin_platforms", ["user_id"], unique=False)

    # --- content_pages: add column comments only (types unchanged).
    op.alter_column("content_pages", "platform_id",
        existing_type=sa.INTEGER(),
        comment="Platform this page belongs to",
        existing_nullable=False)
    op.alter_column("content_pages", "store_id",
        existing_type=sa.INTEGER(),
        comment="Store this page belongs to (NULL for platform/default pages)",
        existing_nullable=True)
    op.alter_column("content_pages", "is_platform_page",
        existing_type=sa.BOOLEAN(),
        comment="True = platform marketing page (homepage, pricing); False = store default or override",
        existing_nullable=False,
        existing_server_default=sa.text("false"))

    # --- platform_modules: timestamp type alignment; add conventional id index.
    op.alter_column("platform_modules", "created_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.alter_column("platform_modules", "updated_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.create_index(op.f("ix_platform_modules_id"), "platform_modules", ["id"], unique=False)

    # --- platforms: add column comments, align timestamp types, add id index.
    op.alter_column("platforms", "code",
        existing_type=sa.VARCHAR(length=50),
        comment="Unique platform identifier (e.g., 'oms', 'loyalty', 'sites')",
        existing_nullable=False)
    op.alter_column("platforms", "name",
        existing_type=sa.VARCHAR(length=100),
        comment="Display name (e.g., 'Wizamart OMS')",
        existing_nullable=False)
    op.alter_column("platforms", "description",
        existing_type=sa.TEXT(),
        comment="Platform description for admin/marketing purposes",
        existing_nullable=True)
    op.alter_column("platforms", "domain",
        existing_type=sa.VARCHAR(length=255),
        comment="Production domain (e.g., 'omsflow.lu', 'rewardflow.lu')",
        existing_nullable=True)
    op.alter_column("platforms", "path_prefix",
        existing_type=sa.VARCHAR(length=50),
        comment="Development path prefix (e.g., 'oms' for localhost:9999/oms/*)",
        existing_nullable=True)
    op.alter_column("platforms", "logo",
        existing_type=sa.VARCHAR(length=500),
        comment="Logo URL for light mode",
        existing_nullable=True)
    op.alter_column("platforms", "logo_dark",
        existing_type=sa.VARCHAR(length=500),
        comment="Logo URL for dark mode",
        existing_nullable=True)
    op.alter_column("platforms", "favicon",
        existing_type=sa.VARCHAR(length=500),
        comment="Favicon URL",
        existing_nullable=True)
    op.alter_column("platforms", "theme_config",
        existing_type=postgresql.JSON(astext_type=sa.Text()),
        comment="Theme configuration (colors, fonts, etc.)",
        existing_nullable=True)
    op.alter_column("platforms", "default_language",
        existing_type=sa.VARCHAR(length=5),
        comment="Default language code (e.g., 'fr', 'en', 'de')",
        existing_nullable=False,
        existing_server_default=sa.text("'fr'::character varying"))
    op.alter_column("platforms", "supported_languages",
        existing_type=postgresql.JSON(astext_type=sa.Text()),
        comment="List of supported language codes",
        existing_nullable=False)
    op.alter_column("platforms", "is_active",
        existing_type=sa.BOOLEAN(),
        comment="Whether the platform is active and accessible",
        existing_nullable=False,
        existing_server_default=sa.text("true"))
    op.alter_column("platforms", "is_public",
        existing_type=sa.BOOLEAN(),
        comment="Whether the platform is visible in public listings",
        existing_nullable=False,
        existing_server_default=sa.text("true"))
    op.alter_column("platforms", "settings",
        existing_type=postgresql.JSON(astext_type=sa.Text()),
        comment="Platform-specific settings and feature flags",
        existing_nullable=True)
    op.alter_column("platforms", "created_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.alter_column("platforms", "updated_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.create_index(op.f("ix_platforms_id"), "platforms", ["id"], unique=False)

    # --- subscription_tiers: comments; the code index is rebuilt non-unique
    # (codes may now repeat across platforms).
    op.alter_column("subscription_tiers", "platform_id",
        existing_type=sa.INTEGER(),
        comment="Platform this tier belongs to (NULL = global tier)",
        existing_nullable=True)
    op.alter_column("subscription_tiers", "cms_pages_limit",
        existing_type=sa.INTEGER(),
        comment="Total CMS pages limit (NULL = unlimited)",
        existing_nullable=True)
    op.alter_column("subscription_tiers", "cms_custom_pages_limit",
        existing_type=sa.INTEGER(),
        comment="Custom pages limit, excluding overrides (NULL = unlimited)",
        existing_nullable=True)
    op.drop_index("ix_subscription_tiers_code", table_name="subscription_tiers")
    op.create_index(op.f("ix_subscription_tiers_code"), "subscription_tiers", ["code"], unique=False)

    # --- users: drop the column comment on is_super_admin (comment=None).
    op.alter_column("users", "is_super_admin",
        existing_type=sa.BOOLEAN(),
        comment=None,
        existing_comment="Whether this admin has access to all platforms (super admin)",
        existing_nullable=False,
        existing_server_default=sa.text("false"))

    # --- store_platforms: add column comments, align timestamp types, add
    # conventional id index.
    op.alter_column("store_platforms", "store_id",
        existing_type=sa.INTEGER(),
        comment="Reference to the store",
        existing_nullable=False)
    op.alter_column("store_platforms", "platform_id",
        existing_type=sa.INTEGER(),
        comment="Reference to the platform",
        existing_nullable=False)
    op.alter_column("store_platforms", "tier_id",
        existing_type=sa.INTEGER(),
        comment="Platform-specific subscription tier",
        existing_nullable=True)
    op.alter_column("store_platforms", "is_active",
        existing_type=sa.BOOLEAN(),
        comment="Whether the store is active on this platform",
        existing_nullable=False,
        existing_server_default=sa.text("true"))
    op.alter_column("store_platforms", "is_primary",
        existing_type=sa.BOOLEAN(),
        comment="Whether this is the store's primary platform",
        existing_nullable=False,
        existing_server_default=sa.text("false"))
    op.alter_column("store_platforms", "custom_subdomain",
        existing_type=sa.VARCHAR(length=100),
        comment="Platform-specific subdomain (if different from main subdomain)",
        existing_nullable=True)
    op.alter_column("store_platforms", "settings",
        existing_type=postgresql.JSON(astext_type=sa.Text()),
        comment="Platform-specific store settings",
        existing_nullable=True)
    op.alter_column("store_platforms", "joined_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        comment="When the store joined this platform",
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.alter_column("store_platforms", "created_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.alter_column("store_platforms", "updated_at",
        existing_type=postgresql.TIMESTAMP(timezone=True),
        type_=sa.DateTime(),
        existing_nullable=False,
        existing_server_default=sa.text("now()"))
    op.create_index(op.f("ix_store_platforms_id"), "store_platforms", ["id"], unique=False)
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert this revision's schema changes.

    Mirrors upgrade() in reverse: restores the previous index names, clears
    (or restores) column comments, converts DateTime columns back to
    timezone-aware TIMESTAMPs, and finally drops the loyalty-module tables
    (transactions, Apple device registrations, staff PINs, cards, programs)
    together with all of their indexes.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- store_platforms: drop new index, revert timestamp types and comments
    op.drop_index(op.f("ix_store_platforms_id"), table_name="store_platforms")
    op.alter_column("store_platforms", "updated_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    op.alter_column("store_platforms", "created_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    op.alter_column("store_platforms", "joined_at",
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    comment=None,
                    existing_comment="When the store joined this platform",
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    op.alter_column("store_platforms", "settings",
                    existing_type=postgresql.JSON(astext_type=sa.Text()),
                    comment=None,
                    existing_comment="Platform-specific store settings",
                    existing_nullable=True)
    op.alter_column("store_platforms", "custom_subdomain",
                    existing_type=sa.VARCHAR(length=100),
                    comment=None,
                    existing_comment="Platform-specific subdomain (if different from main subdomain)",
                    existing_nullable=True)
    op.alter_column("store_platforms", "is_primary",
                    existing_type=sa.BOOLEAN(),
                    comment=None,
                    existing_comment="Whether this is the store's primary platform",
                    existing_nullable=False,
                    existing_server_default=sa.text("false"))
    op.alter_column("store_platforms", "is_active",
                    existing_type=sa.BOOLEAN(),
                    comment=None,
                    existing_comment="Whether the store is active on this platform",
                    existing_nullable=False,
                    existing_server_default=sa.text("true"))
    op.alter_column("store_platforms", "tier_id",
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment="Platform-specific subscription tier",
                    existing_nullable=True)
    op.alter_column("store_platforms", "platform_id",
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment="Reference to the platform",
                    existing_nullable=False)
    op.alter_column("store_platforms", "store_id",
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment="Reference to the store",
                    existing_nullable=False)
    # NOTE(review): sets a comment on downgrade (rather than clearing one) —
    # presumably restoring the pre-upgrade comment text; verify against upgrade().
    op.alter_column("users", "is_super_admin",
                    existing_type=sa.BOOLEAN(),
                    comment="Whether this admin has access to all platforms (super admin)",
                    existing_nullable=False,
                    existing_server_default=sa.text("false"))
    # --- subscription_tiers: restore the original unique index on code
    op.drop_index(op.f("ix_subscription_tiers_code"), table_name="subscription_tiers")
    op.create_index("ix_subscription_tiers_code", "subscription_tiers", ["code"], unique=True)
    op.alter_column("subscription_tiers", "cms_custom_pages_limit",
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment="Custom pages limit, excluding overrides (NULL = unlimited)",
                    existing_nullable=True)
    op.alter_column("subscription_tiers", "cms_pages_limit",
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment="Total CMS pages limit (NULL = unlimited)",
                    existing_nullable=True)
    op.alter_column("subscription_tiers", "platform_id",
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment="Platform this tier belongs to (NULL = global tier)",
                    existing_nullable=True)
    # --- platforms: drop new index, revert timestamps and all column comments
    op.drop_index(op.f("ix_platforms_id"), table_name="platforms")
    op.alter_column("platforms", "updated_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    op.alter_column("platforms", "created_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    op.alter_column("platforms", "settings",
                    existing_type=postgresql.JSON(astext_type=sa.Text()),
                    comment=None,
                    existing_comment="Platform-specific settings and feature flags",
                    existing_nullable=True)
    op.alter_column("platforms", "is_public",
                    existing_type=sa.BOOLEAN(),
                    comment=None,
                    existing_comment="Whether the platform is visible in public listings",
                    existing_nullable=False,
                    existing_server_default=sa.text("true"))
    op.alter_column("platforms", "is_active",
                    existing_type=sa.BOOLEAN(),
                    comment=None,
                    existing_comment="Whether the platform is active and accessible",
                    existing_nullable=False,
                    existing_server_default=sa.text("true"))
    op.alter_column("platforms", "supported_languages",
                    existing_type=postgresql.JSON(astext_type=sa.Text()),
                    comment=None,
                    existing_comment="List of supported language codes",
                    existing_nullable=False)
    op.alter_column("platforms", "default_language",
                    existing_type=sa.VARCHAR(length=5),
                    comment=None,
                    existing_comment="Default language code (e.g., 'fr', 'en', 'de')",
                    existing_nullable=False,
                    existing_server_default=sa.text("'fr'::character varying"))
    op.alter_column("platforms", "theme_config",
                    existing_type=postgresql.JSON(astext_type=sa.Text()),
                    comment=None,
                    existing_comment="Theme configuration (colors, fonts, etc.)",
                    existing_nullable=True)
    op.alter_column("platforms", "favicon",
                    existing_type=sa.VARCHAR(length=500),
                    comment=None,
                    existing_comment="Favicon URL",
                    existing_nullable=True)
    op.alter_column("platforms", "logo_dark",
                    existing_type=sa.VARCHAR(length=500),
                    comment=None,
                    existing_comment="Logo URL for dark mode",
                    existing_nullable=True)
    op.alter_column("platforms", "logo",
                    existing_type=sa.VARCHAR(length=500),
                    comment=None,
                    existing_comment="Logo URL for light mode",
                    existing_nullable=True)
    op.alter_column("platforms", "path_prefix",
                    existing_type=sa.VARCHAR(length=50),
                    comment=None,
                    existing_comment="Development path prefix (e.g., 'oms' for localhost:9999/oms/*)",
                    existing_nullable=True)
    op.alter_column("platforms", "domain",
                    existing_type=sa.VARCHAR(length=255),
                    comment=None,
                    existing_comment="Production domain (e.g., 'omsflow.lu', 'rewardflow.lu')",
                    existing_nullable=True)
    op.alter_column("platforms", "description",
                    existing_type=sa.TEXT(),
                    comment=None,
                    existing_comment="Platform description for admin/marketing purposes",
                    existing_nullable=True)
    op.alter_column("platforms", "name",
                    existing_type=sa.VARCHAR(length=100),
                    comment=None,
                    existing_comment="Display name (e.g., 'Wizamart OMS')",
                    existing_nullable=False)
    op.alter_column("platforms", "code",
                    existing_type=sa.VARCHAR(length=50),
                    comment=None,
                    existing_comment="Unique platform identifier (e.g., 'oms', 'loyalty', 'sites')",
                    existing_nullable=False)
    # --- platform_modules: drop new index, revert timestamps
    op.drop_index(op.f("ix_platform_modules_id"), table_name="platform_modules")
    op.alter_column("platform_modules", "updated_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    op.alter_column("platform_modules", "created_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    # --- content_pages: clear column comments
    op.alter_column("content_pages", "is_platform_page",
                    existing_type=sa.BOOLEAN(),
                    comment=None,
                    existing_comment="True = platform marketing page (homepage, pricing); False = store default or override",
                    existing_nullable=False,
                    existing_server_default=sa.text("false"))
    op.alter_column("content_pages", "store_id",
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment="Store this page belongs to (NULL for platform/default pages)",
                    existing_nullable=True)
    op.alter_column("content_pages", "platform_id",
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment="Platform this page belongs to",
                    existing_nullable=False)
    # --- admin_platforms: swap ix_* indexes back to the legacy idx_* names
    op.drop_index(op.f("ix_admin_platforms_user_id"), table_name="admin_platforms")
    op.drop_index(op.f("ix_admin_platforms_platform_id"), table_name="admin_platforms")
    op.drop_index(op.f("ix_admin_platforms_id"), table_name="admin_platforms")
    op.create_index("idx_admin_platforms_user_id", "admin_platforms", ["user_id"], unique=False)
    op.create_index("idx_admin_platforms_platform_id", "admin_platforms", ["platform_id"], unique=False)
    op.alter_column("admin_platforms", "updated_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    op.alter_column("admin_platforms", "created_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    # --- admin_menu_configs: swap indexes back, revert timestamps and comments
    op.drop_index(op.f("ix_admin_menu_configs_user_id"), table_name="admin_menu_configs")
    op.drop_index(op.f("ix_admin_menu_configs_platform_id"), table_name="admin_menu_configs")
    op.drop_index(op.f("ix_admin_menu_configs_menu_item_id"), table_name="admin_menu_configs")
    op.drop_index(op.f("ix_admin_menu_configs_id"), table_name="admin_menu_configs")
    op.drop_index(op.f("ix_admin_menu_configs_frontend_type"), table_name="admin_menu_configs")
    op.create_index("idx_admin_menu_configs_user_id", "admin_menu_configs", ["user_id"], unique=False)
    op.create_index("idx_admin_menu_configs_platform_id", "admin_menu_configs", ["platform_id"], unique=False)
    op.create_index("idx_admin_menu_configs_menu_item_id", "admin_menu_configs", ["menu_item_id"], unique=False)
    op.create_index("idx_admin_menu_configs_frontend_type", "admin_menu_configs", ["frontend_type"], unique=False)
    op.alter_column("admin_menu_configs", "updated_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    op.alter_column("admin_menu_configs", "created_at",
                    existing_type=sa.DateTime(),
                    type_=postgresql.TIMESTAMP(timezone=True),
                    existing_nullable=False,
                    existing_server_default=sa.text("now()"))
    op.alter_column("admin_menu_configs", "user_id",
                    existing_type=sa.INTEGER(),
                    comment="User scope - applies to this specific super admin",
                    existing_comment="User scope - applies to this specific super admin (admin frontend only)",
                    existing_nullable=True)
    op.alter_column("admin_menu_configs", "platform_id",
                    existing_type=sa.INTEGER(),
                    comment="Platform scope - applies to all platform admins of this platform",
                    existing_comment="Platform scope - applies to users/stores of this platform",
                    existing_nullable=True)
    # --- loyalty module: drop all indexes, then the tables themselves.
    # Order matters: children (transactions) before parents (cards, programs).
    op.drop_index(op.f("ix_loyalty_transactions_store_id"), table_name="loyalty_transactions")
    op.drop_index(op.f("ix_loyalty_transactions_transaction_type"), table_name="loyalty_transactions")
    op.drop_index(op.f("ix_loyalty_transactions_transaction_at"), table_name="loyalty_transactions")
    op.drop_index(op.f("ix_loyalty_transactions_staff_pin_id"), table_name="loyalty_transactions")
    op.drop_index(op.f("ix_loyalty_transactions_order_reference"), table_name="loyalty_transactions")
    op.drop_index(op.f("ix_loyalty_transactions_id"), table_name="loyalty_transactions")
    op.drop_index(op.f("ix_loyalty_transactions_card_id"), table_name="loyalty_transactions")
    op.drop_index("idx_loyalty_tx_store_date", table_name="loyalty_transactions")
    op.drop_index("idx_loyalty_tx_type_date", table_name="loyalty_transactions")
    op.drop_index("idx_loyalty_tx_card_type", table_name="loyalty_transactions")
    op.drop_table("loyalty_transactions")
    op.drop_index(op.f("ix_apple_device_registrations_id"), table_name="apple_device_registrations")
    op.drop_index(op.f("ix_apple_device_registrations_device_library_identifier"), table_name="apple_device_registrations")
    op.drop_index(op.f("ix_apple_device_registrations_card_id"), table_name="apple_device_registrations")
    op.drop_index("idx_apple_device_card", table_name="apple_device_registrations")
    op.drop_table("apple_device_registrations")
    op.drop_index(op.f("ix_staff_pins_store_id"), table_name="staff_pins")
    op.drop_index(op.f("ix_staff_pins_staff_id"), table_name="staff_pins")
    op.drop_index(op.f("ix_staff_pins_program_id"), table_name="staff_pins")
    op.drop_index(op.f("ix_staff_pins_is_active"), table_name="staff_pins")
    op.drop_index(op.f("ix_staff_pins_id"), table_name="staff_pins")
    op.drop_index("idx_staff_pin_store_active", table_name="staff_pins")
    op.drop_index("idx_staff_pin_program_active", table_name="staff_pins")
    op.drop_table("staff_pins")
    op.drop_index(op.f("ix_loyalty_cards_store_id"), table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_qr_code_data"), table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_program_id"), table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_is_active"), table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_id"), table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_google_object_id"), table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_customer_id"), table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_card_number"), table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_apple_serial_number"), table_name="loyalty_cards")
    op.drop_index("idx_loyalty_card_store_active", table_name="loyalty_cards")
    op.drop_index("idx_loyalty_card_customer_program", table_name="loyalty_cards")
    op.drop_table("loyalty_cards")
    op.drop_index(op.f("ix_loyalty_programs_store_id"), table_name="loyalty_programs")
    op.drop_index(op.f("ix_loyalty_programs_is_active"), table_name="loyalty_programs")
    op.drop_index(op.f("ix_loyalty_programs_id"), table_name="loyalty_programs")
    op.drop_index("idx_loyalty_program_store_active", table_name="loyalty_programs")
    op.drop_table("loyalty_programs")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,588 @@
|
|||||||
|
"""Phase 2: migrate loyalty module to merchant-based architecture
|
||||||
|
|
||||||
|
Revision ID: loyalty_003_phase2
|
||||||
|
Revises: 0fb5d6d6ff97
|
||||||
|
Create Date: 2026-02-06 20:30:00.000000
|
||||||
|
|
||||||
|
Phase 2 changes:
|
||||||
|
- loyalty_programs: store_id -> merchant_id (one program per merchant)
|
||||||
|
- loyalty_cards: add merchant_id, rename store_id -> enrolled_at_store_id
|
||||||
|
- loyalty_transactions: add merchant_id, add related_transaction_id, store_id nullable
|
||||||
|
- staff_pins: add merchant_id
|
||||||
|
- NEW TABLE: merchant_loyalty_settings
|
||||||
|
- NEW COLUMNS on loyalty_programs: points_expiration_days, welcome_bonus_points,
|
||||||
|
minimum_redemption_points, minimum_purchase_cents, tier_config
|
||||||
|
- NEW COLUMN on loyalty_cards: last_activity_at
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
# This revision chains after 0fb5d6d6ff97 in the main (unlabeled) branch.
revision: str = "loyalty_003_phase2"
down_revision: str | None = "0fb5d6d6ff97"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Migrate the loyalty module from a per-store to a per-merchant model.

    Steps (order matters — each merchant_id column is backfilled from the
    related store's merchant before being promoted to NOT NULL):
      1. Create the new merchant_loyalty_settings table (one row per merchant).
      2. loyalty_programs: store_id -> merchant_id (unique: one program per
         merchant) plus new configuration columns.
      3. loyalty_cards: add merchant_id, rename store_id to
         enrolled_at_store_id (nullable, SET NULL on store delete).
      4. loyalty_transactions: add merchant_id and related_transaction_id
         (void linkage); store_id becomes nullable with SET NULL.
      5. staff_pins: add merchant_id.
    """
    # =========================================================================
    # 1. Create merchant_loyalty_settings table
    # =========================================================================
    op.create_table(
        "merchant_loyalty_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("merchant_id", sa.Integer(), nullable=False),
        # Server defaults below encode the policy baseline for existing merchants.
        sa.Column(
            "staff_pin_policy",
            sa.String(length=20),
            nullable=False,
            server_default="required",
        ),
        sa.Column(
            "staff_pin_lockout_attempts",
            sa.Integer(),
            nullable=False,
            server_default="5",
        ),
        sa.Column(
            "staff_pin_lockout_minutes",
            sa.Integer(),
            nullable=False,
            server_default="30",
        ),
        sa.Column(
            "allow_self_enrollment",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("true"),
        ),
        sa.Column(
            "allow_void_transactions",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("true"),
        ),
        sa.Column(
            "allow_cross_location_redemption",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("true"),
        ),
        sa.Column(
            "require_order_reference",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("false"),
        ),
        sa.Column(
            "log_ip_addresses",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("true"),
        ),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["merchant_id"], ["merchants.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_merchant_loyalty_settings_id"),
        "merchant_loyalty_settings",
        ["id"],
        unique=False,
    )
    # unique=True: at most one settings row per merchant
    op.create_index(
        op.f("ix_merchant_loyalty_settings_merchant_id"),
        "merchant_loyalty_settings",
        ["merchant_id"],
        unique=True,
    )

    # =========================================================================
    # 2. Modify loyalty_programs: store_id -> merchant_id + new columns
    # =========================================================================

    # Add merchant_id (nullable first for data migration)
    op.add_column(
        "loyalty_programs", sa.Column("merchant_id", sa.Integer(), nullable=True)
    )

    # Migrate existing data: derive merchant_id from store_id
    op.execute(
        """
        UPDATE loyalty_programs lp
        SET merchant_id = v.merchant_id
        FROM stores v
        WHERE v.id = lp.store_id
        """
    )

    # Make merchant_id non-nullable
    # NOTE(review): fails if any program's store has no merchant_id (or no
    # matching store row) — assumes the backfill above covers every row; verify.
    op.alter_column("loyalty_programs", "merchant_id", nullable=False)

    # Add FK and indexes
    op.create_foreign_key(
        "fk_loyalty_programs_merchant_id",
        "loyalty_programs",
        "merchants",
        ["merchant_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # unique=True enforces "one program per merchant"
    op.create_index(
        op.f("ix_loyalty_programs_merchant_id"),
        "loyalty_programs",
        ["merchant_id"],
        unique=True,
    )
    op.create_index(
        "idx_loyalty_program_merchant_active",
        "loyalty_programs",
        ["merchant_id", "is_active"],
    )

    # Add new Phase 2 columns
    op.add_column(
        "loyalty_programs",
        sa.Column("points_expiration_days", sa.Integer(), nullable=True),
    )
    op.add_column(
        "loyalty_programs",
        sa.Column(
            "welcome_bonus_points",
            sa.Integer(),
            nullable=False,
            server_default="0",
        ),
    )
    op.add_column(
        "loyalty_programs",
        sa.Column(
            "minimum_redemption_points",
            sa.Integer(),
            nullable=False,
            server_default="100",
        ),
    )
    op.add_column(
        "loyalty_programs",
        sa.Column(
            "minimum_purchase_cents",
            sa.Integer(),
            nullable=False,
            server_default="0",
        ),
    )
    op.add_column(
        "loyalty_programs",
        sa.Column("tier_config", sa.JSON(), nullable=True),
    )

    # Drop old store_id column and indexes
    op.drop_index("idx_loyalty_program_store_active", table_name="loyalty_programs")
    op.drop_index(
        op.f("ix_loyalty_programs_store_id"), table_name="loyalty_programs"
    )
    op.drop_constraint(
        "loyalty_programs_store_id_fkey", "loyalty_programs", type_="foreignkey"
    )
    op.drop_column("loyalty_programs", "store_id")

    # =========================================================================
    # 3. Modify loyalty_cards: add merchant_id, rename store_id
    # =========================================================================

    # Add merchant_id
    op.add_column(
        "loyalty_cards", sa.Column("merchant_id", sa.Integer(), nullable=True)
    )

    # Migrate data
    op.execute(
        """
        UPDATE loyalty_cards lc
        SET merchant_id = v.merchant_id
        FROM stores v
        WHERE v.id = lc.store_id
        """
    )

    # NOTE(review): same NOT NULL promotion caveat as loyalty_programs above.
    op.alter_column("loyalty_cards", "merchant_id", nullable=False)

    op.create_foreign_key(
        "fk_loyalty_cards_merchant_id",
        "loyalty_cards",
        "merchants",
        ["merchant_id"],
        ["id"],
        ondelete="CASCADE",
    )
    op.create_index(
        op.f("ix_loyalty_cards_merchant_id"),
        "loyalty_cards",
        ["merchant_id"],
        unique=False,
    )
    op.create_index(
        "idx_loyalty_card_merchant_active",
        "loyalty_cards",
        ["merchant_id", "is_active"],
    )
    # unique=True: one card per (merchant, customer) pair
    op.create_index(
        "idx_loyalty_card_merchant_customer",
        "loyalty_cards",
        ["merchant_id", "customer_id"],
        unique=True,
    )

    # Rename store_id -> enrolled_at_store_id, make nullable, change FK
    op.drop_index("idx_loyalty_card_store_active", table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_store_id"), table_name="loyalty_cards")
    op.drop_constraint(
        "loyalty_cards_store_id_fkey", "loyalty_cards", type_="foreignkey"
    )
    op.alter_column(
        "loyalty_cards",
        "store_id",
        new_column_name="enrolled_at_store_id",
        nullable=True,
    )
    # SET NULL keeps the card if the enrolling store is deleted
    op.create_foreign_key(
        "fk_loyalty_cards_enrolled_store",
        "loyalty_cards",
        "stores",
        ["enrolled_at_store_id"],
        ["id"],
        ondelete="SET NULL",
    )
    op.create_index(
        op.f("ix_loyalty_cards_enrolled_at_store_id"),
        "loyalty_cards",
        ["enrolled_at_store_id"],
        unique=False,
    )

    # Add last_activity_at
    op.add_column(
        "loyalty_cards",
        sa.Column("last_activity_at", sa.DateTime(timezone=True), nullable=True),
    )

    # =========================================================================
    # 4. Modify loyalty_transactions: add merchant_id, related_transaction_id
    # =========================================================================

    # Add merchant_id
    op.add_column(
        "loyalty_transactions",
        sa.Column("merchant_id", sa.Integer(), nullable=True),
    )

    # Migrate data (from card's merchant)
    op.execute(
        """
        UPDATE loyalty_transactions lt
        SET merchant_id = lc.merchant_id
        FROM loyalty_cards lc
        WHERE lc.id = lt.card_id
        """
    )

    # NOTE(review): same NOT NULL promotion caveat as above.
    op.alter_column("loyalty_transactions", "merchant_id", nullable=False)

    op.create_foreign_key(
        "fk_loyalty_transactions_merchant_id",
        "loyalty_transactions",
        "merchants",
        ["merchant_id"],
        ["id"],
        ondelete="CASCADE",
    )
    op.create_index(
        op.f("ix_loyalty_transactions_merchant_id"),
        "loyalty_transactions",
        ["merchant_id"],
        unique=False,
    )
    op.create_index(
        "idx_loyalty_tx_merchant_date",
        "loyalty_transactions",
        ["merchant_id", "transaction_at"],
    )
    op.create_index(
        "idx_loyalty_tx_merchant_store",
        "loyalty_transactions",
        ["merchant_id", "store_id"],
    )

    # Make store_id nullable and change FK to SET NULL
    op.drop_constraint(
        "loyalty_transactions_store_id_fkey",
        "loyalty_transactions",
        type_="foreignkey",
    )
    op.alter_column("loyalty_transactions", "store_id", nullable=True)
    op.create_foreign_key(
        "fk_loyalty_transactions_store_id",
        "loyalty_transactions",
        "stores",
        ["store_id"],
        ["id"],
        ondelete="SET NULL",
    )

    # Add related_transaction_id (for void linkage)
    op.add_column(
        "loyalty_transactions",
        sa.Column("related_transaction_id", sa.Integer(), nullable=True),
    )
    # Self-referential FK: a void transaction points at the one it reverses
    op.create_foreign_key(
        "fk_loyalty_tx_related",
        "loyalty_transactions",
        "loyalty_transactions",
        ["related_transaction_id"],
        ["id"],
        ondelete="SET NULL",
    )
    op.create_index(
        op.f("ix_loyalty_transactions_related_transaction_id"),
        "loyalty_transactions",
        ["related_transaction_id"],
        unique=False,
    )

    # =========================================================================
    # 5. Modify staff_pins: add merchant_id
    # =========================================================================

    op.add_column(
        "staff_pins", sa.Column("merchant_id", sa.Integer(), nullable=True)
    )

    # Migrate data (from store's merchant)
    op.execute(
        """
        UPDATE staff_pins sp
        SET merchant_id = v.merchant_id
        FROM stores v
        WHERE v.id = sp.store_id
        """
    )

    # NOTE(review): same NOT NULL promotion caveat as above.
    op.alter_column("staff_pins", "merchant_id", nullable=False)

    op.create_foreign_key(
        "fk_staff_pins_merchant_id",
        "staff_pins",
        "merchants",
        ["merchant_id"],
        ["id"],
        ondelete="CASCADE",
    )
    op.create_index(
        op.f("ix_staff_pins_merchant_id"),
        "staff_pins",
        ["merchant_id"],
        unique=False,
    )
    op.create_index(
        "idx_staff_pin_merchant_active",
        "staff_pins",
        ["merchant_id", "is_active"],
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _constraint_exists(table: str, constraint: str) -> bool:
    """Return True when *constraint* is defined on *table* in the public schema."""
    bind = op.get_bind()
    query = text(
        "SELECT 1 FROM pg_constraint c "
        "JOIN pg_class r ON c.conrelid = r.oid "
        "JOIN pg_namespace n ON r.relnamespace = n.oid "
        "WHERE n.nspname = 'public' AND r.relname = :t AND c.conname = :c"
    )
    row = bind.execute(query, {"t": table, "c": constraint}).fetchone()
    return row is not None
|
||||||
|
|
||||||
|
|
||||||
|
def _col_exists(table: str, col: str) -> bool:
    """Return True when column *col* is present on *table* in the public schema."""
    bind = op.get_bind()
    query = text(
        "SELECT 1 FROM information_schema.columns "
        "WHERE table_schema='public' AND table_name=:t AND column_name=:c"
    )
    row = bind.execute(query, {"t": table, "c": col}).fetchone()
    return row is not None
|
||||||
|
|
||||||
|
|
||||||
|
def _index_exists(index: str) -> bool:
    """Return True when an index named *index* exists in the public schema."""
    bind = op.get_bind()
    query = text(
        "SELECT 1 FROM pg_indexes WHERE schemaname='public' AND indexname=:i"
    )
    row = bind.execute(query, {"i": index}).fetchone()
    return row is not None
|
||||||
|
|
||||||
|
|
||||||
|
def _table_exists(table: str) -> bool:
    """Return True when *table* exists in the public schema."""
    bind = op.get_bind()
    query = text(
        "SELECT 1 FROM information_schema.tables "
        "WHERE table_schema='public' AND table_name=:t"
    )
    row = bind.execute(query, {"t": table}).fetchone()
    return row is not None
|
||||||
|
|
||||||
|
|
||||||
|
def _safe_drop_index(name: str, table: str) -> None:
    """Drop index *name* on *table*, silently skipping if it does not exist."""
    if not _index_exists(name):
        return
    op.drop_index(name, table_name=table)
|
||||||
|
|
||||||
|
|
||||||
|
def _safe_drop_constraint(name: str, table: str, type_: str = "foreignkey") -> None:
    """Drop constraint *name* from *table*, silently skipping if it does not exist."""
    if not _constraint_exists(table, name):
        return
    op.drop_constraint(name, table, type_=type_)
|
||||||
|
|
||||||
|
|
||||||
|
def _safe_drop_column(table: str, col: str) -> None:
    """Drop column *col* from *table*, silently skipping if it does not exist."""
    if not _col_exists(table, col):
        return
    op.drop_column(table, col)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert the multi-tenant loyalty changes, in reverse order of upgrade.

    Every drop goes through the _safe_drop_* helpers so the downgrade is
    idempotent: it tolerates partially-applied upgrades and objects already
    removed by other migrations.
    """
    # t001 may have renamed stores→vendors and merchants→companies before us.
    # Detect the correct table name for FK references.
    stores_ref = "stores" if _table_exists("stores") else "vendors"

    # =========================================================================
    # 5. Revert staff_pins
    # =========================================================================
    _safe_drop_index("idx_staff_pin_merchant_active", "staff_pins")
    _safe_drop_index("ix_staff_pins_merchant_id", "staff_pins")
    _safe_drop_constraint("fk_staff_pins_merchant_id", "staff_pins")
    _safe_drop_column("staff_pins", "merchant_id")

    # =========================================================================
    # 4. Revert loyalty_transactions
    # =========================================================================
    _safe_drop_index("ix_loyalty_transactions_related_transaction_id", "loyalty_transactions")
    _safe_drop_constraint("fk_loyalty_tx_related", "loyalty_transactions")
    _safe_drop_column("loyalty_transactions", "related_transaction_id")

    # Restore the original NOT NULL store_id FK (upgrade had relaxed it).
    _safe_drop_constraint("fk_loyalty_transactions_store_id", "loyalty_transactions")
    if _col_exists("loyalty_transactions", "store_id"):
        op.alter_column("loyalty_transactions", "store_id", nullable=False)
        op.create_foreign_key(
            "loyalty_transactions_store_id_fkey",
            "loyalty_transactions",
            stores_ref,
            ["store_id"],
            ["id"],
            ondelete="CASCADE",
        )

    _safe_drop_index("idx_loyalty_tx_merchant_store", "loyalty_transactions")
    _safe_drop_index("idx_loyalty_tx_merchant_date", "loyalty_transactions")
    _safe_drop_index("ix_loyalty_transactions_merchant_id", "loyalty_transactions")
    _safe_drop_constraint("fk_loyalty_transactions_merchant_id", "loyalty_transactions")
    _safe_drop_column("loyalty_transactions", "merchant_id")

    # =========================================================================
    # 3. Revert loyalty_cards
    # =========================================================================
    _safe_drop_column("loyalty_cards", "last_activity_at")

    # Rename enrolled_at_store_id back to store_id
    enrolled_col = "enrolled_at_store_id"
    if _col_exists("loyalty_cards", enrolled_col):
        # Drop the new-name index/FK first, then rename and recreate the
        # original FK + indexes under the pre-upgrade names.
        _safe_drop_index("ix_loyalty_cards_enrolled_at_store_id", "loyalty_cards")
        _safe_drop_constraint("fk_loyalty_cards_enrolled_store", "loyalty_cards")
        op.alter_column(
            "loyalty_cards",
            enrolled_col,
            new_column_name="store_id",
            nullable=False,
        )
        op.create_foreign_key(
            "loyalty_cards_store_id_fkey",
            "loyalty_cards",
            stores_ref,
            ["store_id"],
            ["id"],
            ondelete="CASCADE",
        )
        op.create_index(
            op.f("ix_loyalty_cards_store_id"),
            "loyalty_cards",
            ["store_id"],
            unique=False,
        )
        op.create_index(
            "idx_loyalty_card_store_active",
            "loyalty_cards",
            ["store_id", "is_active"],
        )

    _safe_drop_index("idx_loyalty_card_merchant_customer", "loyalty_cards")
    _safe_drop_index("idx_loyalty_card_merchant_active", "loyalty_cards")
    _safe_drop_index("ix_loyalty_cards_merchant_id", "loyalty_cards")
    _safe_drop_constraint("fk_loyalty_cards_merchant_id", "loyalty_cards")
    _safe_drop_column("loyalty_cards", "merchant_id")

    # =========================================================================
    # 2. Revert loyalty_programs
    # =========================================================================
    # Re-add the per-store column (nullable; the upgrade dropped the data).
    if not _col_exists("loyalty_programs", "store_id"):
        op.add_column(
            "loyalty_programs",
            sa.Column("store_id", sa.Integer(), nullable=True),
        )
        op.create_foreign_key(
            "loyalty_programs_store_id_fkey",
            "loyalty_programs",
            stores_ref,
            ["store_id"],
            ["id"],
            ondelete="CASCADE",
        )
        op.create_index(
            op.f("ix_loyalty_programs_store_id"),
            "loyalty_programs",
            ["store_id"],
            unique=True,
        )
        op.create_index(
            "idx_loyalty_program_store_active",
            "loyalty_programs",
            ["store_id", "is_active"],
        )

    _safe_drop_column("loyalty_programs", "tier_config")
    _safe_drop_column("loyalty_programs", "minimum_purchase_cents")
    _safe_drop_column("loyalty_programs", "minimum_redemption_points")
    _safe_drop_column("loyalty_programs", "welcome_bonus_points")
    _safe_drop_column("loyalty_programs", "points_expiration_days")

    _safe_drop_index("idx_loyalty_program_merchant_active", "loyalty_programs")
    _safe_drop_index("ix_loyalty_programs_merchant_id", "loyalty_programs")
    _safe_drop_constraint("fk_loyalty_programs_merchant_id", "loyalty_programs")
    _safe_drop_column("loyalty_programs", "merchant_id")

    # =========================================================================
    # 1. Drop merchant_loyalty_settings table
    # =========================================================================
    # t001 may have renamed this to company_loyalty_settings
    settings_table = "merchant_loyalty_settings" if _table_exists("merchant_loyalty_settings") else "company_loyalty_settings"
    if _table_exists(settings_table):
        _safe_drop_index("ix_merchant_loyalty_settings_merchant_id", settings_table)
        _safe_drop_index("ix_merchant_loyalty_settings_id", settings_table)
        # Also check for indexes under the renamed table name
        _safe_drop_index("ix_company_loyalty_settings_merchant_id", settings_table)
        _safe_drop_index("ix_company_loyalty_settings_id", settings_table)
        op.drop_table(settings_table)
|
||||||
293
alembic/versions_backup/n2c3d4e5f6a7_add_features_table.py
Normal file
293
alembic/versions_backup/n2c3d4e5f6a7_add_features_table.py
Normal file
@@ -0,0 +1,293 @@
|
|||||||
|
"""add features table and seed data
|
||||||
|
|
||||||
|
Revision ID: n2c3d4e5f6a7
|
||||||
|
Revises: ba2c0ce78396
|
||||||
|
Create Date: 2025-12-31 10:00:00.000000
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "n2c3d4e5f6a7"
|
||||||
|
down_revision: str | None = "ba2c0ce78396"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
# Feature Definitions
# ============================================================================
# Seed rows for the `features` table, one tuple per feature:
# category, code, name, description, ui_location, ui_icon, ui_route, display_order
FEATURES = [
    # Orders (category: orders)
    ("orders", "order_management", "Order Management", "View and manage orders", "sidebar", "clipboard-list", "/vendor/{code}/orders", 1),
    ("orders", "order_bulk_actions", "Bulk Order Actions", "Process multiple orders at once", "inline", None, None, 2),
    ("orders", "order_export", "Order Export", "Export orders to CSV/Excel", "inline", "download", None, 3),
    ("orders", "automation_rules", "Automation Rules", "Automatic order processing rules", "sidebar", "cog", "/vendor/{code}/automation", 4),

    # Inventory (category: inventory)
    ("inventory", "inventory_basic", "Basic Inventory", "Track product stock levels", "sidebar", "cube", "/vendor/{code}/inventory", 1),
    ("inventory", "inventory_locations", "Warehouse Locations", "Manage multiple warehouse locations", "inline", "map-pin", None, 2),
    ("inventory", "inventory_purchase_orders", "Purchase Orders", "Create and manage purchase orders", "sidebar", "shopping-cart", "/vendor/{code}/purchase-orders", 3),
    ("inventory", "low_stock_alerts", "Low Stock Alerts", "Get notified when stock is low", "inline", "bell", None, 4),

    # Analytics (category: analytics)
    ("analytics", "basic_reports", "Basic Reports", "Essential sales and order reports", "sidebar", "chart-pie", "/vendor/{code}/reports", 1),
    ("analytics", "analytics_dashboard", "Analytics Dashboard", "Advanced analytics with charts and trends", "sidebar", "chart-bar", "/vendor/{code}/analytics", 2),
    ("analytics", "custom_reports", "Custom Reports", "Build custom report configurations", "inline", "document-report", None, 3),
    ("analytics", "export_reports", "Export Reports", "Export reports to various formats", "inline", "download", None, 4),

    # Invoicing (category: invoicing)
    ("invoicing", "invoice_lu", "Luxembourg Invoicing", "Generate compliant Luxembourg invoices", "sidebar", "document-text", "/vendor/{code}/invoices", 1),
    ("invoicing", "invoice_eu_vat", "EU VAT Support", "Handle EU VAT for cross-border sales", "inline", "globe", None, 2),
    ("invoicing", "invoice_bulk", "Bulk Invoicing", "Generate invoices in bulk", "inline", "document-duplicate", None, 3),
    ("invoicing", "accounting_export", "Accounting Export", "Export to accounting software formats", "inline", "calculator", None, 4),

    # Integrations (category: integrations)
    ("integrations", "letzshop_sync", "Letzshop Sync", "Sync orders and products with Letzshop", "settings", "refresh", None, 1),
    ("integrations", "api_access", "API Access", "REST API access for custom integrations", "settings", "code", "/vendor/{code}/settings/api", 2),
    ("integrations", "webhooks", "Webhooks", "Receive real-time event notifications", "settings", "lightning-bolt", "/vendor/{code}/settings/webhooks", 3),
    ("integrations", "custom_integrations", "Custom Integrations", "Connect with any third-party service", "settings", "puzzle", None, 4),

    # Team (category: team)
    ("team", "single_user", "Single User", "One user account", "api", None, None, 1),
    ("team", "team_basic", "Team Access", "Invite team members", "sidebar", "users", "/vendor/{code}/team", 2),
    ("team", "team_roles", "Team Roles", "Role-based permissions for team members", "inline", "shield-check", None, 3),
    ("team", "audit_log", "Audit Log", "Track all user actions", "sidebar", "clipboard-check", "/vendor/{code}/audit-log", 4),

    # Branding (category: branding)
    ("branding", "basic_shop", "Basic Shop", "Your shop on the platform", "api", None, None, 1),
    ("branding", "custom_domain", "Custom Domain", "Use your own domain name", "settings", "globe-alt", None, 2),
    ("branding", "white_label", "White Label", "Remove platform branding entirely", "settings", "color-swatch", None, 3),

    # Customers (category: customers)
    ("customers", "customer_view", "Customer View", "View customer information", "sidebar", "user-group", "/vendor/{code}/customers", 1),
    ("customers", "customer_export", "Customer Export", "Export customer data", "inline", "download", None, 2),
    ("customers", "customer_messaging", "Customer Messaging", "Send messages to customers", "inline", "chat", None, 3),
]

# ============================================================================
# Tier Feature Assignments
# ============================================================================
# tier_code -> list of feature codes
# NOTE: each tier repeats the lower tiers' codes explicitly (no inheritance),
# so keep the lists in sync by hand when adding a feature.
TIER_FEATURES = {
    "essential": [
        "order_management",
        "inventory_basic",
        "basic_reports",
        "invoice_lu",
        "letzshop_sync",
        "single_user",
        "basic_shop",
        "customer_view",
    ],
    "professional": [
        # All Essential features
        "order_management",
        "order_bulk_actions",
        "order_export",
        "inventory_basic",
        "inventory_locations",
        "inventory_purchase_orders",
        "low_stock_alerts",
        "basic_reports",
        "invoice_lu",
        "invoice_eu_vat",
        "letzshop_sync",
        "team_basic",
        "basic_shop",
        "customer_view",
        "customer_export",
    ],
    "business": [
        # All Professional features
        "order_management",
        "order_bulk_actions",
        "order_export",
        "automation_rules",
        "inventory_basic",
        "inventory_locations",
        "inventory_purchase_orders",
        "low_stock_alerts",
        "basic_reports",
        "analytics_dashboard",
        "custom_reports",
        "export_reports",
        "invoice_lu",
        "invoice_eu_vat",
        "invoice_bulk",
        "accounting_export",
        "letzshop_sync",
        "api_access",
        "webhooks",
        "team_basic",
        "team_roles",
        "audit_log",
        "basic_shop",
        "custom_domain",
        "customer_view",
        "customer_export",
        "customer_messaging",
    ],
    "enterprise": [
        # All features
        "order_management",
        "order_bulk_actions",
        "order_export",
        "automation_rules",
        "inventory_basic",
        "inventory_locations",
        "inventory_purchase_orders",
        "low_stock_alerts",
        "basic_reports",
        "analytics_dashboard",
        "custom_reports",
        "export_reports",
        "invoice_lu",
        "invoice_eu_vat",
        "invoice_bulk",
        "accounting_export",
        "letzshop_sync",
        "api_access",
        "webhooks",
        "custom_integrations",
        "team_basic",
        "team_roles",
        "audit_log",
        "basic_shop",
        "custom_domain",
        "white_label",
        "customer_view",
        "customer_export",
        "customer_messaging",
    ],
}

# Minimum tier for each feature (for upgrade prompts)
# Maps feature_code -> tier_code
MINIMUM_TIER = {
    # Essential
    "order_management": "essential",
    "inventory_basic": "essential",
    "basic_reports": "essential",
    "invoice_lu": "essential",
    "letzshop_sync": "essential",
    "single_user": "essential",
    "basic_shop": "essential",
    "customer_view": "essential",
    # Professional
    "order_bulk_actions": "professional",
    "order_export": "professional",
    "inventory_locations": "professional",
    "inventory_purchase_orders": "professional",
    "low_stock_alerts": "professional",
    "invoice_eu_vat": "professional",
    "team_basic": "professional",
    "customer_export": "professional",
    # Business
    "automation_rules": "business",
    "analytics_dashboard": "business",
    "custom_reports": "business",
    "export_reports": "business",
    "invoice_bulk": "business",
    "accounting_export": "business",
    "api_access": "business",
    "webhooks": "business",
    "team_roles": "business",
    "audit_log": "business",
    "custom_domain": "business",
    "customer_messaging": "business",
    # Enterprise
    "custom_integrations": "enterprise",
    "white_label": "enterprise",
}
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the `features` table, seed it, and attach features to tiers.

    Steps:
      1. Create the table and its indexes.
      2. Insert every row from FEATURES, resolving the minimum tier code
         from MINIMUM_TIER to a subscription_tiers.id.
      3. Write each tier's feature-code list (TIER_FEATURES) as JSON into
         subscription_tiers.features.
    """
    # Create features table
    op.create_table(
        "features",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(50), nullable=False),
        sa.Column("name", sa.String(100), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("category", sa.String(50), nullable=False),
        sa.Column("ui_location", sa.String(50), nullable=True),
        sa.Column("ui_icon", sa.String(50), nullable=True),
        sa.Column("ui_route", sa.String(100), nullable=True),
        sa.Column("ui_badge_text", sa.String(20), nullable=True),
        sa.Column("minimum_tier_id", sa.Integer(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False, default=True),
        sa.Column("is_visible", sa.Boolean(), nullable=False, default=True),
        sa.Column("display_order", sa.Integer(), nullable=False, default=0),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["minimum_tier_id"], ["subscription_tiers.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_features_code", "features", ["code"], unique=True)
    op.create_index("ix_features_category", "features", ["category"], unique=False)
    op.create_index("idx_feature_category_order", "features", ["category", "display_order"])
    op.create_index("idx_feature_active_visible", "features", ["is_active", "is_visible"])

    # Get connection for data operations
    conn = op.get_bind()

    # Get tier IDs (code -> id), needed to resolve minimum_tier_id below.
    tier_ids = {}
    result = conn.execute(sa.text("SELECT id, code FROM subscription_tiers"))
    for row in result:
        tier_ids[row[1]] = row[0]

    # Insert features
    # (a stray no-op `sa.func.now()` expression statement was removed here)
    for category, code, name, description, ui_location, ui_icon, ui_route, display_order in FEATURES:
        minimum_tier_code = MINIMUM_TIER.get(code)
        # Features absent from MINIMUM_TIER get NULL (no tier gating).
        minimum_tier_id = tier_ids.get(minimum_tier_code) if minimum_tier_code else None

        conn.execute(
            sa.text("""
                INSERT INTO features (code, name, description, category, ui_location, ui_icon, ui_route,
                                      minimum_tier_id, is_active, is_visible, display_order, created_at, updated_at)
                VALUES (:code, :name, :description, :category, :ui_location, :ui_icon, :ui_route,
                        :minimum_tier_id, true, true, :display_order, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
            """),
            {
                "code": code,
                "name": name,
                "description": description,
                "category": category,
                "ui_location": ui_location,
                "ui_icon": ui_icon,
                "ui_route": ui_route,
                "minimum_tier_id": minimum_tier_id,
                "display_order": display_order,
            },
        )

    # Update subscription_tiers with feature arrays
    for tier_code, features in TIER_FEATURES.items():
        features_json = json.dumps(features)
        conn.execute(
            sa.text("UPDATE subscription_tiers SET features = :features WHERE code = :code"),
            {"features": features_json, "code": tier_code},
        )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the features table and detach features from tiers."""
    # Clear features from subscription_tiers (reset to an empty JSON array)
    conn = op.get_bind()
    conn.execute(sa.text("UPDATE subscription_tiers SET features = '[]'"))

    # Drop features table (indexes first, reverse of creation order)
    op.drop_index("idx_feature_active_visible", table_name="features")
    op.drop_index("idx_feature_category_order", table_name="features")
    op.drop_index("ix_features_category", table_name="features")
    op.drop_index("ix_features_code", table_name="features")
    op.drop_table("features")
|
||||||
@@ -0,0 +1,145 @@
|
|||||||
|
"""Add inventory_transactions table
|
||||||
|
|
||||||
|
Revision ID: o3c4d5e6f7a8
|
||||||
|
Revises: n2c3d4e5f6a7
|
||||||
|
Create Date: 2026-01-01
|
||||||
|
|
||||||
|
Adds an audit trail for inventory movements:
|
||||||
|
- Track all stock changes (reserve, fulfill, release, adjust, set)
|
||||||
|
- Link transactions to orders for traceability
|
||||||
|
- Store quantity snapshots for historical analysis
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = "o3c4d5e6f7a8"
|
||||||
|
down_revision = "n2c3d4e5f6a7"
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the inventory_transactions audit table with its indexes."""
    # Enum of all recognized stock-movement kinds; SQLAlchemy creates the
    # PostgreSQL type automatically when the table is created.
    movement_enum = sa.Enum(
        "reserve",
        "fulfill",
        "release",
        "adjust",
        "set",
        "import",
        "return",
        name="transactiontype",
    )

    op.create_table(
        "inventory_transactions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("product_id", sa.Integer(), nullable=False),
        sa.Column("inventory_id", sa.Integer(), nullable=True),
        sa.Column("transaction_type", movement_enum, nullable=False),
        sa.Column("quantity_change", sa.Integer(), nullable=False),
        sa.Column("quantity_after", sa.Integer(), nullable=False),
        sa.Column("reserved_after", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("location", sa.String(), nullable=True),
        sa.Column("warehouse", sa.String(), nullable=True),
        sa.Column("order_id", sa.Integer(), nullable=True),
        sa.Column("order_number", sa.String(), nullable=True),
        sa.Column("reason", sa.Text(), nullable=True),
        sa.Column("created_by", sa.String(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
        sa.ForeignKeyConstraint(["product_id"], ["products.id"]),
        sa.ForeignKeyConstraint(["inventory_id"], ["inventory.id"]),
        sa.ForeignKeyConstraint(["order_id"], ["orders.id"]),
        sa.PrimaryKeyConstraint("id"),
    )

    # Create indexes — single-column lookups plus the composite query paths.
    index_specs = [
        ("ix_inventory_transactions_id", ["id"]),
        ("ix_inventory_transactions_vendor_id", ["vendor_id"]),
        ("ix_inventory_transactions_product_id", ["product_id"]),
        ("ix_inventory_transactions_inventory_id", ["inventory_id"]),
        ("ix_inventory_transactions_transaction_type", ["transaction_type"]),
        ("ix_inventory_transactions_order_id", ["order_id"]),
        ("ix_inventory_transactions_created_at", ["created_at"]),
        ("idx_inv_tx_vendor_product", ["vendor_id", "product_id"]),
        ("idx_inv_tx_vendor_created", ["vendor_id", "created_at"]),
        ("idx_inv_tx_type_created", ["transaction_type", "created_at"]),
    ]
    for index_name, index_columns in index_specs:
        op.create_index(index_name, "inventory_transactions", index_columns)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the inventory_transactions table, its indexes, and its enum type."""
    # Indexes first, in reverse creation order.
    for index_name in (
        "idx_inv_tx_type_created",
        "idx_inv_tx_vendor_created",
        "idx_inv_tx_vendor_product",
        "ix_inventory_transactions_created_at",
        "ix_inventory_transactions_order_id",
        "ix_inventory_transactions_transaction_type",
        "ix_inventory_transactions_inventory_id",
        "ix_inventory_transactions_product_id",
        "ix_inventory_transactions_vendor_id",
        "ix_inventory_transactions_id",
    ):
        op.drop_index(index_name, table_name="inventory_transactions")
    op.drop_table("inventory_transactions")

    # Drop enum (checkfirst tolerates it being gone already)
    sa.Enum(name="transactiontype").drop(op.get_bind(), checkfirst=True)
|
||||||
@@ -0,0 +1,39 @@
|
|||||||
|
# alembic/versions/p4d5e6f7a8b9_add_shipped_quantity_to_order_items.py
|
||||||
|
"""Add shipped_quantity to order_items for partial shipments.
|
||||||
|
|
||||||
|
Revision ID: p4d5e6f7a8b9
|
||||||
|
Revises: o3c4d5e6f7a8
|
||||||
|
Create Date: 2026-01-01 12:00:00.000000
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "p4d5e6f7a8b9"
|
||||||
|
down_revision: str | None = "o3c4d5e6f7a8"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add order_items.shipped_quantity and backfill it for fulfilled items."""
    # Add shipped_quantity column to order_items; server_default keeps the
    # column NOT NULL for existing rows without a separate backfill step.
    op.add_column(
        "order_items",
        sa.Column("shipped_quantity", sa.Integer(), nullable=False, server_default="0")
    )

    # Set shipped_quantity = quantity for already fulfilled items
    # This handles existing data where inventory_fulfilled is True
    op.execute("""
        UPDATE order_items
        SET shipped_quantity = quantity
        WHERE inventory_fulfilled = true
    """)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop shipped_quantity; partial-shipment progress is lost on downgrade."""
    op.drop_column("order_items", "shipped_quantity")
|
||||||
@@ -0,0 +1,72 @@
|
|||||||
|
# alembic/versions/q5e6f7a8b9c0_add_vat_fields_to_orders.py
|
||||||
|
"""Add VAT fields to orders table.
|
||||||
|
|
||||||
|
Adds vat_regime, vat_rate, vat_rate_label, and vat_destination_country
|
||||||
|
to enable proper VAT tracking at order creation time, aligned with
|
||||||
|
invoice VAT logic.
|
||||||
|
|
||||||
|
Revision ID: q5e6f7a8b9c0
|
||||||
|
Revises: p4d5e6f7a8b9
|
||||||
|
Create Date: 2026-01-02 10:00:00.000000
|
||||||
|
|
||||||
|
"""
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "q5e6f7a8b9c0"
|
||||||
|
down_revision: str | None = "p4d5e6f7a8b9"
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add VAT tracking columns to orders and backfill from shipping country.

    All four columns are nullable so pre-existing rows stay valid; the
    trailing UPDATE then classifies existing orders.
    """
    # Add VAT regime (domestic, oss, reverse_charge, origin, exempt)
    op.add_column(
        "orders",
        sa.Column("vat_regime", sa.String(20), nullable=True)
    )

    # Add VAT rate as percentage (e.g., 17.00 for 17%)
    op.add_column(
        "orders",
        sa.Column("vat_rate", sa.Numeric(5, 2), nullable=True)
    )

    # Add human-readable VAT label (e.g., "Luxembourg VAT 17%")
    op.add_column(
        "orders",
        sa.Column("vat_rate_label", sa.String(100), nullable=True)
    )

    # Add destination country for cross-border sales (ISO code)
    op.add_column(
        "orders",
        sa.Column("vat_destination_country", sa.String(2), nullable=True)
    )

    # Populate VAT fields for existing orders based on shipping country
    # Default to 'domestic' for LU orders and 'origin' for other EU orders.
    # NOTE: the EU member list is duplicated in both CASE expressions — keep
    # them in sync if it ever changes.
    op.execute("""
        UPDATE orders
        SET vat_regime = CASE
                WHEN ship_country_iso = 'LU' THEN 'domestic'
                WHEN ship_country_iso IN ('AT', 'BE', 'BG', 'HR', 'CY', 'CZ', 'DK', 'EE', 'FI', 'FR', 'DE', 'GR', 'HU', 'IE', 'IT', 'LV', 'LT', 'MT', 'NL', 'PL', 'PT', 'RO', 'SK', 'SI', 'ES', 'SE') THEN 'origin'
                ELSE 'exempt'
            END,
            vat_destination_country = CASE
                WHEN ship_country_iso != 'LU' AND ship_country_iso IN ('AT', 'BE', 'BG', 'HR', 'CY', 'CZ', 'DK', 'EE', 'FI', 'FR', 'DE', 'GR', 'HU', 'IE', 'IT', 'LV', 'LT', 'MT', 'NL', 'PL', 'PT', 'RO', 'SK', 'SI', 'ES', 'SE') THEN ship_country_iso
                ELSE NULL
            END
        WHERE vat_regime IS NULL
    """)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the VAT columns in reverse order of addition."""
    op.drop_column("orders", "vat_destination_country")
    op.drop_column("orders", "vat_rate_label")
    op.drop_column("orders", "vat_rate")
    op.drop_column("orders", "vat_regime")
|
||||||
@@ -0,0 +1,144 @@
|
|||||||
|
"""Add country_iso to customer_addresses
|
||||||
|
|
||||||
|
Revision ID: r6f7a8b9c0d1
|
||||||
|
Revises: q5e6f7a8b9c0
|
||||||
|
Create Date: 2026-01-02
|
||||||
|
|
||||||
|
Adds country_iso field to customer_addresses table and renames
|
||||||
|
country to country_name for clarity.
|
||||||
|
|
||||||
|
This migration is idempotent - it checks for existing columns before
|
||||||
|
making changes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = "r6f7a8b9c0d1"
|
||||||
|
down_revision = "q5e6f7a8b9c0"
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
# Country name to ISO code mapping for backfill
# Rows whose country_name is not listed here keep a NULL country_iso and are
# defaulted later in upgrade(). Matching is exact (case-sensitive).
COUNTRY_ISO_MAP = {
    "Luxembourg": "LU",
    "Germany": "DE",
    "France": "FR",
    "Belgium": "BE",
    "Netherlands": "NL",
    "Austria": "AT",
    "Italy": "IT",
    "Spain": "ES",
    "Portugal": "PT",
    "Poland": "PL",
    "Czech Republic": "CZ",
    "Czechia": "CZ",
    "Slovakia": "SK",
    "Hungary": "HU",
    "Romania": "RO",
    "Bulgaria": "BG",
    "Greece": "GR",
    "Croatia": "HR",
    "Slovenia": "SI",
    "Estonia": "EE",
    "Latvia": "LV",
    "Lithuania": "LT",
    "Finland": "FI",
    "Sweden": "SE",
    "Denmark": "DK",
    "Ireland": "IE",
    "Cyprus": "CY",
    "Malta": "MT",
    "United Kingdom": "GB",
    "Switzerland": "CH",
    "United States": "US",
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_column_names(connection, table_name):
    """Return the column names of ``table_name`` in the public schema (PostgreSQL)."""
    query = text(
        "SELECT column_name FROM information_schema.columns "
        "WHERE table_name = :table AND table_schema = 'public'"
    )
    rows = connection.execute(query, {"table": table_name})
    return [name for (name,) in rows]
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Rename country -> country_name and add a NOT NULL country_iso column.

    Idempotent: inspects the live schema first and skips any step that has
    already been applied.
    """
    bind = op.get_bind()
    existing = set(get_column_names(bind, "customer_addresses"))

    # Already fully migrated: nothing to do.
    if "country_name" in existing and "country_iso" in existing:
        print(" Columns country_name and country_iso already exist, skipping")
        return

    # Rename the legacy column (PostgreSQL supports a direct rename).
    if "country" in existing and "country_name" not in existing:
        op.alter_column(
            "customer_addresses",
            "country",
            new_column_name="country_name",
        )

    # Add country_iso (nullable for now so the backfill can run).
    if "country_iso" not in existing:
        op.add_column(
            "customer_addresses",
            sa.Column("country_iso", sa.String(5), nullable=True),
        )

    # Backfill ISO codes from the human-readable country names.
    backfill = text(
        "UPDATE customer_addresses SET country_iso = :iso "
        "WHERE country_name = :name"
    )
    for name, iso in COUNTRY_ISO_MAP.items():
        bind.execute(backfill, {"iso": iso, "name": name})

    # Any names not covered by the map fall back to 'LU'.
    bind.execute(
        text(
            "UPDATE customer_addresses SET country_iso = 'LU' "
            "WHERE country_iso IS NULL"
        )
    )

    # Every row now has a value, so enforce NOT NULL (direct alter on PostgreSQL).
    op.alter_column(
        "customer_addresses",
        "country_iso",
        existing_type=sa.String(5),
        nullable=False,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert: rename country_name back to country and drop country_iso.

    Only acts on the pieces of the upgraded state that are actually present,
    mirroring the idempotent upgrade.
    """
    bind = op.get_bind()
    existing = set(get_column_names(bind, "customer_addresses"))

    # Undo the rename only if we are in the post-upgrade state.
    if "country_name" in existing and "country" not in existing:
        op.alter_column(
            "customer_addresses",
            "country_name",
            new_column_name="country",
        )

    if "country_iso" in existing:
        op.drop_column("customer_addresses", "country_iso")
|
||||||
@@ -0,0 +1,40 @@
|
|||||||
|
# alembic/versions/s7a8b9c0d1e2_add_storefront_locale_to_vendors.py
|
||||||
|
"""Add storefront_locale to vendors for currency formatting.
|
||||||
|
|
||||||
|
Revision ID: s7a8b9c0d1e2
|
||||||
|
Revises: r6f7a8b9c0d1
|
||||||
|
Create Date: 2026-01-02 20:00:00.000000
|
||||||
|
|
||||||
|
This migration adds a nullable storefront_locale field to vendors.
|
||||||
|
NULL means the vendor inherits from platform defaults.
|
||||||
|
Examples: 'fr-LU', 'de-DE', 'en-GB'
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
revision = "s7a8b9c0d1e2"
down_revision = "r6f7a8b9c0d1"
branch_labels = None
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add the nullable storefront_locale column to the vendors table."""
    # NULL is meaningful here: it means "inherit from the platform default".
    locale_column = sa.Column(
        "storefront_locale",
        sa.String(10),
        nullable=True,
        comment="Currency/number formatting locale (NULL = inherit from platform)",
    )
    op.add_column("vendors", locale_column)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the storefront_locale column from the vendors table."""
    op.drop_column("vendors", "storefront_locale")
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user