diff --git a/.architecture-rules/api.yaml b/.architecture-rules/api.yaml index d9e9364a..d992bc91 100644 --- a/.architecture-rules/api.yaml +++ b/.architecture-rules/api.yaml @@ -24,7 +24,9 @@ api_endpoint_rules: SCHEMA LOCATION: All response schemas must be defined in models/schema/*.py, never inline in endpoint files. This ensures schemas are reusable and discoverable. pattern: - file_pattern: "app/api/v1/**/*.py" + file_pattern: + - "app/api/v1/**/*.py" + - "app/modules/*/routes/api/**/*.py" anti_patterns: - "return dict" - "-> dict" @@ -82,7 +84,9 @@ api_endpoint_rules: # In app/api/v1/admin/my_feature.py from models.schema.my_feature import MyRequest pattern: - file_pattern: "app/api/v1/**/*.py" + file_pattern: + - "app/api/v1/**/*.py" + - "app/modules/*/routes/api/**/*.py" anti_patterns: - "from pydantic import" - "from pydantic.main import" @@ -118,7 +122,9 @@ api_endpoint_rules: - db.query() - complex queries are business logic - db.delete() - deleting entities is business logic pattern: - file_pattern: "app/api/v1/**/*.py" + file_pattern: + - "app/api/v1/**/*.py" + - "app/modules/*/routes/api/**/*.py" anti_patterns: - "db.add(" - "db.delete(" @@ -155,7 +161,9 @@ api_endpoint_rules: # Dependency guarantees token_vendor_id is present return order_service.get_orders(db, current_user.token_vendor_id) pattern: - file_pattern: "app/api/v1/**/*.py" + file_pattern: + - "app/api/v1/**/*.py" + - "app/modules/*/routes/api/**/*.py" anti_patterns: - "raise HTTPException" - "raise InvalidTokenException" @@ -248,7 +256,9 @@ api_endpoint_rules: - from models.database.* - from app.modules.*.models.* pattern: - file_pattern: "app/api/**/*.py" + file_pattern: + - "app/api/**/*.py" + - "app/modules/*/routes/api/**/*.py" anti_patterns: - "from models\\.database\\." - "from app\\.modules\\.[a-z_]+\\.models\\." 
diff --git a/alembic/versions_backup/09d84a46530f_add_celery_task_id_to_job_tables.py b/alembic/versions_backup/09d84a46530f_add_celery_task_id_to_job_tables.py index 4e693903..b5732770 100644 --- a/alembic/versions_backup/09d84a46530f_add_celery_task_id_to_job_tables.py +++ b/alembic/versions_backup/09d84a46530f_add_celery_task_id_to_job_tables.py @@ -5,51 +5,52 @@ Revises: y3d4e5f6g7h8 Create Date: 2026-01-11 16:44:59.070110 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op + # revision identifiers, used by Alembic. -revision: str = '09d84a46530f' -down_revision: Union[str, None] = 'y3d4e5f6g7h8' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "09d84a46530f" +down_revision: str | None = "y3d4e5f6g7h8" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: """Add celery_task_id column to job tracking tables for Celery integration.""" # MarketplaceImportJob - op.add_column('marketplace_import_jobs', sa.Column('celery_task_id', sa.String(length=255), nullable=True)) - op.create_index(op.f('ix_marketplace_import_jobs_celery_task_id'), 'marketplace_import_jobs', ['celery_task_id'], unique=False) + op.add_column("marketplace_import_jobs", sa.Column("celery_task_id", sa.String(length=255), nullable=True)) + op.create_index(op.f("ix_marketplace_import_jobs_celery_task_id"), "marketplace_import_jobs", ["celery_task_id"], unique=False) # LetzshopHistoricalImportJob - op.add_column('letzshop_historical_import_jobs', sa.Column('celery_task_id', sa.String(length=255), nullable=True)) - op.create_index(op.f('ix_letzshop_historical_import_jobs_celery_task_id'), 'letzshop_historical_import_jobs', ['celery_task_id'], unique=False) + op.add_column("letzshop_historical_import_jobs", sa.Column("celery_task_id", sa.String(length=255), 
nullable=True)) + op.create_index(op.f("ix_letzshop_historical_import_jobs_celery_task_id"), "letzshop_historical_import_jobs", ["celery_task_id"], unique=False) # ArchitectureScan - op.add_column('architecture_scans', sa.Column('celery_task_id', sa.String(length=255), nullable=True)) - op.create_index(op.f('ix_architecture_scans_celery_task_id'), 'architecture_scans', ['celery_task_id'], unique=False) + op.add_column("architecture_scans", sa.Column("celery_task_id", sa.String(length=255), nullable=True)) + op.create_index(op.f("ix_architecture_scans_celery_task_id"), "architecture_scans", ["celery_task_id"], unique=False) # TestRun - op.add_column('test_runs', sa.Column('celery_task_id', sa.String(length=255), nullable=True)) - op.create_index(op.f('ix_test_runs_celery_task_id'), 'test_runs', ['celery_task_id'], unique=False) + op.add_column("test_runs", sa.Column("celery_task_id", sa.String(length=255), nullable=True)) + op.create_index(op.f("ix_test_runs_celery_task_id"), "test_runs", ["celery_task_id"], unique=False) def downgrade() -> None: """Remove celery_task_id column from job tracking tables.""" # TestRun - op.drop_index(op.f('ix_test_runs_celery_task_id'), table_name='test_runs') - op.drop_column('test_runs', 'celery_task_id') + op.drop_index(op.f("ix_test_runs_celery_task_id"), table_name="test_runs") + op.drop_column("test_runs", "celery_task_id") # ArchitectureScan - op.drop_index(op.f('ix_architecture_scans_celery_task_id'), table_name='architecture_scans') - op.drop_column('architecture_scans', 'celery_task_id') + op.drop_index(op.f("ix_architecture_scans_celery_task_id"), table_name="architecture_scans") + op.drop_column("architecture_scans", "celery_task_id") # LetzshopHistoricalImportJob - op.drop_index(op.f('ix_letzshop_historical_import_jobs_celery_task_id'), table_name='letzshop_historical_import_jobs') - op.drop_column('letzshop_historical_import_jobs', 'celery_task_id') + 
op.drop_index(op.f("ix_letzshop_historical_import_jobs_celery_task_id"), table_name="letzshop_historical_import_jobs") + op.drop_column("letzshop_historical_import_jobs", "celery_task_id") # MarketplaceImportJob - op.drop_index(op.f('ix_marketplace_import_jobs_celery_task_id'), table_name='marketplace_import_jobs') - op.drop_column('marketplace_import_jobs', 'celery_task_id') + op.drop_index(op.f("ix_marketplace_import_jobs_celery_task_id"), table_name="marketplace_import_jobs") + op.drop_column("marketplace_import_jobs", "celery_task_id") diff --git a/alembic/versions_backup/0bd9ffaaced1_add_application_logs_table_for_hybrid_.py b/alembic/versions_backup/0bd9ffaaced1_add_application_logs_table_for_hybrid_.py index 3d091068..695ec102 100644 --- a/alembic/versions_backup/0bd9ffaaced1_add_application_logs_table_for_hybrid_.py +++ b/alembic/versions_backup/0bd9ffaaced1_add_application_logs_table_for_hybrid_.py @@ -5,64 +5,64 @@ Revises: 7a7ce92593d5 Create Date: 2025-11-29 12:44:55.427245 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = '0bd9ffaaced1' -down_revision: Union[str, None] = '7a7ce92593d5' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "0bd9ffaaced1" +down_revision: str | None = "7a7ce92593d5" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Create application_logs table op.create_table( - 'application_logs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('timestamp', sa.DateTime(), nullable=False), - sa.Column('level', sa.String(length=20), nullable=False), - sa.Column('logger_name', sa.String(length=200), nullable=False), - sa.Column('module', sa.String(length=200), nullable=True), - sa.Column('function_name', sa.String(length=100), nullable=True), - sa.Column('line_number', sa.Integer(), nullable=True), - sa.Column('message', sa.Text(), nullable=False), - sa.Column('exception_type', sa.String(length=200), nullable=True), - sa.Column('exception_message', sa.Text(), nullable=True), - sa.Column('stack_trace', sa.Text(), nullable=True), - sa.Column('request_id', sa.String(length=100), nullable=True), - sa.Column('user_id', sa.Integer(), nullable=True), - sa.Column('vendor_id', sa.Integer(), nullable=True), - sa.Column('context', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id') + "application_logs", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("timestamp", sa.DateTime(), nullable=False), + sa.Column("level", sa.String(length=20), nullable=False), + sa.Column("logger_name", sa.String(length=200), nullable=False), + sa.Column("module", sa.String(length=200), nullable=True), + sa.Column("function_name", sa.String(length=100), nullable=True), + 
sa.Column("line_number", sa.Integer(), nullable=True), + sa.Column("message", sa.Text(), nullable=False), + sa.Column("exception_type", sa.String(length=200), nullable=True), + sa.Column("exception_message", sa.Text(), nullable=True), + sa.Column("stack_trace", sa.Text(), nullable=True), + sa.Column("request_id", sa.String(length=100), nullable=True), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("vendor_id", sa.Integer(), nullable=True), + sa.Column("context", sa.JSON(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=True), + sa.Column("updated_at", sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(["user_id"], ["users.id"], ), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id") ) # Create indexes for better query performance - op.create_index(op.f('ix_application_logs_id'), 'application_logs', ['id'], unique=False) - op.create_index(op.f('ix_application_logs_timestamp'), 'application_logs', ['timestamp'], unique=False) - op.create_index(op.f('ix_application_logs_level'), 'application_logs', ['level'], unique=False) - op.create_index(op.f('ix_application_logs_logger_name'), 'application_logs', ['logger_name'], unique=False) - op.create_index(op.f('ix_application_logs_request_id'), 'application_logs', ['request_id'], unique=False) - op.create_index(op.f('ix_application_logs_user_id'), 'application_logs', ['user_id'], unique=False) - op.create_index(op.f('ix_application_logs_vendor_id'), 'application_logs', ['vendor_id'], unique=False) + op.create_index(op.f("ix_application_logs_id"), "application_logs", ["id"], unique=False) + op.create_index(op.f("ix_application_logs_timestamp"), "application_logs", ["timestamp"], unique=False) + op.create_index(op.f("ix_application_logs_level"), "application_logs", ["level"], unique=False) + op.create_index(op.f("ix_application_logs_logger_name"), "application_logs", ["logger_name"], unique=False) + 
op.create_index(op.f("ix_application_logs_request_id"), "application_logs", ["request_id"], unique=False) + op.create_index(op.f("ix_application_logs_user_id"), "application_logs", ["user_id"], unique=False) + op.create_index(op.f("ix_application_logs_vendor_id"), "application_logs", ["vendor_id"], unique=False) def downgrade() -> None: # Drop indexes - op.drop_index(op.f('ix_application_logs_vendor_id'), table_name='application_logs') - op.drop_index(op.f('ix_application_logs_user_id'), table_name='application_logs') - op.drop_index(op.f('ix_application_logs_request_id'), table_name='application_logs') - op.drop_index(op.f('ix_application_logs_logger_name'), table_name='application_logs') - op.drop_index(op.f('ix_application_logs_level'), table_name='application_logs') - op.drop_index(op.f('ix_application_logs_timestamp'), table_name='application_logs') - op.drop_index(op.f('ix_application_logs_id'), table_name='application_logs') + op.drop_index(op.f("ix_application_logs_vendor_id"), table_name="application_logs") + op.drop_index(op.f("ix_application_logs_user_id"), table_name="application_logs") + op.drop_index(op.f("ix_application_logs_request_id"), table_name="application_logs") + op.drop_index(op.f("ix_application_logs_logger_name"), table_name="application_logs") + op.drop_index(op.f("ix_application_logs_level"), table_name="application_logs") + op.drop_index(op.f("ix_application_logs_timestamp"), table_name="application_logs") + op.drop_index(op.f("ix_application_logs_id"), table_name="application_logs") # Drop table - op.drop_table('application_logs') + op.drop_table("application_logs") diff --git a/alembic/versions_backup/1b398cf45e85_add_letzshop_vendor_cache_table.py b/alembic/versions_backup/1b398cf45e85_add_letzshop_vendor_cache_table.py index b113d400..b67a91d4 100644 --- a/alembic/versions_backup/1b398cf45e85_add_letzshop_vendor_cache_table.py +++ b/alembic/versions_backup/1b398cf45e85_add_letzshop_vendor_cache_table.py @@ -5,363 +5,363 @@ Revises: 
09d84a46530f Create Date: 2026-01-13 19:38:45.423378 """ -from typing import Sequence, Union +from collections.abc import Sequence + +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql, sqlite from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from sqlalchemy.dialects import sqlite # revision identifiers, used by Alembic. -revision: str = '1b398cf45e85' -down_revision: Union[str, None] = '09d84a46530f' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "1b398cf45e85" +down_revision: str | None = "09d84a46530f" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.create_table('letzshop_vendor_cache', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('letzshop_id', sa.String(length=50), nullable=False), - sa.Column('slug', sa.String(length=200), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('company_name', sa.String(length=255), nullable=True), - sa.Column('is_active', sa.Boolean(), nullable=True), - sa.Column('description_en', sa.Text(), nullable=True), - sa.Column('description_fr', sa.Text(), nullable=True), - sa.Column('description_de', sa.Text(), nullable=True), - sa.Column('email', sa.String(length=255), nullable=True), - sa.Column('phone', sa.String(length=50), nullable=True), - sa.Column('fax', sa.String(length=50), nullable=True), - sa.Column('website', sa.String(length=500), nullable=True), - sa.Column('street', sa.String(length=255), nullable=True), - sa.Column('street_number', sa.String(length=50), nullable=True), - sa.Column('city', sa.String(length=100), nullable=True), - sa.Column('zipcode', sa.String(length=20), nullable=True), - sa.Column('country_iso', sa.String(length=5), nullable=True), - sa.Column('latitude', sa.String(length=20), 
nullable=True), - sa.Column('longitude', sa.String(length=20), nullable=True), - sa.Column('categories', sqlite.JSON(), nullable=True), - sa.Column('background_image_url', sa.String(length=500), nullable=True), - sa.Column('social_media_links', sqlite.JSON(), nullable=True), - sa.Column('opening_hours_en', sa.Text(), nullable=True), - sa.Column('opening_hours_fr', sa.Text(), nullable=True), - sa.Column('opening_hours_de', sa.Text(), nullable=True), - sa.Column('representative_name', sa.String(length=255), nullable=True), - sa.Column('representative_title', sa.String(length=100), nullable=True), - sa.Column('claimed_by_vendor_id', sa.Integer(), nullable=True), - sa.Column('claimed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('last_synced_at', sa.DateTime(timezone=True), nullable=False), - sa.Column('raw_data', sqlite.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['claimed_by_vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table("letzshop_vendor_cache", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("letzshop_id", sa.String(length=50), nullable=False), + sa.Column("slug", sa.String(length=200), nullable=False), + sa.Column("name", sa.String(length=255), nullable=False), + sa.Column("company_name", sa.String(length=255), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=True), + sa.Column("description_en", sa.Text(), nullable=True), + sa.Column("description_fr", sa.Text(), nullable=True), + sa.Column("description_de", sa.Text(), nullable=True), + sa.Column("email", sa.String(length=255), nullable=True), + sa.Column("phone", sa.String(length=50), nullable=True), + sa.Column("fax", sa.String(length=50), nullable=True), + sa.Column("website", sa.String(length=500), nullable=True), + sa.Column("street", sa.String(length=255), nullable=True), + sa.Column("street_number", 
sa.String(length=50), nullable=True), + sa.Column("city", sa.String(length=100), nullable=True), + sa.Column("zipcode", sa.String(length=20), nullable=True), + sa.Column("country_iso", sa.String(length=5), nullable=True), + sa.Column("latitude", sa.String(length=20), nullable=True), + sa.Column("longitude", sa.String(length=20), nullable=True), + sa.Column("categories", sqlite.JSON(), nullable=True), + sa.Column("background_image_url", sa.String(length=500), nullable=True), + sa.Column("social_media_links", sqlite.JSON(), nullable=True), + sa.Column("opening_hours_en", sa.Text(), nullable=True), + sa.Column("opening_hours_fr", sa.Text(), nullable=True), + sa.Column("opening_hours_de", sa.Text(), nullable=True), + sa.Column("representative_name", sa.String(length=255), nullable=True), + sa.Column("representative_title", sa.String(length=100), nullable=True), + sa.Column("claimed_by_vendor_id", sa.Integer(), nullable=True), + sa.Column("claimed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("last_synced_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("raw_data", sqlite.JSON(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["claimed_by_vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_vendor_cache_active', 'letzshop_vendor_cache', ['is_active'], unique=False) - op.create_index('idx_vendor_cache_city', 'letzshop_vendor_cache', ['city'], unique=False) - op.create_index('idx_vendor_cache_claimed', 'letzshop_vendor_cache', ['claimed_by_vendor_id'], unique=False) - op.create_index(op.f('ix_letzshop_vendor_cache_claimed_by_vendor_id'), 'letzshop_vendor_cache', ['claimed_by_vendor_id'], unique=False) - op.create_index(op.f('ix_letzshop_vendor_cache_id'), 'letzshop_vendor_cache', ['id'], unique=False) - op.create_index(op.f('ix_letzshop_vendor_cache_letzshop_id'), 'letzshop_vendor_cache', 
['letzshop_id'], unique=True) - op.create_index(op.f('ix_letzshop_vendor_cache_slug'), 'letzshop_vendor_cache', ['slug'], unique=True) - op.drop_constraint('architecture_rules_rule_id_key', 'architecture_rules', type_='unique') - op.alter_column('capacity_snapshots', 'created_at', + op.create_index("idx_vendor_cache_active", "letzshop_vendor_cache", ["is_active"], unique=False) + op.create_index("idx_vendor_cache_city", "letzshop_vendor_cache", ["city"], unique=False) + op.create_index("idx_vendor_cache_claimed", "letzshop_vendor_cache", ["claimed_by_vendor_id"], unique=False) + op.create_index(op.f("ix_letzshop_vendor_cache_claimed_by_vendor_id"), "letzshop_vendor_cache", ["claimed_by_vendor_id"], unique=False) + op.create_index(op.f("ix_letzshop_vendor_cache_id"), "letzshop_vendor_cache", ["id"], unique=False) + op.create_index(op.f("ix_letzshop_vendor_cache_letzshop_id"), "letzshop_vendor_cache", ["letzshop_id"], unique=True) + op.create_index(op.f("ix_letzshop_vendor_cache_slug"), "letzshop_vendor_cache", ["slug"], unique=True) + op.drop_constraint("architecture_rules_rule_id_key", "architecture_rules", type_="unique") + op.alter_column("capacity_snapshots", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('capacity_snapshots', 'updated_at', + existing_server_default=sa.text("now()")) + op.alter_column("capacity_snapshots", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.create_index(op.f('ix_features_id'), 'features', ['id'], unique=False) - op.create_index(op.f('ix_features_minimum_tier_id'), 'features', ['minimum_tier_id'], unique=False) - op.create_index('idx_inv_tx_order', 'inventory_transactions', ['order_id'], unique=False) - op.alter_column('invoices', 'created_at', + existing_server_default=sa.text("now()")) + 
op.create_index(op.f("ix_features_id"), "features", ["id"], unique=False) + op.create_index(op.f("ix_features_minimum_tier_id"), "features", ["minimum_tier_id"], unique=False) + op.create_index("idx_inv_tx_order", "inventory_transactions", ["order_id"], unique=False) + op.alter_column("invoices", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('invoices', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("invoices", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('letzshop_fulfillment_queue', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("letzshop_fulfillment_queue", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('letzshop_fulfillment_queue', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("letzshop_fulfillment_queue", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('letzshop_sync_logs', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("letzshop_sync_logs", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('letzshop_sync_logs', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("letzshop_sync_logs", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), 
existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('media_files', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("media_files", "created_at", existing_type=postgresql.TIMESTAMP(), nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('media_files', 'updated_at', + existing_server_default=sa.text("now()")) + op.alter_column("media_files", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=False) - op.alter_column('order_item_exceptions', 'created_at', + op.alter_column("order_item_exceptions", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('order_item_exceptions', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("order_item_exceptions", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('order_items', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("order_items", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('order_items', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("order_items", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('orders', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("orders", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - 
existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('orders', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("orders", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.drop_index('ix_password_reset_tokens_customer_id', table_name='password_reset_tokens') - op.create_index(op.f('ix_password_reset_tokens_id'), 'password_reset_tokens', ['id'], unique=False) - op.alter_column('product_media', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.drop_index("ix_password_reset_tokens_customer_id", table_name="password_reset_tokens") + op.create_index(op.f("ix_password_reset_tokens_id"), "password_reset_tokens", ["id"], unique=False) + op.alter_column("product_media", "created_at", existing_type=postgresql.TIMESTAMP(), nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('product_media', 'updated_at', + existing_server_default=sa.text("now()")) + op.alter_column("product_media", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=False) - op.alter_column('products', 'is_digital', + op.alter_column("products", "is_digital", existing_type=sa.BOOLEAN(), nullable=True, - existing_server_default=sa.text('false')) - op.alter_column('products', 'product_type', + existing_server_default=sa.text("false")) + op.alter_column("products", "product_type", existing_type=sa.VARCHAR(length=20), nullable=True, existing_server_default=sa.text("'physical'::character varying")) - op.drop_index('idx_product_is_digital', table_name='products') - op.create_index(op.f('ix_products_is_digital'), 'products', ['is_digital'], unique=False) - op.drop_constraint('uq_vendor_email_settings_vendor_id', 'vendor_email_settings', type_='unique') - op.drop_index('ix_vendor_email_templates_lookup', table_name='vendor_email_templates') - 
op.create_index(op.f('ix_vendor_email_templates_id'), 'vendor_email_templates', ['id'], unique=False) - op.alter_column('vendor_invoice_settings', 'created_at', + op.drop_index("idx_product_is_digital", table_name="products") + op.create_index(op.f("ix_products_is_digital"), "products", ["is_digital"], unique=False) + op.drop_constraint("uq_vendor_email_settings_vendor_id", "vendor_email_settings", type_="unique") + op.drop_index("ix_vendor_email_templates_lookup", table_name="vendor_email_templates") + op.create_index(op.f("ix_vendor_email_templates_id"), "vendor_email_templates", ["id"], unique=False) + op.alter_column("vendor_invoice_settings", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('vendor_invoice_settings', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("vendor_invoice_settings", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.drop_constraint('vendor_invoice_settings_vendor_id_key', 'vendor_invoice_settings', type_='unique') - op.alter_column('vendor_letzshop_credentials', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.drop_constraint("vendor_invoice_settings_vendor_id_key", "vendor_invoice_settings", type_="unique") + op.alter_column("vendor_letzshop_credentials", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('vendor_letzshop_credentials', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("vendor_letzshop_credentials", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - 
existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.drop_constraint('vendor_letzshop_credentials_vendor_id_key', 'vendor_letzshop_credentials', type_='unique') - op.alter_column('vendor_subscriptions', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.drop_constraint("vendor_letzshop_credentials_vendor_id_key", "vendor_letzshop_credentials", type_="unique") + op.alter_column("vendor_subscriptions", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('vendor_subscriptions', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("vendor_subscriptions", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.drop_constraint('vendor_subscriptions_vendor_id_key', 'vendor_subscriptions', type_='unique') - op.drop_constraint('fk_vendor_subscriptions_tier_id', 'vendor_subscriptions', type_='foreignkey') - op.create_foreign_key(None, 'vendor_subscriptions', 'subscription_tiers', ['tier_id'], ['id']) - op.alter_column('vendors', 'storefront_locale', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.drop_constraint("vendor_subscriptions_vendor_id_key", "vendor_subscriptions", type_="unique") + op.drop_constraint("fk_vendor_subscriptions_tier_id", "vendor_subscriptions", type_="foreignkey") + op.create_foreign_key(None, "vendor_subscriptions", "subscription_tiers", ["tier_id"], ["id"]) + op.alter_column("vendors", "storefront_locale", existing_type=sa.VARCHAR(length=10), comment=None, - existing_comment='Currency/number formatting locale (NULL = inherit from platform)', + existing_comment="Currency/number formatting locale (NULL = inherit from platform)", existing_nullable=True) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto 
generated by Alembic - please adjust! ### - op.alter_column('vendors', 'storefront_locale', + op.alter_column("vendors", "storefront_locale", existing_type=sa.VARCHAR(length=10), - comment='Currency/number formatting locale (NULL = inherit from platform)', + comment="Currency/number formatting locale (NULL = inherit from platform)", existing_nullable=True) - op.drop_constraint(None, 'vendor_subscriptions', type_='foreignkey') - op.create_foreign_key('fk_vendor_subscriptions_tier_id', 'vendor_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], ondelete='SET NULL') - op.create_unique_constraint('vendor_subscriptions_vendor_id_key', 'vendor_subscriptions', ['vendor_id']) - op.alter_column('vendor_subscriptions', 'updated_at', + op.drop_constraint(None, "vendor_subscriptions", type_="foreignkey") + op.create_foreign_key("fk_vendor_subscriptions_tier_id", "vendor_subscriptions", "subscription_tiers", ["tier_id"], ["id"], ondelete="SET NULL") + op.create_unique_constraint("vendor_subscriptions_vendor_id_key", "vendor_subscriptions", ["vendor_id"]) + op.alter_column("vendor_subscriptions", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('vendor_subscriptions', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("vendor_subscriptions", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.create_unique_constraint('vendor_letzshop_credentials_vendor_id_key', 'vendor_letzshop_credentials', ['vendor_id']) - op.alter_column('vendor_letzshop_credentials', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.create_unique_constraint("vendor_letzshop_credentials_vendor_id_key", "vendor_letzshop_credentials", ["vendor_id"]) + 
op.alter_column("vendor_letzshop_credentials", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('vendor_letzshop_credentials', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("vendor_letzshop_credentials", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.create_unique_constraint('vendor_invoice_settings_vendor_id_key', 'vendor_invoice_settings', ['vendor_id']) - op.alter_column('vendor_invoice_settings', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.create_unique_constraint("vendor_invoice_settings_vendor_id_key", "vendor_invoice_settings", ["vendor_id"]) + op.alter_column("vendor_invoice_settings", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('vendor_invoice_settings', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("vendor_invoice_settings", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.drop_index(op.f('ix_vendor_email_templates_id'), table_name='vendor_email_templates') - op.create_index('ix_vendor_email_templates_lookup', 'vendor_email_templates', ['vendor_id', 'template_code', 'language'], unique=False) - op.create_unique_constraint('uq_vendor_email_settings_vendor_id', 'vendor_email_settings', ['vendor_id']) - op.drop_index(op.f('ix_products_is_digital'), table_name='products') - op.create_index('idx_product_is_digital', 'products', ['is_digital'], unique=False) - op.alter_column('products', 'product_type', + 
existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.drop_index(op.f("ix_vendor_email_templates_id"), table_name="vendor_email_templates") + op.create_index("ix_vendor_email_templates_lookup", "vendor_email_templates", ["vendor_id", "template_code", "language"], unique=False) + op.create_unique_constraint("uq_vendor_email_settings_vendor_id", "vendor_email_settings", ["vendor_id"]) + op.drop_index(op.f("ix_products_is_digital"), table_name="products") + op.create_index("idx_product_is_digital", "products", ["is_digital"], unique=False) + op.alter_column("products", "product_type", existing_type=sa.VARCHAR(length=20), nullable=False, existing_server_default=sa.text("'physical'::character varying")) - op.alter_column('products', 'is_digital', + op.alter_column("products", "is_digital", existing_type=sa.BOOLEAN(), nullable=False, - existing_server_default=sa.text('false')) - op.alter_column('product_media', 'updated_at', + existing_server_default=sa.text("false")) + op.alter_column("product_media", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=True) - op.alter_column('product_media', 'created_at', + op.alter_column("product_media", "created_at", existing_type=postgresql.TIMESTAMP(), nullable=True, - existing_server_default=sa.text('now()')) - op.drop_index(op.f('ix_password_reset_tokens_id'), table_name='password_reset_tokens') - op.create_index('ix_password_reset_tokens_customer_id', 'password_reset_tokens', ['customer_id'], unique=False) - op.alter_column('orders', 'updated_at', + existing_server_default=sa.text("now()")) + op.drop_index(op.f("ix_password_reset_tokens_id"), table_name="password_reset_tokens") + op.create_index("ix_password_reset_tokens_customer_id", "password_reset_tokens", ["customer_id"], unique=False) + op.alter_column("orders", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('orders', 
'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("orders", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('order_items', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("order_items", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('order_items', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("order_items", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('order_item_exceptions', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("order_item_exceptions", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('order_item_exceptions', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("order_item_exceptions", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('media_files', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("media_files", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=True) - op.alter_column('media_files', 'created_at', + op.alter_column("media_files", "created_at", existing_type=postgresql.TIMESTAMP(), nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('letzshop_sync_logs', 'updated_at', + 
existing_server_default=sa.text("now()")) + op.alter_column("letzshop_sync_logs", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('letzshop_sync_logs', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("letzshop_sync_logs", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('letzshop_fulfillment_queue', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("letzshop_fulfillment_queue", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('letzshop_fulfillment_queue', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("letzshop_fulfillment_queue", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('invoices', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("invoices", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('invoices', 'created_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.alter_column("invoices", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.drop_index('idx_inv_tx_order', table_name='inventory_transactions') - op.drop_index(op.f('ix_features_minimum_tier_id'), table_name='features') - 
op.drop_index(op.f('ix_features_id'), table_name='features') - op.alter_column('capacity_snapshots', 'updated_at', + existing_server_default=sa.text("CURRENT_TIMESTAMP")) + op.drop_index("idx_inv_tx_order", table_name="inventory_transactions") + op.drop_index(op.f("ix_features_minimum_tier_id"), table_name="features") + op.drop_index(op.f("ix_features_id"), table_name="features") + op.alter_column("capacity_snapshots", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('capacity_snapshots', 'created_at', + existing_server_default=sa.text("now()")) + op.alter_column("capacity_snapshots", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.create_unique_constraint('architecture_rules_rule_id_key', 'architecture_rules', ['rule_id']) - op.drop_index(op.f('ix_letzshop_vendor_cache_slug'), table_name='letzshop_vendor_cache') - op.drop_index(op.f('ix_letzshop_vendor_cache_letzshop_id'), table_name='letzshop_vendor_cache') - op.drop_index(op.f('ix_letzshop_vendor_cache_id'), table_name='letzshop_vendor_cache') - op.drop_index(op.f('ix_letzshop_vendor_cache_claimed_by_vendor_id'), table_name='letzshop_vendor_cache') - op.drop_index('idx_vendor_cache_claimed', table_name='letzshop_vendor_cache') - op.drop_index('idx_vendor_cache_city', table_name='letzshop_vendor_cache') - op.drop_index('idx_vendor_cache_active', table_name='letzshop_vendor_cache') - op.drop_table('letzshop_vendor_cache') + existing_server_default=sa.text("now()")) + op.create_unique_constraint("architecture_rules_rule_id_key", "architecture_rules", ["rule_id"]) + op.drop_index(op.f("ix_letzshop_vendor_cache_slug"), table_name="letzshop_vendor_cache") + op.drop_index(op.f("ix_letzshop_vendor_cache_letzshop_id"), table_name="letzshop_vendor_cache") + 
op.drop_index(op.f("ix_letzshop_vendor_cache_id"), table_name="letzshop_vendor_cache") + op.drop_index(op.f("ix_letzshop_vendor_cache_claimed_by_vendor_id"), table_name="letzshop_vendor_cache") + op.drop_index("idx_vendor_cache_claimed", table_name="letzshop_vendor_cache") + op.drop_index("idx_vendor_cache_city", table_name="letzshop_vendor_cache") + op.drop_index("idx_vendor_cache_active", table_name="letzshop_vendor_cache") + op.drop_table("letzshop_vendor_cache") # ### end Alembic commands ### diff --git a/alembic/versions_backup/204273a59d73_add_letzshop_historical_import_jobs_.py b/alembic/versions_backup/204273a59d73_add_letzshop_historical_import_jobs_.py index 44e6fe65..c326b952 100644 --- a/alembic/versions_backup/204273a59d73_add_letzshop_historical_import_jobs_.py +++ b/alembic/versions_backup/204273a59d73_add_letzshop_historical_import_jobs_.py @@ -5,53 +5,55 @@ Revises: cb88bc9b5f86 Create Date: 2025-12-19 05:40:53.463341 """ -from typing import Sequence, Union +from collections.abc import Sequence + +import sqlalchemy as sa from alembic import op -import sqlalchemy as sa + # Removed: from sqlalchemy.dialects import sqlite (using sa.JSON for PostgreSQL) # revision identifiers, used by Alembic. 
-revision: str = '204273a59d73' -down_revision: Union[str, None] = 'cb88bc9b5f86' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "204273a59d73" +down_revision: str | None = "cb88bc9b5f86" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: - op.create_table('letzshop_historical_import_jobs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.Column('status', sa.String(length=50), nullable=False), - sa.Column('current_phase', sa.String(length=20), nullable=True), - sa.Column('current_page', sa.Integer(), nullable=True), - sa.Column('total_pages', sa.Integer(), nullable=True), - sa.Column('shipments_fetched', sa.Integer(), nullable=True), - sa.Column('orders_processed', sa.Integer(), nullable=True), - sa.Column('orders_imported', sa.Integer(), nullable=True), - sa.Column('orders_updated', sa.Integer(), nullable=True), - sa.Column('orders_skipped', sa.Integer(), nullable=True), - sa.Column('products_matched', sa.Integer(), nullable=True), - sa.Column('products_not_found', sa.Integer(), nullable=True), - sa.Column('confirmed_stats', sa.JSON(), nullable=True), - sa.Column('declined_stats', sa.JSON(), nullable=True), - sa.Column('error_message', sa.Text(), nullable=True), - sa.Column('started_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table("letzshop_historical_import_jobs", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), 
nullable=False), + sa.Column("user_id", sa.Integer(), nullable=False), + sa.Column("status", sa.String(length=50), nullable=False), + sa.Column("current_phase", sa.String(length=20), nullable=True), + sa.Column("current_page", sa.Integer(), nullable=True), + sa.Column("total_pages", sa.Integer(), nullable=True), + sa.Column("shipments_fetched", sa.Integer(), nullable=True), + sa.Column("orders_processed", sa.Integer(), nullable=True), + sa.Column("orders_imported", sa.Integer(), nullable=True), + sa.Column("orders_updated", sa.Integer(), nullable=True), + sa.Column("orders_skipped", sa.Integer(), nullable=True), + sa.Column("products_matched", sa.Integer(), nullable=True), + sa.Column("products_not_found", sa.Integer(), nullable=True), + sa.Column("confirmed_stats", sa.JSON(), nullable=True), + sa.Column("declined_stats", sa.JSON(), nullable=True), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("started_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["user_id"], ["users.id"], ), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_historical_import_vendor', 'letzshop_historical_import_jobs', ['vendor_id', 'status'], unique=False) - op.create_index(op.f('ix_letzshop_historical_import_jobs_id'), 'letzshop_historical_import_jobs', ['id'], unique=False) - op.create_index(op.f('ix_letzshop_historical_import_jobs_vendor_id'), 'letzshop_historical_import_jobs', ['vendor_id'], unique=False) + op.create_index("idx_historical_import_vendor", "letzshop_historical_import_jobs", ["vendor_id", "status"], unique=False) + op.create_index(op.f("ix_letzshop_historical_import_jobs_id"), "letzshop_historical_import_jobs", ["id"], unique=False) + 
op.create_index(op.f("ix_letzshop_historical_import_jobs_vendor_id"), "letzshop_historical_import_jobs", ["vendor_id"], unique=False) def downgrade() -> None: - op.drop_index(op.f('ix_letzshop_historical_import_jobs_vendor_id'), table_name='letzshop_historical_import_jobs') - op.drop_index(op.f('ix_letzshop_historical_import_jobs_id'), table_name='letzshop_historical_import_jobs') - op.drop_index('idx_historical_import_vendor', table_name='letzshop_historical_import_jobs') - op.drop_table('letzshop_historical_import_jobs') + op.drop_index(op.f("ix_letzshop_historical_import_jobs_vendor_id"), table_name="letzshop_historical_import_jobs") + op.drop_index(op.f("ix_letzshop_historical_import_jobs_id"), table_name="letzshop_historical_import_jobs") + op.drop_index("idx_historical_import_vendor", table_name="letzshop_historical_import_jobs") + op.drop_table("letzshop_historical_import_jobs") diff --git a/alembic/versions_backup/2362c2723a93_add_order_date_to_letzshop_orders.py b/alembic/versions_backup/2362c2723a93_add_order_date_to_letzshop_orders.py index c96cc7e1..52125995 100644 --- a/alembic/versions_backup/2362c2723a93_add_order_date_to_letzshop_orders.py +++ b/alembic/versions_backup/2362c2723a93_add_order_date_to_letzshop_orders.py @@ -5,23 +5,23 @@ Revises: 204273a59d73 Create Date: 2025-12-19 08:46:23.731912 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = '2362c2723a93' -down_revision: Union[str, None] = '204273a59d73' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "2362c2723a93" +down_revision: str | None = "204273a59d73" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add order_date column to letzshop_orders table - op.add_column('letzshop_orders', sa.Column('order_date', sa.DateTime(timezone=True), nullable=True)) + op.add_column("letzshop_orders", sa.Column("order_date", sa.DateTime(timezone=True), nullable=True)) def downgrade() -> None: - op.drop_column('letzshop_orders', 'order_date') + op.drop_column("letzshop_orders", "order_date") diff --git a/alembic/versions_backup/28d44d503cac_add_contact_fields_to_vendor.py b/alembic/versions_backup/28d44d503cac_add_contact_fields_to_vendor.py index efe0a18d..15238d4f 100644 --- a/alembic/versions_backup/28d44d503cac_add_contact_fields_to_vendor.py +++ b/alembic/versions_backup/28d44d503cac_add_contact_fields_to_vendor.py @@ -5,33 +5,33 @@ Revises: 9f3a25ea4991 Create Date: 2025-12-03 22:26:02.161087 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = '28d44d503cac' -down_revision: Union[str, None] = '9f3a25ea4991' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "28d44d503cac" +down_revision: str | None = "9f3a25ea4991" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add nullable contact fields to vendor table # These allow vendor-specific branding/identity, overriding company defaults - op.add_column('vendors', sa.Column('contact_email', sa.String(255), nullable=True)) - op.add_column('vendors', sa.Column('contact_phone', sa.String(50), nullable=True)) - op.add_column('vendors', sa.Column('website', sa.String(255), nullable=True)) - op.add_column('vendors', sa.Column('business_address', sa.Text(), nullable=True)) - op.add_column('vendors', sa.Column('tax_number', sa.String(100), nullable=True)) + op.add_column("vendors", sa.Column("contact_email", sa.String(255), nullable=True)) + op.add_column("vendors", sa.Column("contact_phone", sa.String(50), nullable=True)) + op.add_column("vendors", sa.Column("website", sa.String(255), nullable=True)) + op.add_column("vendors", sa.Column("business_address", sa.Text(), nullable=True)) + op.add_column("vendors", sa.Column("tax_number", sa.String(100), nullable=True)) def downgrade() -> None: # Remove contact fields from vendor table - op.drop_column('vendors', 'tax_number') - op.drop_column('vendors', 'business_address') - op.drop_column('vendors', 'website') - op.drop_column('vendors', 'contact_phone') - op.drop_column('vendors', 'contact_email') + op.drop_column("vendors", "tax_number") + op.drop_column("vendors", "business_address") + op.drop_column("vendors", "website") + op.drop_column("vendors", "contact_phone") + op.drop_column("vendors", "contact_email") diff --git a/alembic/versions_backup/2953ed10d22c_add_subscription_billing_tables.py 
b/alembic/versions_backup/2953ed10d22c_add_subscription_billing_tables.py index 346270fd..82c5f4f5 100644 --- a/alembic/versions_backup/2953ed10d22c_add_subscription_billing_tables.py +++ b/alembic/versions_backup/2953ed10d22c_add_subscription_billing_tables.py @@ -5,18 +5,20 @@ Revises: e1bfb453fbe9 Create Date: 2025-12-25 18:29:34.167773 """ +from collections.abc import Sequence from datetime import datetime -from typing import Sequence, Union + +import sqlalchemy as sa from alembic import op -import sqlalchemy as sa + # Removed: from sqlalchemy.dialects import sqlite (using sa.JSON for PostgreSQL) # revision identifiers, used by Alembic. -revision: str = '2953ed10d22c' -down_revision: Union[str, None] = 'e1bfb453fbe9' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "2953ed10d22c" +down_revision: str | None = "e1bfb453fbe9" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: @@ -25,146 +27,146 @@ def upgrade() -> None: # ========================================================================= # subscription_tiers - Database-driven tier definitions - op.create_table('subscription_tiers', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('code', sa.String(length=30), nullable=False), - sa.Column('name', sa.String(length=100), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('price_monthly_cents', sa.Integer(), nullable=False), - sa.Column('price_annual_cents', sa.Integer(), nullable=True), - sa.Column('orders_per_month', sa.Integer(), nullable=True), - sa.Column('products_limit', sa.Integer(), nullable=True), - sa.Column('team_members', sa.Integer(), nullable=True), - sa.Column('order_history_months', sa.Integer(), nullable=True), - sa.Column('features', sa.JSON(), nullable=True), - sa.Column('stripe_product_id', sa.String(length=100), nullable=True), - 
sa.Column('stripe_price_monthly_id', sa.String(length=100), nullable=True), - sa.Column('stripe_price_annual_id', sa.String(length=100), nullable=True), - sa.Column('display_order', sa.Integer(), nullable=True), - sa.Column('is_active', sa.Boolean(), nullable=False), - sa.Column('is_public', sa.Boolean(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id') + op.create_table("subscription_tiers", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=30), nullable=False), + sa.Column("name", sa.String(length=100), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("price_monthly_cents", sa.Integer(), nullable=False), + sa.Column("price_annual_cents", sa.Integer(), nullable=True), + sa.Column("orders_per_month", sa.Integer(), nullable=True), + sa.Column("products_limit", sa.Integer(), nullable=True), + sa.Column("team_members", sa.Integer(), nullable=True), + sa.Column("order_history_months", sa.Integer(), nullable=True), + sa.Column("features", sa.JSON(), nullable=True), + sa.Column("stripe_product_id", sa.String(length=100), nullable=True), + sa.Column("stripe_price_monthly_id", sa.String(length=100), nullable=True), + sa.Column("stripe_price_annual_id", sa.String(length=100), nullable=True), + sa.Column("display_order", sa.Integer(), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=False), + sa.Column("is_public", sa.Boolean(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_subscription_tiers_code'), 'subscription_tiers', ['code'], unique=True) - op.create_index(op.f('ix_subscription_tiers_id'), 'subscription_tiers', ['id'], unique=False) + op.create_index(op.f("ix_subscription_tiers_code"), "subscription_tiers", ["code"], 
unique=True) + op.create_index(op.f("ix_subscription_tiers_id"), "subscription_tiers", ["id"], unique=False) # addon_products - Purchasable add-ons (domains, SSL, email) - op.create_table('addon_products', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('code', sa.String(length=50), nullable=False), - sa.Column('name', sa.String(length=100), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('category', sa.String(length=50), nullable=False), - sa.Column('price_cents', sa.Integer(), nullable=False), - sa.Column('billing_period', sa.String(length=20), nullable=False), - sa.Column('quantity_unit', sa.String(length=50), nullable=True), - sa.Column('quantity_value', sa.Integer(), nullable=True), - sa.Column('stripe_product_id', sa.String(length=100), nullable=True), - sa.Column('stripe_price_id', sa.String(length=100), nullable=True), - sa.Column('display_order', sa.Integer(), nullable=True), - sa.Column('is_active', sa.Boolean(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id') + op.create_table("addon_products", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=50), nullable=False), + sa.Column("name", sa.String(length=100), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("category", sa.String(length=50), nullable=False), + sa.Column("price_cents", sa.Integer(), nullable=False), + sa.Column("billing_period", sa.String(length=20), nullable=False), + sa.Column("quantity_unit", sa.String(length=50), nullable=True), + sa.Column("quantity_value", sa.Integer(), nullable=True), + sa.Column("stripe_product_id", sa.String(length=100), nullable=True), + sa.Column("stripe_price_id", sa.String(length=100), nullable=True), + sa.Column("display_order", sa.Integer(), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=False), + 
sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_addon_products_category'), 'addon_products', ['category'], unique=False) - op.create_index(op.f('ix_addon_products_code'), 'addon_products', ['code'], unique=True) - op.create_index(op.f('ix_addon_products_id'), 'addon_products', ['id'], unique=False) + op.create_index(op.f("ix_addon_products_category"), "addon_products", ["category"], unique=False) + op.create_index(op.f("ix_addon_products_code"), "addon_products", ["code"], unique=True) + op.create_index(op.f("ix_addon_products_id"), "addon_products", ["id"], unique=False) # billing_history - Invoice and payment history - op.create_table('billing_history', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('stripe_invoice_id', sa.String(length=100), nullable=True), - sa.Column('stripe_payment_intent_id', sa.String(length=100), nullable=True), - sa.Column('invoice_number', sa.String(length=50), nullable=True), - sa.Column('invoice_date', sa.DateTime(timezone=True), nullable=False), - sa.Column('due_date', sa.DateTime(timezone=True), nullable=True), - sa.Column('subtotal_cents', sa.Integer(), nullable=False), - sa.Column('tax_cents', sa.Integer(), nullable=False), - sa.Column('total_cents', sa.Integer(), nullable=False), - sa.Column('amount_paid_cents', sa.Integer(), nullable=False), - sa.Column('currency', sa.String(length=3), nullable=False), - sa.Column('status', sa.String(length=20), nullable=False), - sa.Column('invoice_pdf_url', sa.String(length=500), nullable=True), - sa.Column('hosted_invoice_url', sa.String(length=500), nullable=True), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('line_items', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - 
sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table("billing_history", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("stripe_invoice_id", sa.String(length=100), nullable=True), + sa.Column("stripe_payment_intent_id", sa.String(length=100), nullable=True), + sa.Column("invoice_number", sa.String(length=50), nullable=True), + sa.Column("invoice_date", sa.DateTime(timezone=True), nullable=False), + sa.Column("due_date", sa.DateTime(timezone=True), nullable=True), + sa.Column("subtotal_cents", sa.Integer(), nullable=False), + sa.Column("tax_cents", sa.Integer(), nullable=False), + sa.Column("total_cents", sa.Integer(), nullable=False), + sa.Column("amount_paid_cents", sa.Integer(), nullable=False), + sa.Column("currency", sa.String(length=3), nullable=False), + sa.Column("status", sa.String(length=20), nullable=False), + sa.Column("invoice_pdf_url", sa.String(length=500), nullable=True), + sa.Column("hosted_invoice_url", sa.String(length=500), nullable=True), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("line_items", sa.JSON(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_billing_status', 'billing_history', ['vendor_id', 'status'], unique=False) - op.create_index('idx_billing_vendor_date', 'billing_history', ['vendor_id', 'invoice_date'], unique=False) - op.create_index(op.f('ix_billing_history_id'), 'billing_history', ['id'], unique=False) - op.create_index(op.f('ix_billing_history_status'), 'billing_history', ['status'], unique=False) - op.create_index(op.f('ix_billing_history_stripe_invoice_id'), 'billing_history', ['stripe_invoice_id'], unique=True) - op.create_index(op.f('ix_billing_history_vendor_id'), 
'billing_history', ['vendor_id'], unique=False) + op.create_index("idx_billing_status", "billing_history", ["vendor_id", "status"], unique=False) + op.create_index("idx_billing_vendor_date", "billing_history", ["vendor_id", "invoice_date"], unique=False) + op.create_index(op.f("ix_billing_history_id"), "billing_history", ["id"], unique=False) + op.create_index(op.f("ix_billing_history_status"), "billing_history", ["status"], unique=False) + op.create_index(op.f("ix_billing_history_stripe_invoice_id"), "billing_history", ["stripe_invoice_id"], unique=True) + op.create_index(op.f("ix_billing_history_vendor_id"), "billing_history", ["vendor_id"], unique=False) # vendor_addons - Add-ons purchased by vendor - op.create_table('vendor_addons', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('addon_product_id', sa.Integer(), nullable=False), - sa.Column('status', sa.String(length=20), nullable=False), - sa.Column('domain_name', sa.String(length=255), nullable=True), - sa.Column('quantity', sa.Integer(), nullable=False), - sa.Column('stripe_subscription_item_id', sa.String(length=100), nullable=True), - sa.Column('period_start', sa.DateTime(timezone=True), nullable=True), - sa.Column('period_end', sa.DateTime(timezone=True), nullable=True), - sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['addon_product_id'], ['addon_products.id'], ), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table("vendor_addons", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("addon_product_id", sa.Integer(), nullable=False), + sa.Column("status", sa.String(length=20), nullable=False), + sa.Column("domain_name", sa.String(length=255), 
nullable=True), + sa.Column("quantity", sa.Integer(), nullable=False), + sa.Column("stripe_subscription_item_id", sa.String(length=100), nullable=True), + sa.Column("period_start", sa.DateTime(timezone=True), nullable=True), + sa.Column("period_end", sa.DateTime(timezone=True), nullable=True), + sa.Column("cancelled_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["addon_product_id"], ["addon_products.id"], ), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_vendor_addon_product', 'vendor_addons', ['vendor_id', 'addon_product_id'], unique=False) - op.create_index('idx_vendor_addon_status', 'vendor_addons', ['vendor_id', 'status'], unique=False) - op.create_index(op.f('ix_vendor_addons_addon_product_id'), 'vendor_addons', ['addon_product_id'], unique=False) - op.create_index(op.f('ix_vendor_addons_domain_name'), 'vendor_addons', ['domain_name'], unique=False) - op.create_index(op.f('ix_vendor_addons_id'), 'vendor_addons', ['id'], unique=False) - op.create_index(op.f('ix_vendor_addons_status'), 'vendor_addons', ['status'], unique=False) - op.create_index(op.f('ix_vendor_addons_vendor_id'), 'vendor_addons', ['vendor_id'], unique=False) + op.create_index("idx_vendor_addon_product", "vendor_addons", ["vendor_id", "addon_product_id"], unique=False) + op.create_index("idx_vendor_addon_status", "vendor_addons", ["vendor_id", "status"], unique=False) + op.create_index(op.f("ix_vendor_addons_addon_product_id"), "vendor_addons", ["addon_product_id"], unique=False) + op.create_index(op.f("ix_vendor_addons_domain_name"), "vendor_addons", ["domain_name"], unique=False) + op.create_index(op.f("ix_vendor_addons_id"), "vendor_addons", ["id"], unique=False) + op.create_index(op.f("ix_vendor_addons_status"), "vendor_addons", ["status"], unique=False) + 
op.create_index(op.f("ix_vendor_addons_vendor_id"), "vendor_addons", ["vendor_id"], unique=False) # stripe_webhook_events - Webhook idempotency tracking - op.create_table('stripe_webhook_events', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('event_id', sa.String(length=100), nullable=False), - sa.Column('event_type', sa.String(length=100), nullable=False), - sa.Column('status', sa.String(length=20), nullable=False), - sa.Column('processed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('error_message', sa.Text(), nullable=True), - sa.Column('payload_encrypted', sa.Text(), nullable=True), - sa.Column('vendor_id', sa.Integer(), nullable=True), - sa.Column('subscription_id', sa.Integer(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['subscription_id'], ['vendor_subscriptions.id'], ), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table("stripe_webhook_events", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("event_id", sa.String(length=100), nullable=False), + sa.Column("event_type", sa.String(length=100), nullable=False), + sa.Column("status", sa.String(length=20), nullable=False), + sa.Column("processed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("payload_encrypted", sa.Text(), nullable=True), + sa.Column("vendor_id", sa.Integer(), nullable=True), + sa.Column("subscription_id", sa.Integer(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["subscription_id"], ["vendor_subscriptions.id"], ), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_webhook_event_type_status', 'stripe_webhook_events', ['event_type', 
'status'], unique=False) - op.create_index(op.f('ix_stripe_webhook_events_event_id'), 'stripe_webhook_events', ['event_id'], unique=True) - op.create_index(op.f('ix_stripe_webhook_events_event_type'), 'stripe_webhook_events', ['event_type'], unique=False) - op.create_index(op.f('ix_stripe_webhook_events_id'), 'stripe_webhook_events', ['id'], unique=False) - op.create_index(op.f('ix_stripe_webhook_events_status'), 'stripe_webhook_events', ['status'], unique=False) - op.create_index(op.f('ix_stripe_webhook_events_subscription_id'), 'stripe_webhook_events', ['subscription_id'], unique=False) - op.create_index(op.f('ix_stripe_webhook_events_vendor_id'), 'stripe_webhook_events', ['vendor_id'], unique=False) + op.create_index("idx_webhook_event_type_status", "stripe_webhook_events", ["event_type", "status"], unique=False) + op.create_index(op.f("ix_stripe_webhook_events_event_id"), "stripe_webhook_events", ["event_id"], unique=True) + op.create_index(op.f("ix_stripe_webhook_events_event_type"), "stripe_webhook_events", ["event_type"], unique=False) + op.create_index(op.f("ix_stripe_webhook_events_id"), "stripe_webhook_events", ["id"], unique=False) + op.create_index(op.f("ix_stripe_webhook_events_status"), "stripe_webhook_events", ["status"], unique=False) + op.create_index(op.f("ix_stripe_webhook_events_subscription_id"), "stripe_webhook_events", ["subscription_id"], unique=False) + op.create_index(op.f("ix_stripe_webhook_events_vendor_id"), "stripe_webhook_events", ["vendor_id"], unique=False) # ========================================================================= # Add new columns to vendor_subscriptions # ========================================================================= - op.add_column('vendor_subscriptions', sa.Column('stripe_price_id', sa.String(length=100), nullable=True)) - op.add_column('vendor_subscriptions', sa.Column('stripe_payment_method_id', sa.String(length=100), nullable=True)) - op.add_column('vendor_subscriptions', 
sa.Column('proration_behavior', sa.String(length=50), nullable=True)) - op.add_column('vendor_subscriptions', sa.Column('scheduled_tier_change', sa.String(length=30), nullable=True)) - op.add_column('vendor_subscriptions', sa.Column('scheduled_change_at', sa.DateTime(timezone=True), nullable=True)) - op.add_column('vendor_subscriptions', sa.Column('payment_retry_count', sa.Integer(), server_default='0', nullable=False)) - op.add_column('vendor_subscriptions', sa.Column('last_payment_error', sa.Text(), nullable=True)) + op.add_column("vendor_subscriptions", sa.Column("stripe_price_id", sa.String(length=100), nullable=True)) + op.add_column("vendor_subscriptions", sa.Column("stripe_payment_method_id", sa.String(length=100), nullable=True)) + op.add_column("vendor_subscriptions", sa.Column("proration_behavior", sa.String(length=50), nullable=True)) + op.add_column("vendor_subscriptions", sa.Column("scheduled_tier_change", sa.String(length=30), nullable=True)) + op.add_column("vendor_subscriptions", sa.Column("scheduled_change_at", sa.DateTime(timezone=True), nullable=True)) + op.add_column("vendor_subscriptions", sa.Column("payment_retry_count", sa.Integer(), server_default="0", nullable=False)) + op.add_column("vendor_subscriptions", sa.Column("last_payment_error", sa.Text(), nullable=True)) # ========================================================================= # Seed subscription tiers @@ -172,106 +174,106 @@ def upgrade() -> None: now = datetime.utcnow() subscription_tiers = sa.table( - 'subscription_tiers', - sa.column('code', sa.String), - sa.column('name', sa.String), - sa.column('description', sa.Text), - sa.column('price_monthly_cents', sa.Integer), - sa.column('price_annual_cents', sa.Integer), - sa.column('orders_per_month', sa.Integer), - sa.column('products_limit', sa.Integer), - sa.column('team_members', sa.Integer), - sa.column('order_history_months', sa.Integer), - sa.column('features', sa.JSON), - sa.column('display_order', sa.Integer), - 
sa.column('is_active', sa.Boolean), - sa.column('is_public', sa.Boolean), - sa.column('created_at', sa.DateTime), - sa.column('updated_at', sa.DateTime), + "subscription_tiers", + sa.column("code", sa.String), + sa.column("name", sa.String), + sa.column("description", sa.Text), + sa.column("price_monthly_cents", sa.Integer), + sa.column("price_annual_cents", sa.Integer), + sa.column("orders_per_month", sa.Integer), + sa.column("products_limit", sa.Integer), + sa.column("team_members", sa.Integer), + sa.column("order_history_months", sa.Integer), + sa.column("features", sa.JSON), + sa.column("display_order", sa.Integer), + sa.column("is_active", sa.Boolean), + sa.column("is_public", sa.Boolean), + sa.column("created_at", sa.DateTime), + sa.column("updated_at", sa.DateTime), ) op.bulk_insert(subscription_tiers, [ { - 'code': 'essential', - 'name': 'Essential', - 'description': 'Perfect for solo vendors getting started with Letzshop', - 'price_monthly_cents': 4900, - 'price_annual_cents': 49000, - 'orders_per_month': 100, - 'products_limit': 200, - 'team_members': 1, - 'order_history_months': 6, - 'features': ['letzshop_sync', 'inventory_basic', 'invoice_lu', 'customer_view'], - 'display_order': 1, - 'is_active': True, - 'is_public': True, - 'created_at': now, - 'updated_at': now, + "code": "essential", + "name": "Essential", + "description": "Perfect for solo vendors getting started with Letzshop", + "price_monthly_cents": 4900, + "price_annual_cents": 49000, + "orders_per_month": 100, + "products_limit": 200, + "team_members": 1, + "order_history_months": 6, + "features": ["letzshop_sync", "inventory_basic", "invoice_lu", "customer_view"], + "display_order": 1, + "is_active": True, + "is_public": True, + "created_at": now, + "updated_at": now, }, { - 'code': 'professional', - 'name': 'Professional', - 'description': 'For active multi-channel vendors shipping EU-wide', - 'price_monthly_cents': 9900, - 'price_annual_cents': 99000, - 'orders_per_month': 500, - 
'products_limit': None, - 'team_members': 3, - 'order_history_months': 24, - 'features': [ - 'letzshop_sync', 'inventory_locations', 'inventory_purchase_orders', - 'invoice_lu', 'invoice_eu_vat', 'customer_view', 'customer_export' + "code": "professional", + "name": "Professional", + "description": "For active multi-channel vendors shipping EU-wide", + "price_monthly_cents": 9900, + "price_annual_cents": 99000, + "orders_per_month": 500, + "products_limit": None, + "team_members": 3, + "order_history_months": 24, + "features": [ + "letzshop_sync", "inventory_locations", "inventory_purchase_orders", + "invoice_lu", "invoice_eu_vat", "customer_view", "customer_export" ], - 'display_order': 2, - 'is_active': True, - 'is_public': True, - 'created_at': now, - 'updated_at': now, + "display_order": 2, + "is_active": True, + "is_public": True, + "created_at": now, + "updated_at": now, }, { - 'code': 'business', - 'name': 'Business', - 'description': 'For high-volume vendors with teams and data-driven operations', - 'price_monthly_cents': 19900, - 'price_annual_cents': 199000, - 'orders_per_month': 2000, - 'products_limit': None, - 'team_members': 10, - 'order_history_months': None, - 'features': [ - 'letzshop_sync', 'inventory_locations', 'inventory_purchase_orders', - 'invoice_lu', 'invoice_eu_vat', 'invoice_bulk', 'customer_view', - 'customer_export', 'analytics_dashboard', 'accounting_export', - 'api_access', 'automation_rules', 'team_roles' + "code": "business", + "name": "Business", + "description": "For high-volume vendors with teams and data-driven operations", + "price_monthly_cents": 19900, + "price_annual_cents": 199000, + "orders_per_month": 2000, + "products_limit": None, + "team_members": 10, + "order_history_months": None, + "features": [ + "letzshop_sync", "inventory_locations", "inventory_purchase_orders", + "invoice_lu", "invoice_eu_vat", "invoice_bulk", "customer_view", + "customer_export", "analytics_dashboard", "accounting_export", + "api_access", 
"automation_rules", "team_roles" ], - 'display_order': 3, - 'is_active': True, - 'is_public': True, - 'created_at': now, - 'updated_at': now, + "display_order": 3, + "is_active": True, + "is_public": True, + "created_at": now, + "updated_at": now, }, { - 'code': 'enterprise', - 'name': 'Enterprise', - 'description': 'Custom solutions for large operations and agencies', - 'price_monthly_cents': 39900, - 'price_annual_cents': None, - 'orders_per_month': None, - 'products_limit': None, - 'team_members': None, - 'order_history_months': None, - 'features': [ - 'letzshop_sync', 'inventory_locations', 'inventory_purchase_orders', - 'invoice_lu', 'invoice_eu_vat', 'invoice_bulk', 'customer_view', - 'customer_export', 'analytics_dashboard', 'accounting_export', - 'api_access', 'automation_rules', 'team_roles', 'white_label', - 'multi_vendor', 'custom_integrations', 'sla_guarantee', 'dedicated_support' + "code": "enterprise", + "name": "Enterprise", + "description": "Custom solutions for large operations and agencies", + "price_monthly_cents": 39900, + "price_annual_cents": None, + "orders_per_month": None, + "products_limit": None, + "team_members": None, + "order_history_months": None, + "features": [ + "letzshop_sync", "inventory_locations", "inventory_purchase_orders", + "invoice_lu", "invoice_eu_vat", "invoice_bulk", "customer_view", + "customer_export", "analytics_dashboard", "accounting_export", + "api_access", "automation_rules", "team_roles", "white_label", + "multi_vendor", "custom_integrations", "sla_guarantee", "dedicated_support" ], - 'display_order': 4, - 'is_active': True, - 'is_public': False, - 'created_at': now, - 'updated_at': now, + "display_order": 4, + "is_active": True, + "is_public": False, + "created_at": now, + "updated_at": now, }, ]) @@ -279,141 +281,141 @@ def upgrade() -> None: # Seed add-on products # ========================================================================= addon_products = sa.table( - 'addon_products', - sa.column('code', 
sa.String), - sa.column('name', sa.String), - sa.column('description', sa.Text), - sa.column('category', sa.String), - sa.column('price_cents', sa.Integer), - sa.column('billing_period', sa.String), - sa.column('quantity_unit', sa.String), - sa.column('quantity_value', sa.Integer), - sa.column('display_order', sa.Integer), - sa.column('is_active', sa.Boolean), - sa.column('created_at', sa.DateTime), - sa.column('updated_at', sa.DateTime), + "addon_products", + sa.column("code", sa.String), + sa.column("name", sa.String), + sa.column("description", sa.Text), + sa.column("category", sa.String), + sa.column("price_cents", sa.Integer), + sa.column("billing_period", sa.String), + sa.column("quantity_unit", sa.String), + sa.column("quantity_value", sa.Integer), + sa.column("display_order", sa.Integer), + sa.column("is_active", sa.Boolean), + sa.column("created_at", sa.DateTime), + sa.column("updated_at", sa.DateTime), ) op.bulk_insert(addon_products, [ { - 'code': 'domain', - 'name': 'Custom Domain', - 'description': 'Connect your own domain with SSL certificate included', - 'category': 'domain', - 'price_cents': 1500, - 'billing_period': 'annual', - 'quantity_unit': None, - 'quantity_value': None, - 'display_order': 1, - 'is_active': True, - 'created_at': now, - 'updated_at': now, + "code": "domain", + "name": "Custom Domain", + "description": "Connect your own domain with SSL certificate included", + "category": "domain", + "price_cents": 1500, + "billing_period": "annual", + "quantity_unit": None, + "quantity_value": None, + "display_order": 1, + "is_active": True, + "created_at": now, + "updated_at": now, }, { - 'code': 'email_5', - 'name': '5 Email Addresses', - 'description': 'Professional email addresses on your domain', - 'category': 'email', - 'price_cents': 500, - 'billing_period': 'monthly', - 'quantity_unit': 'emails', - 'quantity_value': 5, - 'display_order': 2, - 'is_active': True, - 'created_at': now, - 'updated_at': now, + "code": "email_5", + "name": "5 
Email Addresses", + "description": "Professional email addresses on your domain", + "category": "email", + "price_cents": 500, + "billing_period": "monthly", + "quantity_unit": "emails", + "quantity_value": 5, + "display_order": 2, + "is_active": True, + "created_at": now, + "updated_at": now, }, { - 'code': 'email_10', - 'name': '10 Email Addresses', - 'description': 'Professional email addresses on your domain', - 'category': 'email', - 'price_cents': 900, - 'billing_period': 'monthly', - 'quantity_unit': 'emails', - 'quantity_value': 10, - 'display_order': 3, - 'is_active': True, - 'created_at': now, - 'updated_at': now, + "code": "email_10", + "name": "10 Email Addresses", + "description": "Professional email addresses on your domain", + "category": "email", + "price_cents": 900, + "billing_period": "monthly", + "quantity_unit": "emails", + "quantity_value": 10, + "display_order": 3, + "is_active": True, + "created_at": now, + "updated_at": now, }, { - 'code': 'email_25', - 'name': '25 Email Addresses', - 'description': 'Professional email addresses on your domain', - 'category': 'email', - 'price_cents': 1900, - 'billing_period': 'monthly', - 'quantity_unit': 'emails', - 'quantity_value': 25, - 'display_order': 4, - 'is_active': True, - 'created_at': now, - 'updated_at': now, + "code": "email_25", + "name": "25 Email Addresses", + "description": "Professional email addresses on your domain", + "category": "email", + "price_cents": 1900, + "billing_period": "monthly", + "quantity_unit": "emails", + "quantity_value": 25, + "display_order": 4, + "is_active": True, + "created_at": now, + "updated_at": now, }, { - 'code': 'storage_10gb', - 'name': 'Additional Storage (10GB)', - 'description': 'Extra storage for product images and files', - 'category': 'storage', - 'price_cents': 500, - 'billing_period': 'monthly', - 'quantity_unit': 'GB', - 'quantity_value': 10, - 'display_order': 5, - 'is_active': True, - 'created_at': now, - 'updated_at': now, + "code": 
"storage_10gb", + "name": "Additional Storage (10GB)", + "description": "Extra storage for product images and files", + "category": "storage", + "price_cents": 500, + "billing_period": "monthly", + "quantity_unit": "GB", + "quantity_value": 10, + "display_order": 5, + "is_active": True, + "created_at": now, + "updated_at": now, }, ]) def downgrade() -> None: # Remove new columns from vendor_subscriptions - op.drop_column('vendor_subscriptions', 'last_payment_error') - op.drop_column('vendor_subscriptions', 'payment_retry_count') - op.drop_column('vendor_subscriptions', 'scheduled_change_at') - op.drop_column('vendor_subscriptions', 'scheduled_tier_change') - op.drop_column('vendor_subscriptions', 'proration_behavior') - op.drop_column('vendor_subscriptions', 'stripe_payment_method_id') - op.drop_column('vendor_subscriptions', 'stripe_price_id') + op.drop_column("vendor_subscriptions", "last_payment_error") + op.drop_column("vendor_subscriptions", "payment_retry_count") + op.drop_column("vendor_subscriptions", "scheduled_change_at") + op.drop_column("vendor_subscriptions", "scheduled_tier_change") + op.drop_column("vendor_subscriptions", "proration_behavior") + op.drop_column("vendor_subscriptions", "stripe_payment_method_id") + op.drop_column("vendor_subscriptions", "stripe_price_id") # Drop stripe_webhook_events - op.drop_index(op.f('ix_stripe_webhook_events_vendor_id'), table_name='stripe_webhook_events') - op.drop_index(op.f('ix_stripe_webhook_events_subscription_id'), table_name='stripe_webhook_events') - op.drop_index(op.f('ix_stripe_webhook_events_status'), table_name='stripe_webhook_events') - op.drop_index(op.f('ix_stripe_webhook_events_id'), table_name='stripe_webhook_events') - op.drop_index(op.f('ix_stripe_webhook_events_event_type'), table_name='stripe_webhook_events') - op.drop_index(op.f('ix_stripe_webhook_events_event_id'), table_name='stripe_webhook_events') - op.drop_index('idx_webhook_event_type_status', table_name='stripe_webhook_events') - 
op.drop_table('stripe_webhook_events') + op.drop_index(op.f("ix_stripe_webhook_events_vendor_id"), table_name="stripe_webhook_events") + op.drop_index(op.f("ix_stripe_webhook_events_subscription_id"), table_name="stripe_webhook_events") + op.drop_index(op.f("ix_stripe_webhook_events_status"), table_name="stripe_webhook_events") + op.drop_index(op.f("ix_stripe_webhook_events_id"), table_name="stripe_webhook_events") + op.drop_index(op.f("ix_stripe_webhook_events_event_type"), table_name="stripe_webhook_events") + op.drop_index(op.f("ix_stripe_webhook_events_event_id"), table_name="stripe_webhook_events") + op.drop_index("idx_webhook_event_type_status", table_name="stripe_webhook_events") + op.drop_table("stripe_webhook_events") # Drop vendor_addons - op.drop_index(op.f('ix_vendor_addons_vendor_id'), table_name='vendor_addons') - op.drop_index(op.f('ix_vendor_addons_status'), table_name='vendor_addons') - op.drop_index(op.f('ix_vendor_addons_id'), table_name='vendor_addons') - op.drop_index(op.f('ix_vendor_addons_domain_name'), table_name='vendor_addons') - op.drop_index(op.f('ix_vendor_addons_addon_product_id'), table_name='vendor_addons') - op.drop_index('idx_vendor_addon_status', table_name='vendor_addons') - op.drop_index('idx_vendor_addon_product', table_name='vendor_addons') - op.drop_table('vendor_addons') + op.drop_index(op.f("ix_vendor_addons_vendor_id"), table_name="vendor_addons") + op.drop_index(op.f("ix_vendor_addons_status"), table_name="vendor_addons") + op.drop_index(op.f("ix_vendor_addons_id"), table_name="vendor_addons") + op.drop_index(op.f("ix_vendor_addons_domain_name"), table_name="vendor_addons") + op.drop_index(op.f("ix_vendor_addons_addon_product_id"), table_name="vendor_addons") + op.drop_index("idx_vendor_addon_status", table_name="vendor_addons") + op.drop_index("idx_vendor_addon_product", table_name="vendor_addons") + op.drop_table("vendor_addons") # Drop billing_history - op.drop_index(op.f('ix_billing_history_vendor_id'), 
table_name='billing_history') - op.drop_index(op.f('ix_billing_history_stripe_invoice_id'), table_name='billing_history') - op.drop_index(op.f('ix_billing_history_status'), table_name='billing_history') - op.drop_index(op.f('ix_billing_history_id'), table_name='billing_history') - op.drop_index('idx_billing_vendor_date', table_name='billing_history') - op.drop_index('idx_billing_status', table_name='billing_history') - op.drop_table('billing_history') + op.drop_index(op.f("ix_billing_history_vendor_id"), table_name="billing_history") + op.drop_index(op.f("ix_billing_history_stripe_invoice_id"), table_name="billing_history") + op.drop_index(op.f("ix_billing_history_status"), table_name="billing_history") + op.drop_index(op.f("ix_billing_history_id"), table_name="billing_history") + op.drop_index("idx_billing_vendor_date", table_name="billing_history") + op.drop_index("idx_billing_status", table_name="billing_history") + op.drop_table("billing_history") # Drop addon_products - op.drop_index(op.f('ix_addon_products_id'), table_name='addon_products') - op.drop_index(op.f('ix_addon_products_code'), table_name='addon_products') - op.drop_index(op.f('ix_addon_products_category'), table_name='addon_products') - op.drop_table('addon_products') + op.drop_index(op.f("ix_addon_products_id"), table_name="addon_products") + op.drop_index(op.f("ix_addon_products_code"), table_name="addon_products") + op.drop_index(op.f("ix_addon_products_category"), table_name="addon_products") + op.drop_table("addon_products") # Drop subscription_tiers - op.drop_index(op.f('ix_subscription_tiers_id'), table_name='subscription_tiers') - op.drop_index(op.f('ix_subscription_tiers_code'), table_name='subscription_tiers') - op.drop_table('subscription_tiers') + op.drop_index(op.f("ix_subscription_tiers_id"), table_name="subscription_tiers") + op.drop_index(op.f("ix_subscription_tiers_code"), table_name="subscription_tiers") + op.drop_table("subscription_tiers") diff --git 
a/alembic/versions_backup/404b3e2d2865_add_letzshop_vendor_fields_and_trial_.py b/alembic/versions_backup/404b3e2d2865_add_letzshop_vendor_fields_and_trial_.py index 6fa0242f..e57ecdb0 100644 --- a/alembic/versions_backup/404b3e2d2865_add_letzshop_vendor_fields_and_trial_.py +++ b/alembic/versions_backup/404b3e2d2865_add_letzshop_vendor_fields_and_trial_.py @@ -9,36 +9,36 @@ Adds: - vendors.letzshop_vendor_slug - Letzshop shop URL slug - vendor_subscriptions.card_collected_at - Track when card was collected for trial """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = '404b3e2d2865' -down_revision: Union[str, None] = 'l0a1b2c3d4e5' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "404b3e2d2865" +down_revision: str | None = "l0a1b2c3d4e5" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add Letzshop vendor identity fields to vendors table - op.add_column('vendors', sa.Column('letzshop_vendor_id', sa.String(length=100), nullable=True)) - op.add_column('vendors', sa.Column('letzshop_vendor_slug', sa.String(length=200), nullable=True)) - op.create_index(op.f('ix_vendors_letzshop_vendor_id'), 'vendors', ['letzshop_vendor_id'], unique=True) - op.create_index(op.f('ix_vendors_letzshop_vendor_slug'), 'vendors', ['letzshop_vendor_slug'], unique=False) + op.add_column("vendors", sa.Column("letzshop_vendor_id", sa.String(length=100), nullable=True)) + op.add_column("vendors", sa.Column("letzshop_vendor_slug", sa.String(length=200), nullable=True)) + op.create_index(op.f("ix_vendors_letzshop_vendor_id"), "vendors", ["letzshop_vendor_id"], unique=True) + op.create_index(op.f("ix_vendors_letzshop_vendor_slug"), "vendors", ["letzshop_vendor_slug"], unique=False) # Add card 
collection tracking to vendor_subscriptions - op.add_column('vendor_subscriptions', sa.Column('card_collected_at', sa.DateTime(timezone=True), nullable=True)) + op.add_column("vendor_subscriptions", sa.Column("card_collected_at", sa.DateTime(timezone=True), nullable=True)) def downgrade() -> None: # Remove card collection tracking from vendor_subscriptions - op.drop_column('vendor_subscriptions', 'card_collected_at') + op.drop_column("vendor_subscriptions", "card_collected_at") # Remove Letzshop vendor identity fields from vendors - op.drop_index(op.f('ix_vendors_letzshop_vendor_slug'), table_name='vendors') - op.drop_index(op.f('ix_vendors_letzshop_vendor_id'), table_name='vendors') - op.drop_column('vendors', 'letzshop_vendor_slug') - op.drop_column('vendors', 'letzshop_vendor_id') + op.drop_index(op.f("ix_vendors_letzshop_vendor_slug"), table_name="vendors") + op.drop_index(op.f("ix_vendors_letzshop_vendor_id"), table_name="vendors") + op.drop_column("vendors", "letzshop_vendor_slug") + op.drop_column("vendors", "letzshop_vendor_id") diff --git a/alembic/versions_backup/4951b2e50581_initial_migration_all_tables.py b/alembic/versions_backup/4951b2e50581_initial_migration_all_tables.py index bdf730a1..2d74e741 100644 --- a/alembic/versions_backup/4951b2e50581_initial_migration_all_tables.py +++ b/alembic/versions_backup/4951b2e50581_initial_migration_all_tables.py @@ -6,7 +6,7 @@ Create Date: 2025-10-27 22:28:33.137564 """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa @@ -14,9 +14,9 @@ from alembic import op # revision identifiers, used by Alembic. 
revision: str = "4951b2e50581" -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = None +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/55b92e155566_add_order_tracking_fields.py b/alembic/versions_backup/55b92e155566_add_order_tracking_fields.py index 8e80682d..031afa87 100644 --- a/alembic/versions_backup/55b92e155566_add_order_tracking_fields.py +++ b/alembic/versions_backup/55b92e155566_add_order_tracking_fields.py @@ -5,27 +5,27 @@ Revises: d2e3f4a5b6c7 Create Date: 2025-12-20 18:07:51.144136 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = '55b92e155566' -down_revision: Union[str, None] = 'd2e3f4a5b6c7' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "55b92e155566" +down_revision: str | None = "d2e3f4a5b6c7" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add new tracking fields to orders table - op.add_column('orders', sa.Column('tracking_url', sa.String(length=500), nullable=True)) - op.add_column('orders', sa.Column('shipment_number', sa.String(length=100), nullable=True)) - op.add_column('orders', sa.Column('shipping_carrier', sa.String(length=50), nullable=True)) + op.add_column("orders", sa.Column("tracking_url", sa.String(length=500), nullable=True)) + op.add_column("orders", sa.Column("shipment_number", sa.String(length=100), nullable=True)) + op.add_column("orders", sa.Column("shipping_carrier", sa.String(length=50), nullable=True)) def downgrade() -> None: - op.drop_column('orders', 'shipping_carrier') - 
op.drop_column('orders', 'shipment_number') - op.drop_column('orders', 'tracking_url') + op.drop_column("orders", "shipping_carrier") + op.drop_column("orders", "shipment_number") + op.drop_column("orders", "tracking_url") diff --git a/alembic/versions_backup/5818330181a5_make_vendor_owner_user_id_nullable_for_.py b/alembic/versions_backup/5818330181a5_make_vendor_owner_user_id_nullable_for_.py index cb3a74bc..fb360e18 100644 --- a/alembic/versions_backup/5818330181a5_make_vendor_owner_user_id_nullable_for_.py +++ b/alembic/versions_backup/5818330181a5_make_vendor_owner_user_id_nullable_for_.py @@ -5,17 +5,17 @@ Revises: d0325d7c0f25 Create Date: 2025-12-01 20:30:06.158027 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = '5818330181a5' -down_revision: Union[str, None] = 'd0325d7c0f25' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "5818330181a5" +down_revision: str | None = "d0325d7c0f25" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: @@ -29,8 +29,8 @@ def upgrade() -> None: This allows one company owner to manage multiple vendor brands. """ # Use batch operations for SQLite compatibility - with op.batch_alter_table('vendors', schema=None) as batch_op: - batch_op.alter_column('owner_user_id', + with op.batch_alter_table("vendors", schema=None) as batch_op: + batch_op.alter_column("owner_user_id", existing_type=sa.INTEGER(), nullable=True) @@ -42,7 +42,7 @@ def downgrade() -> None: WARNING: This will fail if there are vendors without owner_user_id! 
""" # Use batch operations for SQLite compatibility - with op.batch_alter_table('vendors', schema=None) as batch_op: - batch_op.alter_column('owner_user_id', + with op.batch_alter_table("vendors", schema=None) as batch_op: + batch_op.alter_column("owner_user_id", existing_type=sa.INTEGER(), nullable=False) diff --git a/alembic/versions_backup/72aa309d4007_ensure_content_pages_table_with_all_.py b/alembic/versions_backup/72aa309d4007_ensure_content_pages_table_with_all_.py index 24a6d43d..9cb8ff16 100644 --- a/alembic/versions_backup/72aa309d4007_ensure_content_pages_table_with_all_.py +++ b/alembic/versions_backup/72aa309d4007_ensure_content_pages_table_with_all_.py @@ -6,7 +6,7 @@ Create Date: 2025-11-22 15:16:13.213613 """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa @@ -14,9 +14,9 @@ from alembic import op # revision identifiers, used by Alembic. revision: str = "72aa309d4007" -down_revision: Union[str, None] = "fef1d20ce8b4" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "fef1d20ce8b4" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/7a7ce92593d5_add_architecture_quality_tracking_tables.py b/alembic/versions_backup/7a7ce92593d5_add_architecture_quality_tracking_tables.py index f5e60325..46bf1b64 100644 --- a/alembic/versions_backup/7a7ce92593d5_add_architecture_quality_tracking_tables.py +++ b/alembic/versions_backup/7a7ce92593d5_add_architecture_quality_tracking_tables.py @@ -6,18 +6,17 @@ Create Date: 2025-11-28 09:21:16.545203 """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa -from sqlalchemy.dialects import postgresql from alembic import op # revision identifiers, used by Alembic. 
revision: str = "7a7ce92593d5" -down_revision: Union[str, None] = "a2064e1dfcd4" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "a2064e1dfcd4" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/82ea1b4a3ccb_add_test_run_tables.py b/alembic/versions_backup/82ea1b4a3ccb_add_test_run_tables.py index 2c9f6986..871e4e8f 100644 --- a/alembic/versions_backup/82ea1b4a3ccb_add_test_run_tables.py +++ b/alembic/versions_backup/82ea1b4a3ccb_add_test_run_tables.py @@ -5,99 +5,99 @@ Revises: b4c5d6e7f8a9 Create Date: 2025-12-12 22:48:09.501172 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = '82ea1b4a3ccb' -down_revision: Union[str, None] = 'b4c5d6e7f8a9' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "82ea1b4a3ccb" +down_revision: str | None = "b4c5d6e7f8a9" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Create test_collections table - op.create_table('test_collections', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('total_tests', sa.Integer(), nullable=True), - sa.Column('total_files', sa.Integer(), nullable=True), - sa.Column('total_classes', sa.Integer(), nullable=True), - sa.Column('unit_tests', sa.Integer(), nullable=True), - sa.Column('integration_tests', sa.Integer(), nullable=True), - sa.Column('performance_tests', sa.Integer(), nullable=True), - sa.Column('system_tests', sa.Integer(), nullable=True), - sa.Column('test_files', sa.JSON(), nullable=True), - sa.Column('collected_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), 
nullable=False), - sa.PrimaryKeyConstraint('id') + op.create_table("test_collections", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("total_tests", sa.Integer(), nullable=True), + sa.Column("total_files", sa.Integer(), nullable=True), + sa.Column("total_classes", sa.Integer(), nullable=True), + sa.Column("unit_tests", sa.Integer(), nullable=True), + sa.Column("integration_tests", sa.Integer(), nullable=True), + sa.Column("performance_tests", sa.Integer(), nullable=True), + sa.Column("system_tests", sa.Integer(), nullable=True), + sa.Column("test_files", sa.JSON(), nullable=True), + sa.Column("collected_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_test_collections_id'), 'test_collections', ['id'], unique=False) + op.create_index(op.f("ix_test_collections_id"), "test_collections", ["id"], unique=False) # Create test_runs table - op.create_table('test_runs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('timestamp', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('total_tests', sa.Integer(), nullable=True), - sa.Column('passed', sa.Integer(), nullable=True), - sa.Column('failed', sa.Integer(), nullable=True), - sa.Column('errors', sa.Integer(), nullable=True), - sa.Column('skipped', sa.Integer(), nullable=True), - sa.Column('xfailed', sa.Integer(), nullable=True), - sa.Column('xpassed', sa.Integer(), nullable=True), - sa.Column('coverage_percent', sa.Float(), nullable=True), - sa.Column('duration_seconds', sa.Float(), nullable=True), - sa.Column('triggered_by', sa.String(length=100), nullable=True), - sa.Column('git_commit_hash', sa.String(length=40), nullable=True), - sa.Column('git_branch', sa.String(length=100), nullable=True), - sa.Column('test_path', sa.String(length=500), nullable=True), - sa.Column('pytest_args', sa.String(length=500), nullable=True), - 
sa.Column('status', sa.String(length=20), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table("test_runs", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("timestamp", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.Column("total_tests", sa.Integer(), nullable=True), + sa.Column("passed", sa.Integer(), nullable=True), + sa.Column("failed", sa.Integer(), nullable=True), + sa.Column("errors", sa.Integer(), nullable=True), + sa.Column("skipped", sa.Integer(), nullable=True), + sa.Column("xfailed", sa.Integer(), nullable=True), + sa.Column("xpassed", sa.Integer(), nullable=True), + sa.Column("coverage_percent", sa.Float(), nullable=True), + sa.Column("duration_seconds", sa.Float(), nullable=True), + sa.Column("triggered_by", sa.String(length=100), nullable=True), + sa.Column("git_commit_hash", sa.String(length=40), nullable=True), + sa.Column("git_branch", sa.String(length=100), nullable=True), + sa.Column("test_path", sa.String(length=500), nullable=True), + sa.Column("pytest_args", sa.String(length=500), nullable=True), + sa.Column("status", sa.String(length=20), nullable=True), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_test_runs_id'), 'test_runs', ['id'], unique=False) - op.create_index(op.f('ix_test_runs_status'), 'test_runs', ['status'], unique=False) - op.create_index(op.f('ix_test_runs_timestamp'), 'test_runs', ['timestamp'], unique=False) + op.create_index(op.f("ix_test_runs_id"), "test_runs", ["id"], unique=False) + op.create_index(op.f("ix_test_runs_status"), "test_runs", ["status"], unique=False) + op.create_index(op.f("ix_test_runs_timestamp"), "test_runs", ["timestamp"], unique=False) # Create test_results table - op.create_table('test_results', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('run_id', sa.Integer(), nullable=False), - sa.Column('node_id', sa.String(length=500), nullable=False), - sa.Column('test_name', sa.String(length=200), 
nullable=False), - sa.Column('test_file', sa.String(length=300), nullable=False), - sa.Column('test_class', sa.String(length=200), nullable=True), - sa.Column('outcome', sa.String(length=20), nullable=False), - sa.Column('duration_seconds', sa.Float(), nullable=True), - sa.Column('error_message', sa.Text(), nullable=True), - sa.Column('traceback', sa.Text(), nullable=True), - sa.Column('markers', sa.JSON(), nullable=True), - sa.Column('parameters', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.ForeignKeyConstraint(['run_id'], ['test_runs.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table("test_results", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("run_id", sa.Integer(), nullable=False), + sa.Column("node_id", sa.String(length=500), nullable=False), + sa.Column("test_name", sa.String(length=200), nullable=False), + sa.Column("test_file", sa.String(length=300), nullable=False), + sa.Column("test_class", sa.String(length=200), nullable=True), + sa.Column("outcome", sa.String(length=20), nullable=False), + sa.Column("duration_seconds", sa.Float(), nullable=True), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("traceback", sa.Text(), nullable=True), + sa.Column("markers", sa.JSON(), nullable=True), + sa.Column("parameters", sa.JSON(), nullable=True), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.ForeignKeyConstraint(["run_id"], ["test_runs.id"], ), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_test_results_id'), 'test_results', ['id'], unique=False) - op.create_index(op.f('ix_test_results_node_id'), 'test_results', ['node_id'], unique=False) - op.create_index(op.f('ix_test_results_outcome'), 'test_results', ['outcome'], unique=False) - op.create_index(op.f('ix_test_results_run_id'), 'test_results', ['run_id'], unique=False) + 
op.create_index(op.f("ix_test_results_id"), "test_results", ["id"], unique=False) + op.create_index(op.f("ix_test_results_node_id"), "test_results", ["node_id"], unique=False) + op.create_index(op.f("ix_test_results_outcome"), "test_results", ["outcome"], unique=False) + op.create_index(op.f("ix_test_results_run_id"), "test_results", ["run_id"], unique=False) def downgrade() -> None: # Drop test_results table first (has foreign key to test_runs) - op.drop_index(op.f('ix_test_results_run_id'), table_name='test_results') - op.drop_index(op.f('ix_test_results_outcome'), table_name='test_results') - op.drop_index(op.f('ix_test_results_node_id'), table_name='test_results') - op.drop_index(op.f('ix_test_results_id'), table_name='test_results') - op.drop_table('test_results') + op.drop_index(op.f("ix_test_results_run_id"), table_name="test_results") + op.drop_index(op.f("ix_test_results_outcome"), table_name="test_results") + op.drop_index(op.f("ix_test_results_node_id"), table_name="test_results") + op.drop_index(op.f("ix_test_results_id"), table_name="test_results") + op.drop_table("test_results") # Drop test_runs table - op.drop_index(op.f('ix_test_runs_timestamp'), table_name='test_runs') - op.drop_index(op.f('ix_test_runs_status'), table_name='test_runs') - op.drop_index(op.f('ix_test_runs_id'), table_name='test_runs') - op.drop_table('test_runs') + op.drop_index(op.f("ix_test_runs_timestamp"), table_name="test_runs") + op.drop_index(op.f("ix_test_runs_status"), table_name="test_runs") + op.drop_index(op.f("ix_test_runs_id"), table_name="test_runs") + op.drop_table("test_runs") # Drop test_collections table - op.drop_index(op.f('ix_test_collections_id'), table_name='test_collections') - op.drop_table('test_collections') + op.drop_index(op.f("ix_test_collections_id"), table_name="test_collections") + op.drop_table("test_collections") diff --git a/alembic/versions_backup/91d02647efae_add_marketplace_import_errors_table.py 
b/alembic/versions_backup/91d02647efae_add_marketplace_import_errors_table.py index 29db8b03..4fb78b25 100644 --- a/alembic/versions_backup/91d02647efae_add_marketplace_import_errors_table.py +++ b/alembic/versions_backup/91d02647efae_add_marketplace_import_errors_table.py @@ -5,40 +5,41 @@ Revises: 987b4ecfa503 Create Date: 2025-12-13 13:13:46.969503 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op + # revision identifiers, used by Alembic. -revision: str = '91d02647efae' -down_revision: Union[str, None] = '987b4ecfa503' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "91d02647efae" +down_revision: str | None = "987b4ecfa503" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Create marketplace_import_errors table to store detailed import error information - op.create_table('marketplace_import_errors', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('import_job_id', sa.Integer(), nullable=False), - sa.Column('row_number', sa.Integer(), nullable=False), - sa.Column('identifier', sa.String(), nullable=True), - sa.Column('error_type', sa.String(length=50), nullable=False), - sa.Column('error_message', sa.Text(), nullable=False), - sa.Column('row_data', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['import_job_id'], ['marketplace_import_jobs.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + op.create_table("marketplace_import_errors", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("import_job_id", sa.Integer(), nullable=False), + sa.Column("row_number", sa.Integer(), nullable=False), + sa.Column("identifier", sa.String(), nullable=True), + sa.Column("error_type", 
sa.String(length=50), nullable=False), + sa.Column("error_message", sa.Text(), nullable=False), + sa.Column("row_data", sa.JSON(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["import_job_id"], ["marketplace_import_jobs.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_import_error_job_id', 'marketplace_import_errors', ['import_job_id'], unique=False) - op.create_index('idx_import_error_type', 'marketplace_import_errors', ['error_type'], unique=False) - op.create_index(op.f('ix_marketplace_import_errors_id'), 'marketplace_import_errors', ['id'], unique=False) + op.create_index("idx_import_error_job_id", "marketplace_import_errors", ["import_job_id"], unique=False) + op.create_index("idx_import_error_type", "marketplace_import_errors", ["error_type"], unique=False) + op.create_index(op.f("ix_marketplace_import_errors_id"), "marketplace_import_errors", ["id"], unique=False) def downgrade() -> None: - op.drop_index(op.f('ix_marketplace_import_errors_id'), table_name='marketplace_import_errors') - op.drop_index('idx_import_error_type', table_name='marketplace_import_errors') - op.drop_index('idx_import_error_job_id', table_name='marketplace_import_errors') - op.drop_table('marketplace_import_errors') + op.drop_index(op.f("ix_marketplace_import_errors_id"), table_name="marketplace_import_errors") + op.drop_index("idx_import_error_type", table_name="marketplace_import_errors") + op.drop_index("idx_import_error_job_id", table_name="marketplace_import_errors") + op.drop_table("marketplace_import_errors") diff --git a/alembic/versions_backup/987b4ecfa503_add_letzshop_integration_tables.py b/alembic/versions_backup/987b4ecfa503_add_letzshop_integration_tables.py index 54605a38..d4388b59 100644 --- a/alembic/versions_backup/987b4ecfa503_add_letzshop_integration_tables.py +++ 
b/alembic/versions_backup/987b4ecfa503_add_letzshop_integration_tables.py @@ -11,169 +11,169 @@ This migration adds: - letzshop_sync_logs: Audit trail for sync operations - Adds channel fields to orders table for multi-marketplace support """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = '987b4ecfa503' -down_revision: Union[str, None] = '82ea1b4a3ccb' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "987b4ecfa503" +down_revision: str | None = "82ea1b4a3ccb" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add channel fields to orders table - op.add_column('orders', sa.Column('channel', sa.String(length=50), nullable=True, server_default='direct')) - op.add_column('orders', sa.Column('external_order_id', sa.String(length=100), nullable=True)) - op.add_column('orders', sa.Column('external_channel_data', sa.JSON(), nullable=True)) - op.create_index(op.f('ix_orders_channel'), 'orders', ['channel'], unique=False) - op.create_index(op.f('ix_orders_external_order_id'), 'orders', ['external_order_id'], unique=False) + op.add_column("orders", sa.Column("channel", sa.String(length=50), nullable=True, server_default="direct")) + op.add_column("orders", sa.Column("external_order_id", sa.String(length=100), nullable=True)) + op.add_column("orders", sa.Column("external_channel_data", sa.JSON(), nullable=True)) + op.create_index(op.f("ix_orders_channel"), "orders", ["channel"], unique=False) + op.create_index(op.f("ix_orders_external_order_id"), "orders", ["external_order_id"], unique=False) # Create vendor_letzshop_credentials table - op.create_table('vendor_letzshop_credentials', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), 
nullable=False), - sa.Column('api_key_encrypted', sa.Text(), nullable=False), - sa.Column('api_endpoint', sa.String(length=255), server_default='https://letzshop.lu/graphql', nullable=True), - sa.Column('auto_sync_enabled', sa.Boolean(), server_default='0', nullable=True), - sa.Column('sync_interval_minutes', sa.Integer(), server_default='15', nullable=True), - sa.Column('last_sync_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('last_sync_status', sa.String(length=50), nullable=True), - sa.Column('last_sync_error', sa.Text(), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('vendor_id') + op.create_table("vendor_letzshop_credentials", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("api_key_encrypted", sa.Text(), nullable=False), + sa.Column("api_endpoint", sa.String(length=255), server_default="https://letzshop.lu/graphql", nullable=True), + sa.Column("auto_sync_enabled", sa.Boolean(), server_default="0", nullable=True), + sa.Column("sync_interval_minutes", sa.Integer(), server_default="15", nullable=True), + sa.Column("last_sync_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("last_sync_status", sa.String(length=50), nullable=True), + sa.Column("last_sync_error", sa.Text(), nullable=True), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("vendor_id") ) - 
op.create_index(op.f('ix_vendor_letzshop_credentials_id'), 'vendor_letzshop_credentials', ['id'], unique=False) - op.create_index(op.f('ix_vendor_letzshop_credentials_vendor_id'), 'vendor_letzshop_credentials', ['vendor_id'], unique=True) + op.create_index(op.f("ix_vendor_letzshop_credentials_id"), "vendor_letzshop_credentials", ["id"], unique=False) + op.create_index(op.f("ix_vendor_letzshop_credentials_vendor_id"), "vendor_letzshop_credentials", ["vendor_id"], unique=True) # Create letzshop_orders table - op.create_table('letzshop_orders', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('letzshop_order_id', sa.String(length=100), nullable=False), - sa.Column('letzshop_shipment_id', sa.String(length=100), nullable=True), - sa.Column('letzshop_order_number', sa.String(length=100), nullable=True), - sa.Column('local_order_id', sa.Integer(), nullable=True), - sa.Column('letzshop_state', sa.String(length=50), nullable=True), - sa.Column('customer_email', sa.String(length=255), nullable=True), - sa.Column('customer_name', sa.String(length=255), nullable=True), - sa.Column('total_amount', sa.String(length=50), nullable=True), - sa.Column('currency', sa.String(length=10), server_default='EUR', nullable=True), - sa.Column('raw_order_data', sa.JSON(), nullable=True), - sa.Column('inventory_units', sa.JSON(), nullable=True), - sa.Column('sync_status', sa.String(length=50), server_default='pending', nullable=True), - sa.Column('last_synced_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('sync_error', sa.Text(), nullable=True), - sa.Column('confirmed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('rejected_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('tracking_set_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('tracking_number', sa.String(length=100), nullable=True), - sa.Column('tracking_carrier', sa.String(length=100), nullable=True), - 
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.ForeignKeyConstraint(['local_order_id'], ['orders.id'], ), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table("letzshop_orders", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("letzshop_order_id", sa.String(length=100), nullable=False), + sa.Column("letzshop_shipment_id", sa.String(length=100), nullable=True), + sa.Column("letzshop_order_number", sa.String(length=100), nullable=True), + sa.Column("local_order_id", sa.Integer(), nullable=True), + sa.Column("letzshop_state", sa.String(length=50), nullable=True), + sa.Column("customer_email", sa.String(length=255), nullable=True), + sa.Column("customer_name", sa.String(length=255), nullable=True), + sa.Column("total_amount", sa.String(length=50), nullable=True), + sa.Column("currency", sa.String(length=10), server_default="EUR", nullable=True), + sa.Column("raw_order_data", sa.JSON(), nullable=True), + sa.Column("inventory_units", sa.JSON(), nullable=True), + sa.Column("sync_status", sa.String(length=50), server_default="pending", nullable=True), + sa.Column("last_synced_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("sync_error", sa.Text(), nullable=True), + sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("rejected_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("tracking_set_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("tracking_number", sa.String(length=100), nullable=True), + sa.Column("tracking_carrier", sa.String(length=100), nullable=True), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + 
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.ForeignKeyConstraint(["local_order_id"], ["orders.id"], ), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_letzshop_orders_id'), 'letzshop_orders', ['id'], unique=False) - op.create_index(op.f('ix_letzshop_orders_letzshop_order_id'), 'letzshop_orders', ['letzshop_order_id'], unique=False) - op.create_index(op.f('ix_letzshop_orders_letzshop_shipment_id'), 'letzshop_orders', ['letzshop_shipment_id'], unique=False) - op.create_index(op.f('ix_letzshop_orders_vendor_id'), 'letzshop_orders', ['vendor_id'], unique=False) - op.create_index('idx_letzshop_order_vendor', 'letzshop_orders', ['vendor_id', 'letzshop_order_id'], unique=False) - op.create_index('idx_letzshop_order_state', 'letzshop_orders', ['vendor_id', 'letzshop_state'], unique=False) - op.create_index('idx_letzshop_order_sync', 'letzshop_orders', ['vendor_id', 'sync_status'], unique=False) + op.create_index(op.f("ix_letzshop_orders_id"), "letzshop_orders", ["id"], unique=False) + op.create_index(op.f("ix_letzshop_orders_letzshop_order_id"), "letzshop_orders", ["letzshop_order_id"], unique=False) + op.create_index(op.f("ix_letzshop_orders_letzshop_shipment_id"), "letzshop_orders", ["letzshop_shipment_id"], unique=False) + op.create_index(op.f("ix_letzshop_orders_vendor_id"), "letzshop_orders", ["vendor_id"], unique=False) + op.create_index("idx_letzshop_order_vendor", "letzshop_orders", ["vendor_id", "letzshop_order_id"], unique=False) + op.create_index("idx_letzshop_order_state", "letzshop_orders", ["vendor_id", "letzshop_state"], unique=False) + op.create_index("idx_letzshop_order_sync", "letzshop_orders", ["vendor_id", "sync_status"], unique=False) # Create letzshop_fulfillment_queue table - op.create_table('letzshop_fulfillment_queue', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', 
sa.Integer(), nullable=False), - sa.Column('letzshop_order_id', sa.Integer(), nullable=False), - sa.Column('operation', sa.String(length=50), nullable=False), - sa.Column('payload', sa.JSON(), nullable=False), - sa.Column('status', sa.String(length=50), server_default='pending', nullable=True), - sa.Column('attempts', sa.Integer(), server_default='0', nullable=True), - sa.Column('max_attempts', sa.Integer(), server_default='3', nullable=True), - sa.Column('last_attempt_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('next_retry_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('error_message', sa.Text(), nullable=True), - sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('response_data', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.ForeignKeyConstraint(['letzshop_order_id'], ['letzshop_orders.id'], ), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table("letzshop_fulfillment_queue", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("letzshop_order_id", sa.Integer(), nullable=False), + sa.Column("operation", sa.String(length=50), nullable=False), + sa.Column("payload", sa.JSON(), nullable=False), + sa.Column("status", sa.String(length=50), server_default="pending", nullable=True), + sa.Column("attempts", sa.Integer(), server_default="0", nullable=True), + sa.Column("max_attempts", sa.Integer(), server_default="3", nullable=True), + sa.Column("last_attempt_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("next_retry_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("completed_at", 
sa.DateTime(timezone=True), nullable=True), + sa.Column("response_data", sa.JSON(), nullable=True), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.ForeignKeyConstraint(["letzshop_order_id"], ["letzshop_orders.id"], ), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_letzshop_fulfillment_queue_id'), 'letzshop_fulfillment_queue', ['id'], unique=False) - op.create_index(op.f('ix_letzshop_fulfillment_queue_vendor_id'), 'letzshop_fulfillment_queue', ['vendor_id'], unique=False) - op.create_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue', ['status', 'vendor_id'], unique=False) - op.create_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue', ['status', 'next_retry_at'], unique=False) + op.create_index(op.f("ix_letzshop_fulfillment_queue_id"), "letzshop_fulfillment_queue", ["id"], unique=False) + op.create_index(op.f("ix_letzshop_fulfillment_queue_vendor_id"), "letzshop_fulfillment_queue", ["vendor_id"], unique=False) + op.create_index("idx_fulfillment_queue_status", "letzshop_fulfillment_queue", ["status", "vendor_id"], unique=False) + op.create_index("idx_fulfillment_queue_retry", "letzshop_fulfillment_queue", ["status", "next_retry_at"], unique=False) # Create letzshop_sync_logs table - op.create_table('letzshop_sync_logs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('operation_type', sa.String(length=50), nullable=False), - sa.Column('direction', sa.String(length=10), nullable=False), - sa.Column('status', sa.String(length=50), nullable=False), - sa.Column('records_processed', sa.Integer(), server_default='0', nullable=True), - sa.Column('records_succeeded', sa.Integer(), server_default='0', 
nullable=True), - sa.Column('records_failed', sa.Integer(), server_default='0', nullable=True), - sa.Column('error_details', sa.JSON(), nullable=True), - sa.Column('started_at', sa.DateTime(timezone=True), nullable=False), - sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('duration_seconds', sa.Integer(), nullable=True), - sa.Column('triggered_by', sa.String(length=100), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table("letzshop_sync_logs", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("operation_type", sa.String(length=50), nullable=False), + sa.Column("direction", sa.String(length=10), nullable=False), + sa.Column("status", sa.String(length=50), nullable=False), + sa.Column("records_processed", sa.Integer(), server_default="0", nullable=True), + sa.Column("records_succeeded", sa.Integer(), server_default="0", nullable=True), + sa.Column("records_failed", sa.Integer(), server_default="0", nullable=True), + sa.Column("error_details", sa.JSON(), nullable=True), + sa.Column("started_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("duration_seconds", sa.Integer(), nullable=True), + sa.Column("triggered_by", sa.String(length=100), nullable=True), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ), + sa.PrimaryKeyConstraint("id") ) - 
op.create_index(op.f('ix_letzshop_sync_logs_id'), 'letzshop_sync_logs', ['id'], unique=False) - op.create_index(op.f('ix_letzshop_sync_logs_vendor_id'), 'letzshop_sync_logs', ['vendor_id'], unique=False) - op.create_index('idx_sync_log_vendor_type', 'letzshop_sync_logs', ['vendor_id', 'operation_type'], unique=False) - op.create_index('idx_sync_log_vendor_date', 'letzshop_sync_logs', ['vendor_id', 'started_at'], unique=False) + op.create_index(op.f("ix_letzshop_sync_logs_id"), "letzshop_sync_logs", ["id"], unique=False) + op.create_index(op.f("ix_letzshop_sync_logs_vendor_id"), "letzshop_sync_logs", ["vendor_id"], unique=False) + op.create_index("idx_sync_log_vendor_type", "letzshop_sync_logs", ["vendor_id", "operation_type"], unique=False) + op.create_index("idx_sync_log_vendor_date", "letzshop_sync_logs", ["vendor_id", "started_at"], unique=False) def downgrade() -> None: # Drop letzshop_sync_logs table - op.drop_index('idx_sync_log_vendor_date', table_name='letzshop_sync_logs') - op.drop_index('idx_sync_log_vendor_type', table_name='letzshop_sync_logs') - op.drop_index(op.f('ix_letzshop_sync_logs_vendor_id'), table_name='letzshop_sync_logs') - op.drop_index(op.f('ix_letzshop_sync_logs_id'), table_name='letzshop_sync_logs') - op.drop_table('letzshop_sync_logs') + op.drop_index("idx_sync_log_vendor_date", table_name="letzshop_sync_logs") + op.drop_index("idx_sync_log_vendor_type", table_name="letzshop_sync_logs") + op.drop_index(op.f("ix_letzshop_sync_logs_vendor_id"), table_name="letzshop_sync_logs") + op.drop_index(op.f("ix_letzshop_sync_logs_id"), table_name="letzshop_sync_logs") + op.drop_table("letzshop_sync_logs") # Drop letzshop_fulfillment_queue table - op.drop_index('idx_fulfillment_queue_retry', table_name='letzshop_fulfillment_queue') - op.drop_index('idx_fulfillment_queue_status', table_name='letzshop_fulfillment_queue') - op.drop_index(op.f('ix_letzshop_fulfillment_queue_vendor_id'), table_name='letzshop_fulfillment_queue') - 
op.drop_index(op.f('ix_letzshop_fulfillment_queue_id'), table_name='letzshop_fulfillment_queue') - op.drop_table('letzshop_fulfillment_queue') + op.drop_index("idx_fulfillment_queue_retry", table_name="letzshop_fulfillment_queue") + op.drop_index("idx_fulfillment_queue_status", table_name="letzshop_fulfillment_queue") + op.drop_index(op.f("ix_letzshop_fulfillment_queue_vendor_id"), table_name="letzshop_fulfillment_queue") + op.drop_index(op.f("ix_letzshop_fulfillment_queue_id"), table_name="letzshop_fulfillment_queue") + op.drop_table("letzshop_fulfillment_queue") # Drop letzshop_orders table - op.drop_index('idx_letzshop_order_sync', table_name='letzshop_orders') - op.drop_index('idx_letzshop_order_state', table_name='letzshop_orders') - op.drop_index('idx_letzshop_order_vendor', table_name='letzshop_orders') - op.drop_index(op.f('ix_letzshop_orders_vendor_id'), table_name='letzshop_orders') - op.drop_index(op.f('ix_letzshop_orders_letzshop_shipment_id'), table_name='letzshop_orders') - op.drop_index(op.f('ix_letzshop_orders_letzshop_order_id'), table_name='letzshop_orders') - op.drop_index(op.f('ix_letzshop_orders_id'), table_name='letzshop_orders') - op.drop_table('letzshop_orders') + op.drop_index("idx_letzshop_order_sync", table_name="letzshop_orders") + op.drop_index("idx_letzshop_order_state", table_name="letzshop_orders") + op.drop_index("idx_letzshop_order_vendor", table_name="letzshop_orders") + op.drop_index(op.f("ix_letzshop_orders_vendor_id"), table_name="letzshop_orders") + op.drop_index(op.f("ix_letzshop_orders_letzshop_shipment_id"), table_name="letzshop_orders") + op.drop_index(op.f("ix_letzshop_orders_letzshop_order_id"), table_name="letzshop_orders") + op.drop_index(op.f("ix_letzshop_orders_id"), table_name="letzshop_orders") + op.drop_table("letzshop_orders") # Drop vendor_letzshop_credentials table - op.drop_index(op.f('ix_vendor_letzshop_credentials_vendor_id'), table_name='vendor_letzshop_credentials') - 
op.drop_index(op.f('ix_vendor_letzshop_credentials_id'), table_name='vendor_letzshop_credentials') - op.drop_table('vendor_letzshop_credentials') + op.drop_index(op.f("ix_vendor_letzshop_credentials_vendor_id"), table_name="vendor_letzshop_credentials") + op.drop_index(op.f("ix_vendor_letzshop_credentials_id"), table_name="vendor_letzshop_credentials") + op.drop_table("vendor_letzshop_credentials") # Drop channel fields from orders table - op.drop_index(op.f('ix_orders_external_order_id'), table_name='orders') - op.drop_index(op.f('ix_orders_channel'), table_name='orders') - op.drop_column('orders', 'external_channel_data') - op.drop_column('orders', 'external_order_id') - op.drop_column('orders', 'channel') + op.drop_index(op.f("ix_orders_external_order_id"), table_name="orders") + op.drop_index(op.f("ix_orders_channel"), table_name="orders") + op.drop_column("orders", "external_channel_data") + op.drop_column("orders", "external_order_id") + op.drop_column("orders", "channel") diff --git a/alembic/versions_backup/9f3a25ea4991_remove_vendor_owner_user_id_column.py b/alembic/versions_backup/9f3a25ea4991_remove_vendor_owner_user_id_column.py index 98c9ffa6..f631f8df 100644 --- a/alembic/versions_backup/9f3a25ea4991_remove_vendor_owner_user_id_column.py +++ b/alembic/versions_backup/9f3a25ea4991_remove_vendor_owner_user_id_column.py @@ -13,17 +13,17 @@ Architecture Change: The vendor ownership is now determined via the company relationship: - vendor.company.owner_user_id contains the owner """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = '9f3a25ea4991' -down_revision: Union[str, None] = '5818330181a5' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "9f3a25ea4991" +down_revision: str | None = "5818330181a5" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: @@ -35,9 +35,9 @@ def upgrade() -> None: Note: SQLite batch mode recreates the table without the column, so we don't need to explicitly drop constraints. """ - with op.batch_alter_table('vendors', schema=None) as batch_op: + with op.batch_alter_table("vendors", schema=None) as batch_op: # Drop the column - batch mode handles constraints automatically - batch_op.drop_column('owner_user_id') + batch_op.drop_column("owner_user_id") def downgrade() -> None: @@ -48,13 +48,13 @@ def downgrade() -> None: You will need to manually populate owner_user_id from company.owner_user_id if reverting this migration. """ - with op.batch_alter_table('vendors', schema=None) as batch_op: + with op.batch_alter_table("vendors", schema=None) as batch_op: batch_op.add_column( - sa.Column('owner_user_id', sa.Integer(), nullable=True) + sa.Column("owner_user_id", sa.Integer(), nullable=True) ) batch_op.create_foreign_key( - 'vendors_owner_user_id_fkey', - 'users', - ['owner_user_id'], - ['id'] + "vendors_owner_user_id_fkey", + "users", + ["owner_user_id"], + ["id"] ) diff --git a/alembic/versions_backup/a2064e1dfcd4_add_cart_items_table.py b/alembic/versions_backup/a2064e1dfcd4_add_cart_items_table.py index 3eca3ff3..c3200a1e 100644 --- a/alembic/versions_backup/a2064e1dfcd4_add_cart_items_table.py +++ b/alembic/versions_backup/a2064e1dfcd4_add_cart_items_table.py @@ -6,7 +6,7 @@ Create Date: 2025-11-23 19:52:40.509538 """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa @@ -14,9 +14,9 @@ from alembic import op # revision identifiers, used by Alembic. 
revision: str = "a2064e1dfcd4" -down_revision: Union[str, None] = "f68d8da5315a" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "f68d8da5315a" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/a3b4c5d6e7f8_add_product_override_fields.py b/alembic/versions_backup/a3b4c5d6e7f8_add_product_override_fields.py index 2f402609..e8674cc3 100644 --- a/alembic/versions_backup/a3b4c5d6e7f8_add_product_override_fields.py +++ b/alembic/versions_backup/a3b4c5d6e7f8_add_product_override_fields.py @@ -15,7 +15,7 @@ The override pattern: NULL value means "inherit from marketplace_product". Setting a value creates a vendor-specific override. """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa @@ -23,9 +23,9 @@ from alembic import op # revision identifiers, used by Alembic. 
revision: str = "a3b4c5d6e7f8" -down_revision: Union[str, None] = "f2b3c4d5e6f7" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "f2b3c4d5e6f7" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/a9a86cef6cca_add_letzshop_order_locale_and_country_.py b/alembic/versions_backup/a9a86cef6cca_add_letzshop_order_locale_and_country_.py index 731d6ae6..9e4a0854 100644 --- a/alembic/versions_backup/a9a86cef6cca_add_letzshop_order_locale_and_country_.py +++ b/alembic/versions_backup/a9a86cef6cca_add_letzshop_order_locale_and_country_.py @@ -5,27 +5,27 @@ Revises: fcfdc02d5138 Create Date: 2025-12-17 20:55:41.477848 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = 'a9a86cef6cca' -down_revision: Union[str, None] = 'fcfdc02d5138' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "a9a86cef6cca" +down_revision: str | None = "fcfdc02d5138" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add new columns to letzshop_orders for customer locale and country - op.add_column('letzshop_orders', sa.Column('customer_locale', sa.String(length=10), nullable=True)) - op.add_column('letzshop_orders', sa.Column('shipping_country_iso', sa.String(length=5), nullable=True)) - op.add_column('letzshop_orders', sa.Column('billing_country_iso', sa.String(length=5), nullable=True)) + op.add_column("letzshop_orders", sa.Column("customer_locale", sa.String(length=10), nullable=True)) + op.add_column("letzshop_orders", sa.Column("shipping_country_iso", sa.String(length=5), nullable=True)) + 
op.add_column("letzshop_orders", sa.Column("billing_country_iso", sa.String(length=5), nullable=True)) def downgrade() -> None: - op.drop_column('letzshop_orders', 'billing_country_iso') - op.drop_column('letzshop_orders', 'shipping_country_iso') - op.drop_column('letzshop_orders', 'customer_locale') + op.drop_column("letzshop_orders", "billing_country_iso") + op.drop_column("letzshop_orders", "shipping_country_iso") + op.drop_column("letzshop_orders", "customer_locale") diff --git a/alembic/versions_backup/b412e0b49c2e_add_language_column_to_marketplace_.py b/alembic/versions_backup/b412e0b49c2e_add_language_column_to_marketplace_.py index 1c1ca208..7637af95 100644 --- a/alembic/versions_backup/b412e0b49c2e_add_language_column_to_marketplace_.py +++ b/alembic/versions_backup/b412e0b49c2e_add_language_column_to_marketplace_.py @@ -5,26 +5,26 @@ Revises: 91d02647efae Create Date: 2025-12-13 13:35:46.524893 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = 'b412e0b49c2e' -down_revision: Union[str, None] = '91d02647efae' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "b412e0b49c2e" +down_revision: str | None = "91d02647efae" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add language column with default value for existing rows op.add_column( - 'marketplace_import_jobs', - sa.Column('language', sa.String(length=5), nullable=False, server_default='en') + "marketplace_import_jobs", + sa.Column("language", sa.String(length=5), nullable=False, server_default="en") ) def downgrade() -> None: - op.drop_column('marketplace_import_jobs', 'language') + op.drop_column("marketplace_import_jobs", "language") diff --git a/alembic/versions_backup/b4c5d6e7f8a9_migrate_product_data_to_translations.py b/alembic/versions_backup/b4c5d6e7f8a9_migrate_product_data_to_translations.py index a597de9e..f8c54bdb 100644 --- a/alembic/versions_backup/b4c5d6e7f8a9_migrate_product_data_to_translations.py +++ b/alembic/versions_backup/b4c5d6e7f8a9_migrate_product_data_to_translations.py @@ -15,7 +15,7 @@ after migrating the data to the new structure. """ import re -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa from sqlalchemy import text @@ -24,9 +24,9 @@ from alembic import op # revision identifiers, used by Alembic. 
revision: str = "b4c5d6e7f8a9" -down_revision: Union[str, None] = "a3b4c5d6e7f8" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "a3b4c5d6e7f8" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def parse_price(price_str: str) -> float | None: diff --git a/alembic/versions_backup/ba2c0ce78396_add_show_in_legal_to_content_pages.py b/alembic/versions_backup/ba2c0ce78396_add_show_in_legal_to_content_pages.py index bd241679..1f78a8d8 100644 --- a/alembic/versions_backup/ba2c0ce78396_add_show_in_legal_to_content_pages.py +++ b/alembic/versions_backup/ba2c0ce78396_add_show_in_legal_to_content_pages.py @@ -5,17 +5,17 @@ Revises: m1b2c3d4e5f6 Create Date: 2025-12-28 20:00:24.263518 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = 'ba2c0ce78396' -down_revision: Union[str, None] = 'm1b2c3d4e5f6' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "ba2c0ce78396" +down_revision: str | None = "m1b2c3d4e5f6" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: @@ -25,8 +25,8 @@ def upgrade() -> None: alongside the copyright notice (e.g., Privacy Policy, Terms of Service). 
""" op.add_column( - 'content_pages', - sa.Column('show_in_legal', sa.Boolean(), nullable=True, default=False) + "content_pages", + sa.Column("show_in_legal", sa.Boolean(), nullable=True, default=False) ) # Set default value for existing rows (PostgreSQL uses true/false for boolean) @@ -38,4 +38,4 @@ def upgrade() -> None: def downgrade() -> None: """Remove show_in_legal column from content_pages table.""" - op.drop_column('content_pages', 'show_in_legal') + op.drop_column("content_pages", "show_in_legal") diff --git a/alembic/versions_backup/c00d2985701f_add_letzshop_credentials_carrier_fields.py b/alembic/versions_backup/c00d2985701f_add_letzshop_credentials_carrier_fields.py index c4a9bab2..d9d47aba 100644 --- a/alembic/versions_backup/c00d2985701f_add_letzshop_credentials_carrier_fields.py +++ b/alembic/versions_backup/c00d2985701f_add_letzshop_credentials_carrier_fields.py @@ -5,31 +5,31 @@ Revises: 55b92e155566 Create Date: 2025-12-20 18:49:53.432904 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = 'c00d2985701f' -down_revision: Union[str, None] = '55b92e155566' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "c00d2985701f" +down_revision: str | None = "55b92e155566" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add carrier settings and test mode to vendor_letzshop_credentials - op.add_column('vendor_letzshop_credentials', sa.Column('test_mode_enabled', sa.Boolean(), nullable=True, server_default='0')) - op.add_column('vendor_letzshop_credentials', sa.Column('default_carrier', sa.String(length=50), nullable=True)) - op.add_column('vendor_letzshop_credentials', sa.Column('carrier_greco_label_url', sa.String(length=500), nullable=True, server_default='https://dispatchweb.fr/Tracky/Home/')) - op.add_column('vendor_letzshop_credentials', sa.Column('carrier_colissimo_label_url', sa.String(length=500), nullable=True)) - op.add_column('vendor_letzshop_credentials', sa.Column('carrier_xpresslogistics_label_url', sa.String(length=500), nullable=True)) + op.add_column("vendor_letzshop_credentials", sa.Column("test_mode_enabled", sa.Boolean(), nullable=True, server_default="0")) + op.add_column("vendor_letzshop_credentials", sa.Column("default_carrier", sa.String(length=50), nullable=True)) + op.add_column("vendor_letzshop_credentials", sa.Column("carrier_greco_label_url", sa.String(length=500), nullable=True, server_default="https://dispatchweb.fr/Tracky/Home/")) + op.add_column("vendor_letzshop_credentials", sa.Column("carrier_colissimo_label_url", sa.String(length=500), nullable=True)) + op.add_column("vendor_letzshop_credentials", sa.Column("carrier_xpresslogistics_label_url", sa.String(length=500), nullable=True)) def downgrade() -> None: - op.drop_column('vendor_letzshop_credentials', 'carrier_xpresslogistics_label_url') - op.drop_column('vendor_letzshop_credentials', 'carrier_colissimo_label_url') 
- op.drop_column('vendor_letzshop_credentials', 'carrier_greco_label_url') - op.drop_column('vendor_letzshop_credentials', 'default_carrier') - op.drop_column('vendor_letzshop_credentials', 'test_mode_enabled') + op.drop_column("vendor_letzshop_credentials", "carrier_xpresslogistics_label_url") + op.drop_column("vendor_letzshop_credentials", "carrier_colissimo_label_url") + op.drop_column("vendor_letzshop_credentials", "carrier_greco_label_url") + op.drop_column("vendor_letzshop_credentials", "default_carrier") + op.drop_column("vendor_letzshop_credentials", "test_mode_enabled") diff --git a/alembic/versions_backup/c1d2e3f4a5b6_unified_order_schema.py b/alembic/versions_backup/c1d2e3f4a5b6_unified_order_schema.py index 0f70e8fb..ff44d79b 100644 --- a/alembic/versions_backup/c1d2e3f4a5b6_unified_order_schema.py +++ b/alembic/versions_backup/c1d2e3f4a5b6_unified_order_schema.py @@ -21,18 +21,18 @@ Design principles: - Customer/address data snapshotted at order time - Products must exist in catalog (enforced by FK) """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa from sqlalchemy import inspect +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = 'c1d2e3f4a5b6' -down_revision: Union[str, None] = '2362c2723a93' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "c1d2e3f4a5b6" +down_revision: str | None = "2362c2723a93" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def table_exists(table_name: str) -> bool: @@ -48,7 +48,7 @@ def index_exists(index_name: str, table_name: str) -> bool: inspector = inspect(bind) try: indexes = inspector.get_indexes(table_name) - return any(idx['name'] == index_name for idx in indexes) + return any(idx["name"] == index_name for idx in indexes) except Exception: return False @@ -71,382 +71,382 @@ def upgrade() -> None: # ========================================================================= # Drop letzshop_fulfillment_queue (references letzshop_orders) - if table_exists('letzshop_fulfillment_queue'): - safe_drop_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue') - safe_drop_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue') - safe_drop_index('ix_letzshop_fulfillment_queue_vendor_id', 'letzshop_fulfillment_queue') - safe_drop_index('ix_letzshop_fulfillment_queue_id', 'letzshop_fulfillment_queue') - op.drop_table('letzshop_fulfillment_queue') + if table_exists("letzshop_fulfillment_queue"): + safe_drop_index("idx_fulfillment_queue_retry", "letzshop_fulfillment_queue") + safe_drop_index("idx_fulfillment_queue_status", "letzshop_fulfillment_queue") + safe_drop_index("ix_letzshop_fulfillment_queue_vendor_id", "letzshop_fulfillment_queue") + safe_drop_index("ix_letzshop_fulfillment_queue_id", "letzshop_fulfillment_queue") + op.drop_table("letzshop_fulfillment_queue") # Drop letzshop_orders table (replaced by unified orders) - if table_exists('letzshop_orders'): - safe_drop_index('idx_letzshop_order_sync', 'letzshop_orders') - safe_drop_index('idx_letzshop_order_state', 'letzshop_orders') - 
safe_drop_index('idx_letzshop_order_vendor', 'letzshop_orders') - safe_drop_index('ix_letzshop_orders_vendor_id', 'letzshop_orders') - safe_drop_index('ix_letzshop_orders_letzshop_shipment_id', 'letzshop_orders') - safe_drop_index('ix_letzshop_orders_letzshop_order_id', 'letzshop_orders') - safe_drop_index('ix_letzshop_orders_id', 'letzshop_orders') - op.drop_table('letzshop_orders') + if table_exists("letzshop_orders"): + safe_drop_index("idx_letzshop_order_sync", "letzshop_orders") + safe_drop_index("idx_letzshop_order_state", "letzshop_orders") + safe_drop_index("idx_letzshop_order_vendor", "letzshop_orders") + safe_drop_index("ix_letzshop_orders_vendor_id", "letzshop_orders") + safe_drop_index("ix_letzshop_orders_letzshop_shipment_id", "letzshop_orders") + safe_drop_index("ix_letzshop_orders_letzshop_order_id", "letzshop_orders") + safe_drop_index("ix_letzshop_orders_id", "letzshop_orders") + op.drop_table("letzshop_orders") # Drop order_items (references orders) - if table_exists('order_items'): - safe_drop_index('ix_order_items_id', 'order_items') - safe_drop_index('ix_order_items_order_id', 'order_items') - op.drop_table('order_items') + if table_exists("order_items"): + safe_drop_index("ix_order_items_id", "order_items") + safe_drop_index("ix_order_items_order_id", "order_items") + op.drop_table("order_items") # Drop old orders table - if table_exists('orders'): - safe_drop_index('ix_orders_external_order_id', 'orders') - safe_drop_index('ix_orders_channel', 'orders') - safe_drop_index('ix_orders_vendor_id', 'orders') - safe_drop_index('ix_orders_status', 'orders') - safe_drop_index('ix_orders_order_number', 'orders') - safe_drop_index('ix_orders_id', 'orders') - safe_drop_index('ix_orders_customer_id', 'orders') - op.drop_table('orders') + if table_exists("orders"): + safe_drop_index("ix_orders_external_order_id", "orders") + safe_drop_index("ix_orders_channel", "orders") + safe_drop_index("ix_orders_vendor_id", "orders") + 
safe_drop_index("ix_orders_status", "orders") + safe_drop_index("ix_orders_order_number", "orders") + safe_drop_index("ix_orders_id", "orders") + safe_drop_index("ix_orders_customer_id", "orders") + op.drop_table("orders") # ========================================================================= # Step 2: Create new unified orders table # ========================================================================= - op.create_table('orders', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('customer_id', sa.Integer(), nullable=False), - sa.Column('order_number', sa.String(length=100), nullable=False), + op.create_table("orders", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("customer_id", sa.Integer(), nullable=False), + sa.Column("order_number", sa.String(length=100), nullable=False), # Channel/Source - sa.Column('channel', sa.String(length=50), nullable=False, server_default='direct'), + sa.Column("channel", sa.String(length=50), nullable=False, server_default="direct"), # External references (for marketplace orders) - sa.Column('external_order_id', sa.String(length=100), nullable=True), - sa.Column('external_shipment_id', sa.String(length=100), nullable=True), - sa.Column('external_order_number', sa.String(length=100), nullable=True), - sa.Column('external_data', sa.JSON(), nullable=True), + sa.Column("external_order_id", sa.String(length=100), nullable=True), + sa.Column("external_shipment_id", sa.String(length=100), nullable=True), + sa.Column("external_order_number", sa.String(length=100), nullable=True), + sa.Column("external_data", sa.JSON(), nullable=True), # Status - sa.Column('status', sa.String(length=50), nullable=False, server_default='pending'), + sa.Column("status", sa.String(length=50), nullable=False, server_default="pending"), # Financials - sa.Column('subtotal', sa.Float(), nullable=True), - 
sa.Column('tax_amount', sa.Float(), nullable=True), - sa.Column('shipping_amount', sa.Float(), nullable=True), - sa.Column('discount_amount', sa.Float(), nullable=True), - sa.Column('total_amount', sa.Float(), nullable=False), - sa.Column('currency', sa.String(length=10), server_default='EUR', nullable=True), + sa.Column("subtotal", sa.Float(), nullable=True), + sa.Column("tax_amount", sa.Float(), nullable=True), + sa.Column("shipping_amount", sa.Float(), nullable=True), + sa.Column("discount_amount", sa.Float(), nullable=True), + sa.Column("total_amount", sa.Float(), nullable=False), + sa.Column("currency", sa.String(length=10), server_default="EUR", nullable=True), # Customer snapshot - sa.Column('customer_first_name', sa.String(length=100), nullable=False), - sa.Column('customer_last_name', sa.String(length=100), nullable=False), - sa.Column('customer_email', sa.String(length=255), nullable=False), - sa.Column('customer_phone', sa.String(length=50), nullable=True), - sa.Column('customer_locale', sa.String(length=10), nullable=True), + sa.Column("customer_first_name", sa.String(length=100), nullable=False), + sa.Column("customer_last_name", sa.String(length=100), nullable=False), + sa.Column("customer_email", sa.String(length=255), nullable=False), + sa.Column("customer_phone", sa.String(length=50), nullable=True), + sa.Column("customer_locale", sa.String(length=10), nullable=True), # Shipping address snapshot - sa.Column('ship_first_name', sa.String(length=100), nullable=False), - sa.Column('ship_last_name', sa.String(length=100), nullable=False), - sa.Column('ship_company', sa.String(length=200), nullable=True), - sa.Column('ship_address_line_1', sa.String(length=255), nullable=False), - sa.Column('ship_address_line_2', sa.String(length=255), nullable=True), - sa.Column('ship_city', sa.String(length=100), nullable=False), - sa.Column('ship_postal_code', sa.String(length=20), nullable=False), - sa.Column('ship_country_iso', sa.String(length=5), nullable=False), 
+ sa.Column("ship_first_name", sa.String(length=100), nullable=False), + sa.Column("ship_last_name", sa.String(length=100), nullable=False), + sa.Column("ship_company", sa.String(length=200), nullable=True), + sa.Column("ship_address_line_1", sa.String(length=255), nullable=False), + sa.Column("ship_address_line_2", sa.String(length=255), nullable=True), + sa.Column("ship_city", sa.String(length=100), nullable=False), + sa.Column("ship_postal_code", sa.String(length=20), nullable=False), + sa.Column("ship_country_iso", sa.String(length=5), nullable=False), # Billing address snapshot - sa.Column('bill_first_name', sa.String(length=100), nullable=False), - sa.Column('bill_last_name', sa.String(length=100), nullable=False), - sa.Column('bill_company', sa.String(length=200), nullable=True), - sa.Column('bill_address_line_1', sa.String(length=255), nullable=False), - sa.Column('bill_address_line_2', sa.String(length=255), nullable=True), - sa.Column('bill_city', sa.String(length=100), nullable=False), - sa.Column('bill_postal_code', sa.String(length=20), nullable=False), - sa.Column('bill_country_iso', sa.String(length=5), nullable=False), + sa.Column("bill_first_name", sa.String(length=100), nullable=False), + sa.Column("bill_last_name", sa.String(length=100), nullable=False), + sa.Column("bill_company", sa.String(length=200), nullable=True), + sa.Column("bill_address_line_1", sa.String(length=255), nullable=False), + sa.Column("bill_address_line_2", sa.String(length=255), nullable=True), + sa.Column("bill_city", sa.String(length=100), nullable=False), + sa.Column("bill_postal_code", sa.String(length=20), nullable=False), + sa.Column("bill_country_iso", sa.String(length=5), nullable=False), # Tracking - sa.Column('shipping_method', sa.String(length=100), nullable=True), - sa.Column('tracking_number', sa.String(length=100), nullable=True), - sa.Column('tracking_provider', sa.String(length=100), nullable=True), + sa.Column("shipping_method", sa.String(length=100), 
nullable=True), + sa.Column("tracking_number", sa.String(length=100), nullable=True), + sa.Column("tracking_provider", sa.String(length=100), nullable=True), # Notes - sa.Column('customer_notes', sa.Text(), nullable=True), - sa.Column('internal_notes', sa.Text(), nullable=True), + sa.Column("customer_notes", sa.Text(), nullable=True), + sa.Column("internal_notes", sa.Text(), nullable=True), # Timestamps - sa.Column('order_date', sa.DateTime(timezone=True), nullable=False), - sa.Column('confirmed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('shipped_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('delivered_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), + sa.Column("order_date", sa.DateTime(timezone=True), nullable=False), + sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("shipped_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("delivered_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("cancelled_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), # Foreign keys - sa.ForeignKeyConstraint(['customer_id'], ['customers.id']), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']), - sa.PrimaryKeyConstraint('id') + sa.ForeignKeyConstraint(["customer_id"], ["customers.id"]), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]), + sa.PrimaryKeyConstraint("id") ) # Indexes for orders - op.create_index(op.f('ix_orders_id'), 'orders', ['id'], 
unique=False) - op.create_index(op.f('ix_orders_vendor_id'), 'orders', ['vendor_id'], unique=False) - op.create_index(op.f('ix_orders_customer_id'), 'orders', ['customer_id'], unique=False) - op.create_index(op.f('ix_orders_order_number'), 'orders', ['order_number'], unique=True) - op.create_index(op.f('ix_orders_channel'), 'orders', ['channel'], unique=False) - op.create_index(op.f('ix_orders_status'), 'orders', ['status'], unique=False) - op.create_index(op.f('ix_orders_external_order_id'), 'orders', ['external_order_id'], unique=False) - op.create_index(op.f('ix_orders_external_shipment_id'), 'orders', ['external_shipment_id'], unique=False) - op.create_index('idx_order_vendor_status', 'orders', ['vendor_id', 'status'], unique=False) - op.create_index('idx_order_vendor_channel', 'orders', ['vendor_id', 'channel'], unique=False) - op.create_index('idx_order_vendor_date', 'orders', ['vendor_id', 'order_date'], unique=False) + op.create_index(op.f("ix_orders_id"), "orders", ["id"], unique=False) + op.create_index(op.f("ix_orders_vendor_id"), "orders", ["vendor_id"], unique=False) + op.create_index(op.f("ix_orders_customer_id"), "orders", ["customer_id"], unique=False) + op.create_index(op.f("ix_orders_order_number"), "orders", ["order_number"], unique=True) + op.create_index(op.f("ix_orders_channel"), "orders", ["channel"], unique=False) + op.create_index(op.f("ix_orders_status"), "orders", ["status"], unique=False) + op.create_index(op.f("ix_orders_external_order_id"), "orders", ["external_order_id"], unique=False) + op.create_index(op.f("ix_orders_external_shipment_id"), "orders", ["external_shipment_id"], unique=False) + op.create_index("idx_order_vendor_status", "orders", ["vendor_id", "status"], unique=False) + op.create_index("idx_order_vendor_channel", "orders", ["vendor_id", "channel"], unique=False) + op.create_index("idx_order_vendor_date", "orders", ["vendor_id", "order_date"], unique=False) # 
========================================================================= # Step 3: Create new order_items table # ========================================================================= - op.create_table('order_items', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('order_id', sa.Integer(), nullable=False), - sa.Column('product_id', sa.Integer(), nullable=False), + op.create_table("order_items", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("order_id", sa.Integer(), nullable=False), + sa.Column("product_id", sa.Integer(), nullable=False), # Product snapshot - sa.Column('product_name', sa.String(length=255), nullable=False), - sa.Column('product_sku', sa.String(length=100), nullable=True), - sa.Column('gtin', sa.String(length=50), nullable=True), - sa.Column('gtin_type', sa.String(length=20), nullable=True), + sa.Column("product_name", sa.String(length=255), nullable=False), + sa.Column("product_sku", sa.String(length=100), nullable=True), + sa.Column("gtin", sa.String(length=50), nullable=True), + sa.Column("gtin_type", sa.String(length=20), nullable=True), # Pricing - sa.Column('quantity', sa.Integer(), nullable=False), - sa.Column('unit_price', sa.Float(), nullable=False), - sa.Column('total_price', sa.Float(), nullable=False), + sa.Column("quantity", sa.Integer(), nullable=False), + sa.Column("unit_price", sa.Float(), nullable=False), + sa.Column("total_price", sa.Float(), nullable=False), # External references (for marketplace items) - sa.Column('external_item_id', sa.String(length=100), nullable=True), - sa.Column('external_variant_id', sa.String(length=100), nullable=True), + sa.Column("external_item_id", sa.String(length=100), nullable=True), + sa.Column("external_variant_id", sa.String(length=100), nullable=True), # Item state (for marketplace confirmation flow) - sa.Column('item_state', sa.String(length=50), nullable=True), + sa.Column("item_state", sa.String(length=50), nullable=True), # Inventory tracking - 
sa.Column('inventory_reserved', sa.Boolean(), server_default='0', nullable=True), - sa.Column('inventory_fulfilled', sa.Boolean(), server_default='0', nullable=True), + sa.Column("inventory_reserved", sa.Boolean(), server_default="0", nullable=True), + sa.Column("inventory_fulfilled", sa.Boolean(), server_default="0", nullable=True), # Timestamps - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), # Foreign keys - sa.ForeignKeyConstraint(['order_id'], ['orders.id']), - sa.ForeignKeyConstraint(['product_id'], ['products.id']), - sa.PrimaryKeyConstraint('id') + sa.ForeignKeyConstraint(["order_id"], ["orders.id"]), + sa.ForeignKeyConstraint(["product_id"], ["products.id"]), + sa.PrimaryKeyConstraint("id") ) # Indexes for order_items - op.create_index(op.f('ix_order_items_id'), 'order_items', ['id'], unique=False) - op.create_index(op.f('ix_order_items_order_id'), 'order_items', ['order_id'], unique=False) - op.create_index(op.f('ix_order_items_product_id'), 'order_items', ['product_id'], unique=False) - op.create_index(op.f('ix_order_items_gtin'), 'order_items', ['gtin'], unique=False) + op.create_index(op.f("ix_order_items_id"), "order_items", ["id"], unique=False) + op.create_index(op.f("ix_order_items_order_id"), "order_items", ["order_id"], unique=False) + op.create_index(op.f("ix_order_items_product_id"), "order_items", ["product_id"], unique=False) + op.create_index(op.f("ix_order_items_gtin"), "order_items", ["gtin"], unique=False) # ========================================================================= # Step 4: Create updated 
letzshop_fulfillment_queue (references orders) # ========================================================================= - op.create_table('letzshop_fulfillment_queue', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('order_id', sa.Integer(), nullable=False), + op.create_table("letzshop_fulfillment_queue", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("order_id", sa.Integer(), nullable=False), # Operation type - sa.Column('operation', sa.String(length=50), nullable=False), + sa.Column("operation", sa.String(length=50), nullable=False), # Operation payload - sa.Column('payload', sa.JSON(), nullable=False), + sa.Column("payload", sa.JSON(), nullable=False), # Status and retry - sa.Column('status', sa.String(length=50), server_default='pending', nullable=True), - sa.Column('attempts', sa.Integer(), server_default='0', nullable=True), - sa.Column('max_attempts', sa.Integer(), server_default='3', nullable=True), - sa.Column('last_attempt_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('next_retry_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('error_message', sa.Text(), nullable=True), - sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True), + sa.Column("status", sa.String(length=50), server_default="pending", nullable=True), + sa.Column("attempts", sa.Integer(), server_default="0", nullable=True), + sa.Column("max_attempts", sa.Integer(), server_default="3", nullable=True), + sa.Column("last_attempt_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("next_retry_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True), # Response from Letzshop - sa.Column('response_data', sa.JSON(), nullable=True), + sa.Column("response_data", sa.JSON(), nullable=True), # 
Timestamps - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), # Foreign keys - sa.ForeignKeyConstraint(['order_id'], ['orders.id']), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']), - sa.PrimaryKeyConstraint('id') + sa.ForeignKeyConstraint(["order_id"], ["orders.id"]), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]), + sa.PrimaryKeyConstraint("id") ) # Indexes for letzshop_fulfillment_queue - op.create_index(op.f('ix_letzshop_fulfillment_queue_id'), 'letzshop_fulfillment_queue', ['id'], unique=False) - op.create_index(op.f('ix_letzshop_fulfillment_queue_vendor_id'), 'letzshop_fulfillment_queue', ['vendor_id'], unique=False) - op.create_index(op.f('ix_letzshop_fulfillment_queue_order_id'), 'letzshop_fulfillment_queue', ['order_id'], unique=False) - op.create_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue', ['status', 'vendor_id'], unique=False) - op.create_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue', ['status', 'next_retry_at'], unique=False) - op.create_index('idx_fulfillment_queue_order', 'letzshop_fulfillment_queue', ['order_id'], unique=False) + op.create_index(op.f("ix_letzshop_fulfillment_queue_id"), "letzshop_fulfillment_queue", ["id"], unique=False) + op.create_index(op.f("ix_letzshop_fulfillment_queue_vendor_id"), "letzshop_fulfillment_queue", ["vendor_id"], unique=False) + op.create_index(op.f("ix_letzshop_fulfillment_queue_order_id"), "letzshop_fulfillment_queue", ["order_id"], unique=False) + op.create_index("idx_fulfillment_queue_status", "letzshop_fulfillment_queue", ["status", 
"vendor_id"], unique=False) + op.create_index("idx_fulfillment_queue_retry", "letzshop_fulfillment_queue", ["status", "next_retry_at"], unique=False) + op.create_index("idx_fulfillment_queue_order", "letzshop_fulfillment_queue", ["order_id"], unique=False) def downgrade() -> None: # Drop new letzshop_fulfillment_queue - safe_drop_index('idx_fulfillment_queue_order', 'letzshop_fulfillment_queue') - safe_drop_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue') - safe_drop_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue') - safe_drop_index('ix_letzshop_fulfillment_queue_order_id', 'letzshop_fulfillment_queue') - safe_drop_index('ix_letzshop_fulfillment_queue_vendor_id', 'letzshop_fulfillment_queue') - safe_drop_index('ix_letzshop_fulfillment_queue_id', 'letzshop_fulfillment_queue') - safe_drop_table('letzshop_fulfillment_queue') + safe_drop_index("idx_fulfillment_queue_order", "letzshop_fulfillment_queue") + safe_drop_index("idx_fulfillment_queue_retry", "letzshop_fulfillment_queue") + safe_drop_index("idx_fulfillment_queue_status", "letzshop_fulfillment_queue") + safe_drop_index("ix_letzshop_fulfillment_queue_order_id", "letzshop_fulfillment_queue") + safe_drop_index("ix_letzshop_fulfillment_queue_vendor_id", "letzshop_fulfillment_queue") + safe_drop_index("ix_letzshop_fulfillment_queue_id", "letzshop_fulfillment_queue") + safe_drop_table("letzshop_fulfillment_queue") # Drop new order_items - safe_drop_index('ix_order_items_gtin', 'order_items') - safe_drop_index('ix_order_items_product_id', 'order_items') - safe_drop_index('ix_order_items_order_id', 'order_items') - safe_drop_index('ix_order_items_id', 'order_items') - safe_drop_table('order_items') + safe_drop_index("ix_order_items_gtin", "order_items") + safe_drop_index("ix_order_items_product_id", "order_items") + safe_drop_index("ix_order_items_order_id", "order_items") + safe_drop_index("ix_order_items_id", "order_items") + safe_drop_table("order_items") # Drop new orders - 
safe_drop_index('idx_order_vendor_date', 'orders') - safe_drop_index('idx_order_vendor_channel', 'orders') - safe_drop_index('idx_order_vendor_status', 'orders') - safe_drop_index('ix_orders_external_shipment_id', 'orders') - safe_drop_index('ix_orders_external_order_id', 'orders') - safe_drop_index('ix_orders_status', 'orders') - safe_drop_index('ix_orders_channel', 'orders') - safe_drop_index('ix_orders_order_number', 'orders') - safe_drop_index('ix_orders_customer_id', 'orders') - safe_drop_index('ix_orders_vendor_id', 'orders') - safe_drop_index('ix_orders_id', 'orders') - safe_drop_table('orders') + safe_drop_index("idx_order_vendor_date", "orders") + safe_drop_index("idx_order_vendor_channel", "orders") + safe_drop_index("idx_order_vendor_status", "orders") + safe_drop_index("ix_orders_external_shipment_id", "orders") + safe_drop_index("ix_orders_external_order_id", "orders") + safe_drop_index("ix_orders_status", "orders") + safe_drop_index("ix_orders_channel", "orders") + safe_drop_index("ix_orders_order_number", "orders") + safe_drop_index("ix_orders_customer_id", "orders") + safe_drop_index("ix_orders_vendor_id", "orders") + safe_drop_index("ix_orders_id", "orders") + safe_drop_table("orders") # Recreate old orders table - op.create_table('orders', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('customer_id', sa.Integer(), nullable=False), - sa.Column('order_number', sa.String(), nullable=False), - sa.Column('channel', sa.String(length=50), nullable=True, server_default='direct'), - sa.Column('external_order_id', sa.String(length=100), nullable=True), - sa.Column('external_channel_data', sa.JSON(), nullable=True), - sa.Column('status', sa.String(), nullable=False), - sa.Column('subtotal', sa.Float(), nullable=False), - sa.Column('tax_amount', sa.Float(), nullable=True), - sa.Column('shipping_amount', sa.Float(), nullable=True), - sa.Column('discount_amount', sa.Float(), nullable=True), - 
sa.Column('total_amount', sa.Float(), nullable=False), - sa.Column('currency', sa.String(), nullable=True), - sa.Column('shipping_address_id', sa.Integer(), nullable=False), - sa.Column('billing_address_id', sa.Integer(), nullable=False), - sa.Column('shipping_method', sa.String(), nullable=True), - sa.Column('tracking_number', sa.String(), nullable=True), - sa.Column('customer_notes', sa.Text(), nullable=True), - sa.Column('internal_notes', sa.Text(), nullable=True), - sa.Column('paid_at', sa.DateTime(), nullable=True), - sa.Column('shipped_at', sa.DateTime(), nullable=True), - sa.Column('delivered_at', sa.DateTime(), nullable=True), - sa.Column('cancelled_at', sa.DateTime(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['billing_address_id'], ['customer_addresses.id']), - sa.ForeignKeyConstraint(['customer_id'], ['customers.id']), - sa.ForeignKeyConstraint(['shipping_address_id'], ['customer_addresses.id']), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']), - sa.PrimaryKeyConstraint('id') + op.create_table("orders", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("customer_id", sa.Integer(), nullable=False), + sa.Column("order_number", sa.String(), nullable=False), + sa.Column("channel", sa.String(length=50), nullable=True, server_default="direct"), + sa.Column("external_order_id", sa.String(length=100), nullable=True), + sa.Column("external_channel_data", sa.JSON(), nullable=True), + sa.Column("status", sa.String(), nullable=False), + sa.Column("subtotal", sa.Float(), nullable=False), + sa.Column("tax_amount", sa.Float(), nullable=True), + sa.Column("shipping_amount", sa.Float(), nullable=True), + sa.Column("discount_amount", sa.Float(), nullable=True), + sa.Column("total_amount", sa.Float(), nullable=False), + sa.Column("currency", sa.String(), nullable=True), + 
sa.Column("shipping_address_id", sa.Integer(), nullable=False), + sa.Column("billing_address_id", sa.Integer(), nullable=False), + sa.Column("shipping_method", sa.String(), nullable=True), + sa.Column("tracking_number", sa.String(), nullable=True), + sa.Column("customer_notes", sa.Text(), nullable=True), + sa.Column("internal_notes", sa.Text(), nullable=True), + sa.Column("paid_at", sa.DateTime(), nullable=True), + sa.Column("shipped_at", sa.DateTime(), nullable=True), + sa.Column("delivered_at", sa.DateTime(), nullable=True), + sa.Column("cancelled_at", sa.DateTime(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["billing_address_id"], ["customer_addresses.id"]), + sa.ForeignKeyConstraint(["customer_id"], ["customers.id"]), + sa.ForeignKeyConstraint(["shipping_address_id"], ["customer_addresses.id"]), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_orders_customer_id'), 'orders', ['customer_id'], unique=False) - op.create_index(op.f('ix_orders_id'), 'orders', ['id'], unique=False) - op.create_index(op.f('ix_orders_order_number'), 'orders', ['order_number'], unique=True) - op.create_index(op.f('ix_orders_status'), 'orders', ['status'], unique=False) - op.create_index(op.f('ix_orders_vendor_id'), 'orders', ['vendor_id'], unique=False) - op.create_index(op.f('ix_orders_channel'), 'orders', ['channel'], unique=False) - op.create_index(op.f('ix_orders_external_order_id'), 'orders', ['external_order_id'], unique=False) + op.create_index(op.f("ix_orders_customer_id"), "orders", ["customer_id"], unique=False) + op.create_index(op.f("ix_orders_id"), "orders", ["id"], unique=False) + op.create_index(op.f("ix_orders_order_number"), "orders", ["order_number"], unique=True) + op.create_index(op.f("ix_orders_status"), "orders", ["status"], unique=False) + 
op.create_index(op.f("ix_orders_vendor_id"), "orders", ["vendor_id"], unique=False) + op.create_index(op.f("ix_orders_channel"), "orders", ["channel"], unique=False) + op.create_index(op.f("ix_orders_external_order_id"), "orders", ["external_order_id"], unique=False) # Recreate old order_items table - op.create_table('order_items', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('order_id', sa.Integer(), nullable=False), - sa.Column('product_id', sa.Integer(), nullable=False), - sa.Column('product_name', sa.String(), nullable=False), - sa.Column('product_sku', sa.String(), nullable=True), - sa.Column('quantity', sa.Integer(), nullable=False), - sa.Column('unit_price', sa.Float(), nullable=False), - sa.Column('total_price', sa.Float(), nullable=False), - sa.Column('inventory_reserved', sa.Boolean(), nullable=True), - sa.Column('inventory_fulfilled', sa.Boolean(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['order_id'], ['orders.id']), - sa.ForeignKeyConstraint(['product_id'], ['products.id']), - sa.PrimaryKeyConstraint('id') + op.create_table("order_items", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("order_id", sa.Integer(), nullable=False), + sa.Column("product_id", sa.Integer(), nullable=False), + sa.Column("product_name", sa.String(), nullable=False), + sa.Column("product_sku", sa.String(), nullable=True), + sa.Column("quantity", sa.Integer(), nullable=False), + sa.Column("unit_price", sa.Float(), nullable=False), + sa.Column("total_price", sa.Float(), nullable=False), + sa.Column("inventory_reserved", sa.Boolean(), nullable=True), + sa.Column("inventory_fulfilled", sa.Boolean(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["order_id"], ["orders.id"]), + sa.ForeignKeyConstraint(["product_id"], 
["products.id"]), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_order_items_id'), 'order_items', ['id'], unique=False) - op.create_index(op.f('ix_order_items_order_id'), 'order_items', ['order_id'], unique=False) + op.create_index(op.f("ix_order_items_id"), "order_items", ["id"], unique=False) + op.create_index(op.f("ix_order_items_order_id"), "order_items", ["order_id"], unique=False) # Recreate old letzshop_orders table - op.create_table('letzshop_orders', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('letzshop_order_id', sa.String(length=100), nullable=False), - sa.Column('letzshop_shipment_id', sa.String(length=100), nullable=True), - sa.Column('letzshop_order_number', sa.String(length=100), nullable=True), - sa.Column('local_order_id', sa.Integer(), nullable=True), - sa.Column('letzshop_state', sa.String(length=50), nullable=True), - sa.Column('customer_email', sa.String(length=255), nullable=True), - sa.Column('customer_name', sa.String(length=255), nullable=True), - sa.Column('total_amount', sa.String(length=50), nullable=True), - sa.Column('currency', sa.String(length=10), server_default='EUR', nullable=True), - sa.Column('customer_locale', sa.String(length=10), nullable=True), - sa.Column('shipping_country_iso', sa.String(length=5), nullable=True), - sa.Column('billing_country_iso', sa.String(length=5), nullable=True), - sa.Column('order_date', sa.DateTime(timezone=True), nullable=True), - sa.Column('raw_order_data', sa.JSON(), nullable=True), - sa.Column('inventory_units', sa.JSON(), nullable=True), - sa.Column('sync_status', sa.String(length=50), server_default='pending', nullable=True), - sa.Column('last_synced_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('sync_error', sa.Text(), nullable=True), - sa.Column('confirmed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('rejected_at', sa.DateTime(timezone=True), nullable=True), - 
sa.Column('tracking_set_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('tracking_number', sa.String(length=100), nullable=True), - sa.Column('tracking_carrier', sa.String(length=100), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.ForeignKeyConstraint(['local_order_id'], ['orders.id']), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']), - sa.PrimaryKeyConstraint('id') + op.create_table("letzshop_orders", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("letzshop_order_id", sa.String(length=100), nullable=False), + sa.Column("letzshop_shipment_id", sa.String(length=100), nullable=True), + sa.Column("letzshop_order_number", sa.String(length=100), nullable=True), + sa.Column("local_order_id", sa.Integer(), nullable=True), + sa.Column("letzshop_state", sa.String(length=50), nullable=True), + sa.Column("customer_email", sa.String(length=255), nullable=True), + sa.Column("customer_name", sa.String(length=255), nullable=True), + sa.Column("total_amount", sa.String(length=50), nullable=True), + sa.Column("currency", sa.String(length=10), server_default="EUR", nullable=True), + sa.Column("customer_locale", sa.String(length=10), nullable=True), + sa.Column("shipping_country_iso", sa.String(length=5), nullable=True), + sa.Column("billing_country_iso", sa.String(length=5), nullable=True), + sa.Column("order_date", sa.DateTime(timezone=True), nullable=True), + sa.Column("raw_order_data", sa.JSON(), nullable=True), + sa.Column("inventory_units", sa.JSON(), nullable=True), + sa.Column("sync_status", sa.String(length=50), server_default="pending", nullable=True), + sa.Column("last_synced_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("sync_error", sa.Text(), nullable=True), 
+ sa.Column("confirmed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("rejected_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("tracking_set_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("tracking_number", sa.String(length=100), nullable=True), + sa.Column("tracking_carrier", sa.String(length=100), nullable=True), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.ForeignKeyConstraint(["local_order_id"], ["orders.id"]), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_letzshop_orders_id'), 'letzshop_orders', ['id'], unique=False) - op.create_index(op.f('ix_letzshop_orders_letzshop_order_id'), 'letzshop_orders', ['letzshop_order_id'], unique=False) - op.create_index(op.f('ix_letzshop_orders_letzshop_shipment_id'), 'letzshop_orders', ['letzshop_shipment_id'], unique=False) - op.create_index(op.f('ix_letzshop_orders_vendor_id'), 'letzshop_orders', ['vendor_id'], unique=False) - op.create_index('idx_letzshop_order_vendor', 'letzshop_orders', ['vendor_id', 'letzshop_order_id'], unique=False) - op.create_index('idx_letzshop_order_state', 'letzshop_orders', ['vendor_id', 'letzshop_state'], unique=False) - op.create_index('idx_letzshop_order_sync', 'letzshop_orders', ['vendor_id', 'sync_status'], unique=False) + op.create_index(op.f("ix_letzshop_orders_id"), "letzshop_orders", ["id"], unique=False) + op.create_index(op.f("ix_letzshop_orders_letzshop_order_id"), "letzshop_orders", ["letzshop_order_id"], unique=False) + op.create_index(op.f("ix_letzshop_orders_letzshop_shipment_id"), "letzshop_orders", ["letzshop_shipment_id"], unique=False) + op.create_index(op.f("ix_letzshop_orders_vendor_id"), "letzshop_orders", ["vendor_id"], unique=False) + 
op.create_index("idx_letzshop_order_vendor", "letzshop_orders", ["vendor_id", "letzshop_order_id"], unique=False) + op.create_index("idx_letzshop_order_state", "letzshop_orders", ["vendor_id", "letzshop_state"], unique=False) + op.create_index("idx_letzshop_order_sync", "letzshop_orders", ["vendor_id", "sync_status"], unique=False) # Recreate old letzshop_fulfillment_queue table - op.create_table('letzshop_fulfillment_queue', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('letzshop_order_id', sa.Integer(), nullable=False), - sa.Column('operation', sa.String(length=50), nullable=False), - sa.Column('payload', sa.JSON(), nullable=False), - sa.Column('status', sa.String(length=50), server_default='pending', nullable=True), - sa.Column('attempts', sa.Integer(), server_default='0', nullable=True), - sa.Column('max_attempts', sa.Integer(), server_default='3', nullable=True), - sa.Column('last_attempt_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('next_retry_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('error_message', sa.Text(), nullable=True), - sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('response_data', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False), - sa.ForeignKeyConstraint(['letzshop_order_id'], ['letzshop_orders.id']), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']), - sa.PrimaryKeyConstraint('id') + op.create_table("letzshop_fulfillment_queue", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("letzshop_order_id", sa.Integer(), nullable=False), + sa.Column("operation", sa.String(length=50), nullable=False), + sa.Column("payload", sa.JSON(), 
nullable=False), + sa.Column("status", sa.String(length=50), server_default="pending", nullable=True), + sa.Column("attempts", sa.Integer(), server_default="0", nullable=True), + sa.Column("max_attempts", sa.Integer(), server_default="3", nullable=True), + sa.Column("last_attempt_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("next_retry_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("response_data", sa.JSON(), nullable=True), + sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), + sa.ForeignKeyConstraint(["letzshop_order_id"], ["letzshop_orders.id"]), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_letzshop_fulfillment_queue_id'), 'letzshop_fulfillment_queue', ['id'], unique=False) - op.create_index(op.f('ix_letzshop_fulfillment_queue_vendor_id'), 'letzshop_fulfillment_queue', ['vendor_id'], unique=False) - op.create_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue', ['status', 'vendor_id'], unique=False) - op.create_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue', ['status', 'next_retry_at'], unique=False) + op.create_index(op.f("ix_letzshop_fulfillment_queue_id"), "letzshop_fulfillment_queue", ["id"], unique=False) + op.create_index(op.f("ix_letzshop_fulfillment_queue_vendor_id"), "letzshop_fulfillment_queue", ["vendor_id"], unique=False) + op.create_index("idx_fulfillment_queue_status", "letzshop_fulfillment_queue", ["status", "vendor_id"], unique=False) + op.create_index("idx_fulfillment_queue_retry", "letzshop_fulfillment_queue", ["status", "next_retry_at"], unique=False) diff --git 
a/alembic/versions_backup/c9e22eadf533_add_tax_rate_cost_and_letzshop_settings.py b/alembic/versions_backup/c9e22eadf533_add_tax_rate_cost_and_letzshop_settings.py index 6f1dcc02..e2a068ca 100644 --- a/alembic/versions_backup/c9e22eadf533_add_tax_rate_cost_and_letzshop_settings.py +++ b/alembic/versions_backup/c9e22eadf533_add_tax_rate_cost_and_letzshop_settings.py @@ -9,56 +9,56 @@ Adds: - cost_cents to products (for profit calculation) - Letzshop feed settings to vendors (tax_rate, boost_sort, delivery_method, preorder_days) """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = 'c9e22eadf533' -down_revision: Union[str, None] = 'e1f2a3b4c5d6' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "c9e22eadf533" +down_revision: str | None = "e1f2a3b4c5d6" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # === MARKETPLACE PRODUCTS: Add tax_rate_percent === - with op.batch_alter_table('marketplace_products', schema=None) as batch_op: - batch_op.add_column(sa.Column('tax_rate_percent', sa.Integer(), nullable=False, server_default='17')) + with op.batch_alter_table("marketplace_products", schema=None) as batch_op: + batch_op.add_column(sa.Column("tax_rate_percent", sa.Integer(), nullable=False, server_default="17")) # === PRODUCTS: Add tax_rate_percent and cost_cents, rename supplier_cost_cents === - with op.batch_alter_table('products', schema=None) as batch_op: - batch_op.add_column(sa.Column('tax_rate_percent', sa.Integer(), nullable=False, server_default='17')) - batch_op.add_column(sa.Column('cost_cents', sa.Integer(), nullable=True)) + with op.batch_alter_table("products", schema=None) as batch_op: + batch_op.add_column(sa.Column("tax_rate_percent", sa.Integer(), 
nullable=False, server_default="17")) + batch_op.add_column(sa.Column("cost_cents", sa.Integer(), nullable=True)) # Drop old supplier_cost_cents column (data migrated to cost_cents if needed) try: - batch_op.drop_column('supplier_cost_cents') + batch_op.drop_column("supplier_cost_cents") except Exception: pass # Column may not exist # === VENDORS: Add Letzshop feed settings === - with op.batch_alter_table('vendors', schema=None) as batch_op: - batch_op.add_column(sa.Column('letzshop_default_tax_rate', sa.Integer(), nullable=False, server_default='17')) - batch_op.add_column(sa.Column('letzshop_boost_sort', sa.String(length=10), nullable=True, server_default='5.0')) - batch_op.add_column(sa.Column('letzshop_delivery_method', sa.String(length=100), nullable=True, server_default='package_delivery')) - batch_op.add_column(sa.Column('letzshop_preorder_days', sa.Integer(), nullable=True, server_default='1')) + with op.batch_alter_table("vendors", schema=None) as batch_op: + batch_op.add_column(sa.Column("letzshop_default_tax_rate", sa.Integer(), nullable=False, server_default="17")) + batch_op.add_column(sa.Column("letzshop_boost_sort", sa.String(length=10), nullable=True, server_default="5.0")) + batch_op.add_column(sa.Column("letzshop_delivery_method", sa.String(length=100), nullable=True, server_default="package_delivery")) + batch_op.add_column(sa.Column("letzshop_preorder_days", sa.Integer(), nullable=True, server_default="1")) def downgrade() -> None: # === VENDORS: Remove Letzshop feed settings === - with op.batch_alter_table('vendors', schema=None) as batch_op: - batch_op.drop_column('letzshop_preorder_days') - batch_op.drop_column('letzshop_delivery_method') - batch_op.drop_column('letzshop_boost_sort') - batch_op.drop_column('letzshop_default_tax_rate') + with op.batch_alter_table("vendors", schema=None) as batch_op: + batch_op.drop_column("letzshop_preorder_days") + batch_op.drop_column("letzshop_delivery_method") + batch_op.drop_column("letzshop_boost_sort") 
+ batch_op.drop_column("letzshop_default_tax_rate") # === PRODUCTS: Remove tax_rate_percent and cost_cents === - with op.batch_alter_table('products', schema=None) as batch_op: - batch_op.drop_column('cost_cents') - batch_op.drop_column('tax_rate_percent') - batch_op.add_column(sa.Column('supplier_cost_cents', sa.Integer(), nullable=True)) + with op.batch_alter_table("products", schema=None) as batch_op: + batch_op.drop_column("cost_cents") + batch_op.drop_column("tax_rate_percent") + batch_op.add_column(sa.Column("supplier_cost_cents", sa.Integer(), nullable=True)) # === MARKETPLACE PRODUCTS: Remove tax_rate_percent === - with op.batch_alter_table('marketplace_products', schema=None) as batch_op: - batch_op.drop_column('tax_rate_percent') + with op.batch_alter_table("marketplace_products", schema=None) as batch_op: + batch_op.drop_column("tax_rate_percent") diff --git a/alembic/versions_backup/cb88bc9b5f86_add_gtin_columns_to_product_table.py b/alembic/versions_backup/cb88bc9b5f86_add_gtin_columns_to_product_table.py index faa6a494..376e88fe 100644 --- a/alembic/versions_backup/cb88bc9b5f86_add_gtin_columns_to_product_table.py +++ b/alembic/versions_backup/cb88bc9b5f86_add_gtin_columns_to_product_table.py @@ -5,33 +5,33 @@ Revises: a9a86cef6cca Create Date: 2025-12-18 20:54:55.185857 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = 'cb88bc9b5f86' -down_revision: Union[str, None] = 'a9a86cef6cca' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "cb88bc9b5f86" +down_revision: str | None = "a9a86cef6cca" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add GTIN (EAN/UPC barcode) columns to products table for order EAN matching # gtin: The barcode number (e.g., "0889698273022") # gtin_type: The format type from Letzshop (e.g., "gtin13", "gtin14", "isbn13") - op.add_column('products', sa.Column('gtin', sa.String(length=50), nullable=True)) - op.add_column('products', sa.Column('gtin_type', sa.String(length=20), nullable=True)) + op.add_column("products", sa.Column("gtin", sa.String(length=50), nullable=True)) + op.add_column("products", sa.Column("gtin_type", sa.String(length=20), nullable=True)) # Add index for EAN lookups during order matching - op.create_index('idx_product_gtin', 'products', ['gtin'], unique=False) - op.create_index('idx_product_vendor_gtin', 'products', ['vendor_id', 'gtin'], unique=False) + op.create_index("idx_product_gtin", "products", ["gtin"], unique=False) + op.create_index("idx_product_vendor_gtin", "products", ["vendor_id", "gtin"], unique=False) def downgrade() -> None: - op.drop_index('idx_product_vendor_gtin', table_name='products') - op.drop_index('idx_product_gtin', table_name='products') - op.drop_column('products', 'gtin_type') - op.drop_column('products', 'gtin') + op.drop_index("idx_product_vendor_gtin", table_name="products") + op.drop_index("idx_product_gtin", table_name="products") + op.drop_column("products", "gtin_type") + op.drop_column("products", "gtin") diff --git a/alembic/versions_backup/d0325d7c0f25_add_companies_table_and_restructure_.py b/alembic/versions_backup/d0325d7c0f25_add_companies_table_and_restructure_.py index ceb0bba0..c4379821 100644 --- 
a/alembic/versions_backup/d0325d7c0f25_add_companies_table_and_restructure_.py +++ b/alembic/versions_backup/d0325d7c0f25_add_companies_table_and_restructure_.py @@ -5,73 +5,73 @@ Revises: 0bd9ffaaced1 Create Date: 2025-11-30 14:58:17.165142 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = 'd0325d7c0f25' -down_revision: Union[str, None] = '0bd9ffaaced1' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "d0325d7c0f25" +down_revision: str | None = "0bd9ffaaced1" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Create companies table op.create_table( - 'companies', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('owner_user_id', sa.Integer(), nullable=False), - sa.Column('contact_email', sa.String(), nullable=False), - sa.Column('contact_phone', sa.String(), nullable=True), - sa.Column('website', sa.String(), nullable=True), - sa.Column('business_address', sa.Text(), nullable=True), - sa.Column('tax_number', sa.String(), nullable=True), - sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'), - sa.Column('is_verified', sa.Boolean(), nullable=False, server_default='false'), - sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.func.now()), - sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.func.now(), onupdate=sa.func.now()), - sa.ForeignKeyConstraint(['owner_user_id'], ['users.id'], ), - sa.PrimaryKeyConstraint('id') + "companies", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + 
sa.Column("owner_user_id", sa.Integer(), nullable=False), + sa.Column("contact_email", sa.String(), nullable=False), + sa.Column("contact_phone", sa.String(), nullable=True), + sa.Column("website", sa.String(), nullable=True), + sa.Column("business_address", sa.Text(), nullable=True), + sa.Column("tax_number", sa.String(), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"), + sa.Column("is_verified", sa.Boolean(), nullable=False, server_default="false"), + sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()), + sa.Column("updated_at", sa.DateTime(), nullable=False, server_default=sa.func.now(), onupdate=sa.func.now()), + sa.ForeignKeyConstraint(["owner_user_id"], ["users.id"], ), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_companies_id'), 'companies', ['id'], unique=False) - op.create_index(op.f('ix_companies_name'), 'companies', ['name'], unique=False) + op.create_index(op.f("ix_companies_id"), "companies", ["id"], unique=False) + op.create_index(op.f("ix_companies_name"), "companies", ["name"], unique=False) # Use batch mode for SQLite to modify vendors table - with op.batch_alter_table('vendors', schema=None) as batch_op: + with op.batch_alter_table("vendors", schema=None) as batch_op: # Add company_id column - batch_op.add_column(sa.Column('company_id', sa.Integer(), nullable=True)) - batch_op.create_index(batch_op.f('ix_vendors_company_id'), ['company_id'], unique=False) - batch_op.create_foreign_key('fk_vendors_company_id', 'companies', ['company_id'], ['id']) + batch_op.add_column(sa.Column("company_id", sa.Integer(), nullable=True)) + batch_op.create_index(batch_op.f("ix_vendors_company_id"), ["company_id"], unique=False) + batch_op.create_foreign_key("fk_vendors_company_id", "companies", ["company_id"], ["id"]) # Remove old contact fields - batch_op.drop_column('contact_email') - batch_op.drop_column('contact_phone') - batch_op.drop_column('website') - 
batch_op.drop_column('business_address') - batch_op.drop_column('tax_number') + batch_op.drop_column("contact_email") + batch_op.drop_column("contact_phone") + batch_op.drop_column("website") + batch_op.drop_column("business_address") + batch_op.drop_column("tax_number") def downgrade() -> None: # Use batch mode for SQLite to modify vendors table - with op.batch_alter_table('vendors', schema=None) as batch_op: + with op.batch_alter_table("vendors", schema=None) as batch_op: # Re-add contact fields to vendors - batch_op.add_column(sa.Column('tax_number', sa.String(), nullable=True)) - batch_op.add_column(sa.Column('business_address', sa.Text(), nullable=True)) - batch_op.add_column(sa.Column('website', sa.String(), nullable=True)) - batch_op.add_column(sa.Column('contact_phone', sa.String(), nullable=True)) - batch_op.add_column(sa.Column('contact_email', sa.String(), nullable=True)) + batch_op.add_column(sa.Column("tax_number", sa.String(), nullable=True)) + batch_op.add_column(sa.Column("business_address", sa.Text(), nullable=True)) + batch_op.add_column(sa.Column("website", sa.String(), nullable=True)) + batch_op.add_column(sa.Column("contact_phone", sa.String(), nullable=True)) + batch_op.add_column(sa.Column("contact_email", sa.String(), nullable=True)) # Remove company_id from vendors - batch_op.drop_constraint('fk_vendors_company_id', type_='foreignkey') - batch_op.drop_index(batch_op.f('ix_vendors_company_id')) - batch_op.drop_column('company_id') + batch_op.drop_constraint("fk_vendors_company_id", type_="foreignkey") + batch_op.drop_index(batch_op.f("ix_vendors_company_id")) + batch_op.drop_column("company_id") # Drop companies table - op.drop_index(op.f('ix_companies_name'), table_name='companies') - op.drop_index(op.f('ix_companies_id'), table_name='companies') - op.drop_table('companies') + op.drop_index(op.f("ix_companies_name"), table_name="companies") + op.drop_index(op.f("ix_companies_id"), table_name="companies") + op.drop_table("companies") diff 
--git a/alembic/versions_backup/d2e3f4a5b6c7_add_order_item_exceptions.py b/alembic/versions_backup/d2e3f4a5b6c7_add_order_item_exceptions.py index 3de56663..d29ac1a5 100644 --- a/alembic/versions_backup/d2e3f4a5b6c7_add_order_item_exceptions.py +++ b/alembic/versions_backup/d2e3f4a5b6c7_add_order_item_exceptions.py @@ -12,25 +12,25 @@ The exception system allows marketplace orders to be imported even when products are not found by GTIN. Items are linked to a placeholder product and exceptions are tracked for QC resolution. """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa from sqlalchemy import inspect +from alembic import op # revision identifiers, used by Alembic. -revision: str = 'd2e3f4a5b6c7' -down_revision: Union[str, None] = 'c1d2e3f4a5b6' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "d2e3f4a5b6c7" +down_revision: str | None = "c1d2e3f4a5b6" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def column_exists(table_name: str, column_name: str) -> bool: """Check if a column exists in a table.""" bind = op.get_bind() inspector = inspect(bind) - columns = [col['name'] for col in inspector.get_columns(table_name)] + columns = [col["name"] for col in inspector.get_columns(table_name)] return column_name in columns @@ -47,7 +47,7 @@ def index_exists(index_name: str, table_name: str) -> bool: inspector = inspect(bind) try: indexes = inspector.get_indexes(table_name) - return any(idx['name'] == index_name for idx in indexes) + return any(idx["name"] == index_name for idx in indexes) except Exception: return False @@ -56,124 +56,124 @@ def upgrade() -> None: # ========================================================================= # Step 1: Add needs_product_match column to order_items # ========================================================================= - if 
not column_exists('order_items', 'needs_product_match'): + if not column_exists("order_items", "needs_product_match"): op.add_column( - 'order_items', + "order_items", sa.Column( - 'needs_product_match', + "needs_product_match", sa.Boolean(), - server_default='0', + server_default="0", nullable=False ) ) - if not index_exists('ix_order_items_needs_product_match', 'order_items'): + if not index_exists("ix_order_items_needs_product_match", "order_items"): op.create_index( - 'ix_order_items_needs_product_match', - 'order_items', - ['needs_product_match'] + "ix_order_items_needs_product_match", + "order_items", + ["needs_product_match"] ) # ========================================================================= # Step 2: Create order_item_exceptions table # ========================================================================= - if not table_exists('order_item_exceptions'): + if not table_exists("order_item_exceptions"): op.create_table( - 'order_item_exceptions', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('order_item_id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), - sa.Column('original_gtin', sa.String(length=50), nullable=True), - sa.Column('original_product_name', sa.String(length=500), nullable=True), - sa.Column('original_sku', sa.String(length=100), nullable=True), + "order_item_exceptions", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("order_item_id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), + sa.Column("original_gtin", sa.String(length=50), nullable=True), + sa.Column("original_product_name", sa.String(length=500), nullable=True), + sa.Column("original_sku", sa.String(length=100), nullable=True), sa.Column( - 'exception_type', + "exception_type", sa.String(length=50), nullable=False, - server_default='product_not_found' + server_default="product_not_found" ), sa.Column( - 'status', + "status", sa.String(length=50), nullable=False, - 
server_default='pending' + server_default="pending" ), - sa.Column('resolved_product_id', sa.Integer(), nullable=True), - sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('resolved_by', sa.Integer(), nullable=True), - sa.Column('resolution_notes', sa.Text(), nullable=True), + sa.Column("resolved_product_id", sa.Integer(), nullable=True), + sa.Column("resolved_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("resolved_by", sa.Integer(), nullable=True), + sa.Column("resolution_notes", sa.Text(), nullable=True), sa.Column( - 'created_at', + "created_at", sa.DateTime(timezone=True), - server_default=sa.text('(CURRENT_TIMESTAMP)'), + server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False ), sa.Column( - 'updated_at', + "updated_at", sa.DateTime(timezone=True), - server_default=sa.text('(CURRENT_TIMESTAMP)'), + server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False ), sa.ForeignKeyConstraint( - ['order_item_id'], - ['order_items.id'], - ondelete='CASCADE' + ["order_item_id"], + ["order_items.id"], + ondelete="CASCADE" ), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']), - sa.ForeignKeyConstraint(['resolved_product_id'], ['products.id']), - sa.ForeignKeyConstraint(['resolved_by'], ['users.id']), - sa.PrimaryKeyConstraint('id') + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]), + sa.ForeignKeyConstraint(["resolved_product_id"], ["products.id"]), + sa.ForeignKeyConstraint(["resolved_by"], ["users.id"]), + sa.PrimaryKeyConstraint("id") ) # Create indexes op.create_index( - 'ix_order_item_exceptions_id', - 'order_item_exceptions', - ['id'] + "ix_order_item_exceptions_id", + "order_item_exceptions", + ["id"] ) op.create_index( - 'ix_order_item_exceptions_vendor_id', - 'order_item_exceptions', - ['vendor_id'] + "ix_order_item_exceptions_vendor_id", + "order_item_exceptions", + ["vendor_id"] ) op.create_index( - 'ix_order_item_exceptions_status', - 'order_item_exceptions', - ['status'] + 
"ix_order_item_exceptions_status", + "order_item_exceptions", + ["status"] ) op.create_index( - 'idx_exception_vendor_status', - 'order_item_exceptions', - ['vendor_id', 'status'] + "idx_exception_vendor_status", + "order_item_exceptions", + ["vendor_id", "status"] ) op.create_index( - 'idx_exception_gtin', - 'order_item_exceptions', - ['vendor_id', 'original_gtin'] + "idx_exception_gtin", + "order_item_exceptions", + ["vendor_id", "original_gtin"] ) # Unique constraint on order_item_id (one exception per item) op.create_index( - 'uq_order_item_exception', - 'order_item_exceptions', - ['order_item_id'], + "uq_order_item_exception", + "order_item_exceptions", + ["order_item_id"], unique=True ) def downgrade() -> None: # Drop order_item_exceptions table - if table_exists('order_item_exceptions'): - op.drop_index('uq_order_item_exception', table_name='order_item_exceptions') - op.drop_index('idx_exception_gtin', table_name='order_item_exceptions') - op.drop_index('idx_exception_vendor_status', table_name='order_item_exceptions') - op.drop_index('ix_order_item_exceptions_status', table_name='order_item_exceptions') - op.drop_index('ix_order_item_exceptions_vendor_id', table_name='order_item_exceptions') - op.drop_index('ix_order_item_exceptions_id', table_name='order_item_exceptions') - op.drop_table('order_item_exceptions') + if table_exists("order_item_exceptions"): + op.drop_index("uq_order_item_exception", table_name="order_item_exceptions") + op.drop_index("idx_exception_gtin", table_name="order_item_exceptions") + op.drop_index("idx_exception_vendor_status", table_name="order_item_exceptions") + op.drop_index("ix_order_item_exceptions_status", table_name="order_item_exceptions") + op.drop_index("ix_order_item_exceptions_vendor_id", table_name="order_item_exceptions") + op.drop_index("ix_order_item_exceptions_id", table_name="order_item_exceptions") + op.drop_table("order_item_exceptions") # Remove needs_product_match column from order_items - if 
column_exists('order_items', 'needs_product_match'): - if index_exists('ix_order_items_needs_product_match', 'order_items'): - op.drop_index('ix_order_items_needs_product_match', table_name='order_items') - op.drop_column('order_items', 'needs_product_match') + if column_exists("order_items", "needs_product_match"): + if index_exists("ix_order_items_needs_product_match", "order_items"): + op.drop_index("ix_order_items_needs_product_match", table_name="order_items") + op.drop_column("order_items", "needs_product_match") diff --git a/alembic/versions_backup/d7a4a3f06394_add_email_templates_and_logs_tables.py b/alembic/versions_backup/d7a4a3f06394_add_email_templates_and_logs_tables.py index e3dfe996..a7facd44 100644 --- a/alembic/versions_backup/d7a4a3f06394_add_email_templates_and_logs_tables.py +++ b/alembic/versions_backup/d7a4a3f06394_add_email_templates_and_logs_tables.py @@ -5,87 +5,87 @@ Revises: 404b3e2d2865 Create Date: 2025-12-27 20:48:00.661523 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa from sqlalchemy import text +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = 'd7a4a3f06394' -down_revision: Union[str, None] = '404b3e2d2865' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "d7a4a3f06394" +down_revision: str | None = "404b3e2d2865" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Create email_templates table - op.create_table('email_templates', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('code', sa.String(length=100), nullable=False), - sa.Column('language', sa.String(length=5), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('category', sa.String(length=50), nullable=False), - sa.Column('subject', sa.String(length=500), nullable=False), - sa.Column('body_html', sa.Text(), nullable=False), - sa.Column('body_text', sa.Text(), nullable=True), - sa.Column('variables', sa.Text(), nullable=True), - sa.Column('is_active', sa.Boolean(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id'), + op.create_table("email_templates", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("code", sa.String(length=100), nullable=False), + sa.Column("language", sa.String(length=5), nullable=False), + sa.Column("name", sa.String(length=255), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("category", sa.String(length=50), nullable=False), + sa.Column("subject", sa.String(length=500), nullable=False), + sa.Column("body_html", sa.Text(), nullable=False), + sa.Column("body_text", sa.Text(), nullable=True), + sa.Column("variables", sa.Text(), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), 
nullable=False), + sa.PrimaryKeyConstraint("id"), ) - op.create_index(op.f('ix_email_templates_category'), 'email_templates', ['category'], unique=False) - op.create_index(op.f('ix_email_templates_code'), 'email_templates', ['code'], unique=False) - op.create_index(op.f('ix_email_templates_id'), 'email_templates', ['id'], unique=False) + op.create_index(op.f("ix_email_templates_category"), "email_templates", ["category"], unique=False) + op.create_index(op.f("ix_email_templates_code"), "email_templates", ["code"], unique=False) + op.create_index(op.f("ix_email_templates_id"), "email_templates", ["id"], unique=False) # Create email_logs table - op.create_table('email_logs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('template_code', sa.String(length=100), nullable=True), - sa.Column('template_id', sa.Integer(), nullable=True), - sa.Column('recipient_email', sa.String(length=255), nullable=False), - sa.Column('recipient_name', sa.String(length=255), nullable=True), - sa.Column('subject', sa.String(length=500), nullable=False), - sa.Column('body_html', sa.Text(), nullable=True), - sa.Column('body_text', sa.Text(), nullable=True), - sa.Column('from_email', sa.String(length=255), nullable=False), - sa.Column('from_name', sa.String(length=255), nullable=True), - sa.Column('reply_to', sa.String(length=255), nullable=True), - sa.Column('status', sa.String(length=20), nullable=False), - sa.Column('sent_at', sa.DateTime(), nullable=True), - sa.Column('delivered_at', sa.DateTime(), nullable=True), - sa.Column('opened_at', sa.DateTime(), nullable=True), - sa.Column('clicked_at', sa.DateTime(), nullable=True), - sa.Column('error_message', sa.Text(), nullable=True), - sa.Column('retry_count', sa.Integer(), nullable=False), - sa.Column('provider', sa.String(length=50), nullable=True), - sa.Column('provider_message_id', sa.String(length=255), nullable=True), - sa.Column('vendor_id', sa.Integer(), nullable=True), - sa.Column('user_id', sa.Integer(), 
nullable=True), - sa.Column('related_type', sa.String(length=50), nullable=True), - sa.Column('related_id', sa.Integer(), nullable=True), - sa.Column('extra_data', sa.Text(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['template_id'], ['email_templates.id']), - sa.ForeignKeyConstraint(['user_id'], ['users.id']), - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']), - sa.PrimaryKeyConstraint('id') + op.create_table("email_logs", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("template_code", sa.String(length=100), nullable=True), + sa.Column("template_id", sa.Integer(), nullable=True), + sa.Column("recipient_email", sa.String(length=255), nullable=False), + sa.Column("recipient_name", sa.String(length=255), nullable=True), + sa.Column("subject", sa.String(length=500), nullable=False), + sa.Column("body_html", sa.Text(), nullable=True), + sa.Column("body_text", sa.Text(), nullable=True), + sa.Column("from_email", sa.String(length=255), nullable=False), + sa.Column("from_name", sa.String(length=255), nullable=True), + sa.Column("reply_to", sa.String(length=255), nullable=True), + sa.Column("status", sa.String(length=20), nullable=False), + sa.Column("sent_at", sa.DateTime(), nullable=True), + sa.Column("delivered_at", sa.DateTime(), nullable=True), + sa.Column("opened_at", sa.DateTime(), nullable=True), + sa.Column("clicked_at", sa.DateTime(), nullable=True), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("retry_count", sa.Integer(), nullable=False), + sa.Column("provider", sa.String(length=50), nullable=True), + sa.Column("provider_message_id", sa.String(length=255), nullable=True), + sa.Column("vendor_id", sa.Integer(), nullable=True), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("related_type", sa.String(length=50), nullable=True), + sa.Column("related_id", sa.Integer(), nullable=True), + 
sa.Column("extra_data", sa.Text(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["template_id"], ["email_templates.id"]), + sa.ForeignKeyConstraint(["user_id"], ["users.id"]), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]), + sa.PrimaryKeyConstraint("id") ) - op.create_index(op.f('ix_email_logs_id'), 'email_logs', ['id'], unique=False) - op.create_index(op.f('ix_email_logs_provider_message_id'), 'email_logs', ['provider_message_id'], unique=False) - op.create_index(op.f('ix_email_logs_recipient_email'), 'email_logs', ['recipient_email'], unique=False) - op.create_index(op.f('ix_email_logs_status'), 'email_logs', ['status'], unique=False) - op.create_index(op.f('ix_email_logs_template_code'), 'email_logs', ['template_code'], unique=False) - op.create_index(op.f('ix_email_logs_user_id'), 'email_logs', ['user_id'], unique=False) - op.create_index(op.f('ix_email_logs_vendor_id'), 'email_logs', ['vendor_id'], unique=False) + op.create_index(op.f("ix_email_logs_id"), "email_logs", ["id"], unique=False) + op.create_index(op.f("ix_email_logs_provider_message_id"), "email_logs", ["provider_message_id"], unique=False) + op.create_index(op.f("ix_email_logs_recipient_email"), "email_logs", ["recipient_email"], unique=False) + op.create_index(op.f("ix_email_logs_status"), "email_logs", ["status"], unique=False) + op.create_index(op.f("ix_email_logs_template_code"), "email_logs", ["template_code"], unique=False) + op.create_index(op.f("ix_email_logs_user_id"), "email_logs", ["user_id"], unique=False) + op.create_index(op.f("ix_email_logs_vendor_id"), "email_logs", ["vendor_id"], unique=False) # application_logs - alter columns - op.alter_column('application_logs', 'created_at', existing_type=sa.DATETIME(), nullable=False) - op.alter_column('application_logs', 'updated_at', existing_type=sa.DATETIME(), nullable=False) + op.alter_column("application_logs", 
"created_at", existing_type=sa.DATETIME(), nullable=False) + op.alter_column("application_logs", "updated_at", existing_type=sa.DATETIME(), nullable=False) # capacity_snapshots indexes (PostgreSQL IF EXISTS/IF NOT EXISTS) op.execute(text("DROP INDEX IF EXISTS ix_capacity_snapshots_date")) @@ -93,17 +93,17 @@ def upgrade() -> None: op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_capacity_snapshots_snapshot_date ON capacity_snapshots (snapshot_date)")) # cart_items - alter columns - op.alter_column('cart_items', 'created_at', existing_type=sa.DATETIME(), nullable=False) - op.alter_column('cart_items', 'updated_at', existing_type=sa.DATETIME(), nullable=False) + op.alter_column("cart_items", "created_at", existing_type=sa.DATETIME(), nullable=False) + op.alter_column("cart_items", "updated_at", existing_type=sa.DATETIME(), nullable=False) # customer_addresses index rename op.execute(text("DROP INDEX IF EXISTS ix_customers_addresses_id")) op.execute(text("CREATE INDEX IF NOT EXISTS ix_customer_addresses_id ON customer_addresses (id)")) # inventory - alter columns and constraints - op.alter_column('inventory', 'warehouse', existing_type=sa.VARCHAR(), nullable=False) - op.alter_column('inventory', 'bin_location', existing_type=sa.VARCHAR(), nullable=False) - op.alter_column('inventory', 'location', existing_type=sa.VARCHAR(), nullable=True) + op.alter_column("inventory", "warehouse", existing_type=sa.VARCHAR(), nullable=False) + op.alter_column("inventory", "bin_location", existing_type=sa.VARCHAR(), nullable=False) + op.alter_column("inventory", "location", existing_type=sa.VARCHAR(), nullable=True) op.execute(text("DROP INDEX IF EXISTS idx_inventory_product_location")) op.execute(text("ALTER TABLE inventory DROP CONSTRAINT IF EXISTS uq_inventory_product_location")) op.execute(text(""" @@ -120,8 +120,8 @@ def upgrade() -> None: op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_product_translations_id ON marketplace_product_translations (id)")) # 
marketplace_products - alter columns - op.alter_column('marketplace_products', 'is_digital', existing_type=sa.BOOLEAN(), nullable=True) - op.alter_column('marketplace_products', 'is_active', existing_type=sa.BOOLEAN(), nullable=True) + op.alter_column("marketplace_products", "is_digital", existing_type=sa.BOOLEAN(), nullable=True) + op.alter_column("marketplace_products", "is_active", existing_type=sa.BOOLEAN(), nullable=True) # marketplace_products indexes op.execute(text("DROP INDEX IF EXISTS idx_mp_is_active")) @@ -146,7 +146,7 @@ def upgrade() -> None: """)) # order_items - alter column - op.alter_column('order_items', 'needs_product_match', existing_type=sa.BOOLEAN(), nullable=True) + op.alter_column("order_items", "needs_product_match", existing_type=sa.BOOLEAN(), nullable=True) # order_items indexes op.execute(text("DROP INDEX IF EXISTS ix_order_items_gtin")) @@ -185,7 +185,7 @@ def upgrade() -> None: op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_domains_id ON vendor_domains (id)")) # vendor_subscriptions - alter column and FK - op.alter_column('vendor_subscriptions', 'payment_retry_count', existing_type=sa.INTEGER(), nullable=False) + op.alter_column("vendor_subscriptions", "payment_retry_count", existing_type=sa.INTEGER(), nullable=False) op.execute(text(""" DO $$ BEGIN @@ -207,12 +207,12 @@ def upgrade() -> None: op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_users_invitation_token ON vendor_users (invitation_token)")) # vendors - alter column - op.alter_column('vendors', 'company_id', existing_type=sa.INTEGER(), nullable=False) + op.alter_column("vendors", "company_id", existing_type=sa.INTEGER(), nullable=False) def downgrade() -> None: # vendors - op.alter_column('vendors', 'company_id', existing_type=sa.INTEGER(), nullable=True) + op.alter_column("vendors", "company_id", existing_type=sa.INTEGER(), nullable=True) # vendor_users indexes op.execute(text("DROP INDEX IF EXISTS ix_vendor_users_invitation_token")) @@ -226,7 +226,7 @@ def 
downgrade() -> None: # vendor_subscriptions op.execute(text("ALTER TABLE vendor_subscriptions DROP CONSTRAINT IF EXISTS fk_vendor_subscriptions_tier_id")) - op.alter_column('vendor_subscriptions', 'payment_retry_count', existing_type=sa.INTEGER(), nullable=True) + op.alter_column("vendor_subscriptions", "payment_retry_count", existing_type=sa.INTEGER(), nullable=True) # vendor_domains indexes op.execute(text("DROP INDEX IF EXISTS ix_vendor_domains_id")) @@ -260,7 +260,7 @@ def downgrade() -> None: # order_items op.execute(text("CREATE INDEX IF NOT EXISTS ix_order_items_product_id ON order_items (product_id)")) op.execute(text("CREATE INDEX IF NOT EXISTS ix_order_items_gtin ON order_items (gtin)")) - op.alter_column('order_items', 'needs_product_match', existing_type=sa.BOOLEAN(), nullable=False) + op.alter_column("order_items", "needs_product_match", existing_type=sa.BOOLEAN(), nullable=False) # order_item_exceptions op.execute(text("ALTER TABLE order_item_exceptions DROP CONSTRAINT IF EXISTS uq_order_item_exceptions_order_item_id")) @@ -278,8 +278,8 @@ def downgrade() -> None: op.execute(text("CREATE INDEX IF NOT EXISTS idx_mp_is_active ON marketplace_products (is_active)")) # marketplace_products columns - op.alter_column('marketplace_products', 'is_active', existing_type=sa.BOOLEAN(), nullable=False) - op.alter_column('marketplace_products', 'is_digital', existing_type=sa.BOOLEAN(), nullable=False) + op.alter_column("marketplace_products", "is_active", existing_type=sa.BOOLEAN(), nullable=False) + op.alter_column("marketplace_products", "is_digital", existing_type=sa.BOOLEAN(), nullable=False) # marketplace imports op.execute(text("DROP INDEX IF EXISTS ix_marketplace_product_translations_id")) @@ -296,17 +296,17 @@ def downgrade() -> None: END $$; """)) op.execute(text("CREATE INDEX IF NOT EXISTS idx_inventory_product_location ON inventory (product_id, location)")) - op.alter_column('inventory', 'location', existing_type=sa.VARCHAR(), nullable=False) - 
op.alter_column('inventory', 'bin_location', existing_type=sa.VARCHAR(), nullable=True) - op.alter_column('inventory', 'warehouse', existing_type=sa.VARCHAR(), nullable=True) + op.alter_column("inventory", "location", existing_type=sa.VARCHAR(), nullable=False) + op.alter_column("inventory", "bin_location", existing_type=sa.VARCHAR(), nullable=True) + op.alter_column("inventory", "warehouse", existing_type=sa.VARCHAR(), nullable=True) # customer_addresses op.execute(text("DROP INDEX IF EXISTS ix_customer_addresses_id")) op.execute(text("CREATE INDEX IF NOT EXISTS ix_customers_addresses_id ON customer_addresses (id)")) # cart_items - op.alter_column('cart_items', 'updated_at', existing_type=sa.DATETIME(), nullable=True) - op.alter_column('cart_items', 'created_at', existing_type=sa.DATETIME(), nullable=True) + op.alter_column("cart_items", "updated_at", existing_type=sa.DATETIME(), nullable=True) + op.alter_column("cart_items", "created_at", existing_type=sa.DATETIME(), nullable=True) # capacity_snapshots op.execute(text("DROP INDEX IF EXISTS ix_capacity_snapshots_snapshot_date")) @@ -314,19 +314,19 @@ def downgrade() -> None: op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_capacity_snapshots_date ON capacity_snapshots (snapshot_date)")) # application_logs - op.alter_column('application_logs', 'updated_at', existing_type=sa.DATETIME(), nullable=True) - op.alter_column('application_logs', 'created_at', existing_type=sa.DATETIME(), nullable=True) + op.alter_column("application_logs", "updated_at", existing_type=sa.DATETIME(), nullable=True) + op.alter_column("application_logs", "created_at", existing_type=sa.DATETIME(), nullable=True) # Drop email tables - op.drop_index(op.f('ix_email_logs_vendor_id'), table_name='email_logs') - op.drop_index(op.f('ix_email_logs_user_id'), table_name='email_logs') - op.drop_index(op.f('ix_email_logs_template_code'), table_name='email_logs') - op.drop_index(op.f('ix_email_logs_status'), table_name='email_logs') - 
op.drop_index(op.f('ix_email_logs_recipient_email'), table_name='email_logs') - op.drop_index(op.f('ix_email_logs_provider_message_id'), table_name='email_logs') - op.drop_index(op.f('ix_email_logs_id'), table_name='email_logs') - op.drop_table('email_logs') - op.drop_index(op.f('ix_email_templates_id'), table_name='email_templates') - op.drop_index(op.f('ix_email_templates_code'), table_name='email_templates') - op.drop_index(op.f('ix_email_templates_category'), table_name='email_templates') - op.drop_table('email_templates') + op.drop_index(op.f("ix_email_logs_vendor_id"), table_name="email_logs") + op.drop_index(op.f("ix_email_logs_user_id"), table_name="email_logs") + op.drop_index(op.f("ix_email_logs_template_code"), table_name="email_logs") + op.drop_index(op.f("ix_email_logs_status"), table_name="email_logs") + op.drop_index(op.f("ix_email_logs_recipient_email"), table_name="email_logs") + op.drop_index(op.f("ix_email_logs_provider_message_id"), table_name="email_logs") + op.drop_index(op.f("ix_email_logs_id"), table_name="email_logs") + op.drop_table("email_logs") + op.drop_index(op.f("ix_email_templates_id"), table_name="email_templates") + op.drop_index(op.f("ix_email_templates_code"), table_name="email_templates") + op.drop_index(op.f("ix_email_templates_category"), table_name="email_templates") + op.drop_table("email_templates") diff --git a/alembic/versions_backup/e1a2b3c4d5e6_add_product_type_and_digital_fields.py b/alembic/versions_backup/e1a2b3c4d5e6_add_product_type_and_digital_fields.py index 6b728a8a..d3059094 100644 --- a/alembic/versions_backup/e1a2b3c4d5e6_add_product_type_and_digital_fields.py +++ b/alembic/versions_backup/e1a2b3c4d5e6_add_product_type_and_digital_fields.py @@ -17,16 +17,17 @@ It also renames 'product_type' to 'product_type_raw' to preserve the original Google Shopping feed value while using 'product_type' for the new enum. 
""" -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa + from alembic import op # revision identifiers, used by Alembic. revision: str = "e1a2b3c4d5e6" -down_revision: Union[str, None] = "28d44d503cac" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "28d44d503cac" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/e1bfb453fbe9_add_warehouse_and_bin_location_to_.py b/alembic/versions_backup/e1bfb453fbe9_add_warehouse_and_bin_location_to_.py index 80d263a7..831fe07a 100644 --- a/alembic/versions_backup/e1bfb453fbe9_add_warehouse_and_bin_location_to_.py +++ b/alembic/versions_backup/e1bfb453fbe9_add_warehouse_and_bin_location_to_.py @@ -5,18 +5,18 @@ Revises: j8e9f0a1b2c3 Create Date: 2025-12-25 12:21:24.006548 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa from sqlalchemy import text +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = 'e1bfb453fbe9' -down_revision: Union[str, None] = 'j8e9f0a1b2c3' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "e1bfb453fbe9" +down_revision: str | None = "j8e9f0a1b2c3" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def get_column_names(conn, table_name: str) -> set: @@ -43,11 +43,11 @@ def upgrade() -> None: # Check if columns already exist (idempotent) columns = get_column_names(conn, "inventory") - if 'warehouse' not in columns: - op.add_column('inventory', sa.Column('warehouse', sa.String(), nullable=False, server_default='strassen')) + if "warehouse" not in columns: + op.add_column("inventory", sa.Column("warehouse", sa.String(), nullable=False, server_default="strassen")) - if 'bin_location' not in columns: - op.add_column('inventory', sa.Column('bin_location', sa.String(), nullable=False, server_default='')) + if "bin_location" not in columns: + op.add_column("inventory", sa.Column("bin_location", sa.String(), nullable=False, server_default="")) # Migrate existing data: copy location to bin_location, set default warehouse conn.execute(text(""" @@ -60,12 +60,12 @@ def upgrade() -> None: # Create indexes if they don't exist existing_indexes = get_index_names(conn, "inventory") - if 'idx_inventory_warehouse_bin' not in existing_indexes: - op.create_index('idx_inventory_warehouse_bin', 'inventory', ['warehouse', 'bin_location'], unique=False) - if 'ix_inventory_bin_location' not in existing_indexes: - op.create_index(op.f('ix_inventory_bin_location'), 'inventory', ['bin_location'], unique=False) - if 'ix_inventory_warehouse' not in existing_indexes: - op.create_index(op.f('ix_inventory_warehouse'), 'inventory', ['warehouse'], unique=False) + if "idx_inventory_warehouse_bin" not in existing_indexes: + op.create_index("idx_inventory_warehouse_bin", "inventory", ["warehouse", "bin_location"], unique=False) + if 
"ix_inventory_bin_location" not in existing_indexes: + op.create_index(op.f("ix_inventory_bin_location"), "inventory", ["bin_location"], unique=False) + if "ix_inventory_warehouse" not in existing_indexes: + op.create_index(op.f("ix_inventory_warehouse"), "inventory", ["warehouse"], unique=False) def downgrade() -> None: @@ -74,17 +74,17 @@ def downgrade() -> None: # Check which indexes exist before dropping existing_indexes = get_index_names(conn, "inventory") - if 'ix_inventory_warehouse' in existing_indexes: - op.drop_index(op.f('ix_inventory_warehouse'), table_name='inventory') - if 'ix_inventory_bin_location' in existing_indexes: - op.drop_index(op.f('ix_inventory_bin_location'), table_name='inventory') - if 'idx_inventory_warehouse_bin' in existing_indexes: - op.drop_index('idx_inventory_warehouse_bin', table_name='inventory') + if "ix_inventory_warehouse" in existing_indexes: + op.drop_index(op.f("ix_inventory_warehouse"), table_name="inventory") + if "ix_inventory_bin_location" in existing_indexes: + op.drop_index(op.f("ix_inventory_bin_location"), table_name="inventory") + if "idx_inventory_warehouse_bin" in existing_indexes: + op.drop_index("idx_inventory_warehouse_bin", table_name="inventory") # Check if columns exist before dropping columns = get_column_names(conn, "inventory") - if 'bin_location' in columns: - op.drop_column('inventory', 'bin_location') - if 'warehouse' in columns: - op.drop_column('inventory', 'warehouse') + if "bin_location" in columns: + op.drop_column("inventory", "bin_location") + if "warehouse" in columns: + op.drop_column("inventory", "warehouse") diff --git a/alembic/versions_backup/e1f2a3b4c5d6_convert_prices_to_integer_cents.py b/alembic/versions_backup/e1f2a3b4c5d6_convert_prices_to_integer_cents.py index e35312a9..bb5b2a07 100644 --- a/alembic/versions_backup/e1f2a3b4c5d6_convert_prices_to_integer_cents.py +++ b/alembic/versions_backup/e1f2a3b4c5d6_convert_prices_to_integer_cents.py @@ -20,17 +20,17 @@ Affected tables: See 
docs/architecture/money-handling.md for full documentation. """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = 'e1f2a3b4c5d6' -down_revision: Union[str, None] = 'c00d2985701f' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "e1f2a3b4c5d6" +down_revision: str | None = "c00d2985701f" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: @@ -38,186 +38,186 @@ def upgrade() -> None: # Strategy: Add new _cents columns, migrate data, drop old columns # === PRODUCTS TABLE === - with op.batch_alter_table('products', schema=None) as batch_op: + with op.batch_alter_table("products", schema=None) as batch_op: # Add new cents columns - batch_op.add_column(sa.Column('price_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('sale_price_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('supplier_cost_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('margin_percent_x100', sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("price_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("sale_price_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("supplier_cost_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("margin_percent_x100", sa.Integer(), nullable=True)) # Migrate data for products - op.execute('UPDATE products SET price_cents = ROUND(COALESCE(price, 0) * 100)') - op.execute('UPDATE products SET sale_price_cents = ROUND(sale_price * 100) WHERE sale_price IS NOT NULL') - op.execute('UPDATE products SET supplier_cost_cents = ROUND(supplier_cost * 100) WHERE supplier_cost IS NOT NULL') - op.execute('UPDATE products SET margin_percent_x100 = ROUND(margin_percent 
* 100) WHERE margin_percent IS NOT NULL') + op.execute("UPDATE products SET price_cents = ROUND(COALESCE(price, 0) * 100)") + op.execute("UPDATE products SET sale_price_cents = ROUND(sale_price * 100) WHERE sale_price IS NOT NULL") + op.execute("UPDATE products SET supplier_cost_cents = ROUND(supplier_cost * 100) WHERE supplier_cost IS NOT NULL") + op.execute("UPDATE products SET margin_percent_x100 = ROUND(margin_percent * 100) WHERE margin_percent IS NOT NULL") # Drop old columns - with op.batch_alter_table('products', schema=None) as batch_op: - batch_op.drop_column('price') - batch_op.drop_column('sale_price') - batch_op.drop_column('supplier_cost') - batch_op.drop_column('margin_percent') + with op.batch_alter_table("products", schema=None) as batch_op: + batch_op.drop_column("price") + batch_op.drop_column("sale_price") + batch_op.drop_column("supplier_cost") + batch_op.drop_column("margin_percent") # === ORDERS TABLE === - with op.batch_alter_table('orders', schema=None) as batch_op: - batch_op.add_column(sa.Column('subtotal_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('tax_amount_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('shipping_amount_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('discount_amount_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('total_amount_cents', sa.Integer(), nullable=True)) + with op.batch_alter_table("orders", schema=None) as batch_op: + batch_op.add_column(sa.Column("subtotal_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("tax_amount_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("shipping_amount_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("discount_amount_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("total_amount_cents", sa.Integer(), nullable=True)) # Migrate data for orders - op.execute('UPDATE orders SET subtotal_cents = 
ROUND(COALESCE(subtotal, 0) * 100)') - op.execute('UPDATE orders SET tax_amount_cents = ROUND(COALESCE(tax_amount, 0) * 100)') - op.execute('UPDATE orders SET shipping_amount_cents = ROUND(COALESCE(shipping_amount, 0) * 100)') - op.execute('UPDATE orders SET discount_amount_cents = ROUND(COALESCE(discount_amount, 0) * 100)') - op.execute('UPDATE orders SET total_amount_cents = ROUND(COALESCE(total_amount, 0) * 100)') + op.execute("UPDATE orders SET subtotal_cents = ROUND(COALESCE(subtotal, 0) * 100)") + op.execute("UPDATE orders SET tax_amount_cents = ROUND(COALESCE(tax_amount, 0) * 100)") + op.execute("UPDATE orders SET shipping_amount_cents = ROUND(COALESCE(shipping_amount, 0) * 100)") + op.execute("UPDATE orders SET discount_amount_cents = ROUND(COALESCE(discount_amount, 0) * 100)") + op.execute("UPDATE orders SET total_amount_cents = ROUND(COALESCE(total_amount, 0) * 100)") # Make total_amount_cents NOT NULL after migration - with op.batch_alter_table('orders', schema=None) as batch_op: - batch_op.drop_column('subtotal') - batch_op.drop_column('tax_amount') - batch_op.drop_column('shipping_amount') - batch_op.drop_column('discount_amount') - batch_op.drop_column('total_amount') + with op.batch_alter_table("orders", schema=None) as batch_op: + batch_op.drop_column("subtotal") + batch_op.drop_column("tax_amount") + batch_op.drop_column("shipping_amount") + batch_op.drop_column("discount_amount") + batch_op.drop_column("total_amount") # Alter total_amount_cents to be NOT NULL - batch_op.alter_column('total_amount_cents', + batch_op.alter_column("total_amount_cents", existing_type=sa.Integer(), nullable=False) # === ORDER_ITEMS TABLE === - with op.batch_alter_table('order_items', schema=None) as batch_op: - batch_op.add_column(sa.Column('unit_price_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('total_price_cents', sa.Integer(), nullable=True)) + with op.batch_alter_table("order_items", schema=None) as batch_op: + 
batch_op.add_column(sa.Column("unit_price_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("total_price_cents", sa.Integer(), nullable=True)) # Migrate data for order_items - op.execute('UPDATE order_items SET unit_price_cents = ROUND(COALESCE(unit_price, 0) * 100)') - op.execute('UPDATE order_items SET total_price_cents = ROUND(COALESCE(total_price, 0) * 100)') + op.execute("UPDATE order_items SET unit_price_cents = ROUND(COALESCE(unit_price, 0) * 100)") + op.execute("UPDATE order_items SET total_price_cents = ROUND(COALESCE(total_price, 0) * 100)") - with op.batch_alter_table('order_items', schema=None) as batch_op: - batch_op.drop_column('unit_price') - batch_op.drop_column('total_price') - batch_op.alter_column('unit_price_cents', + with op.batch_alter_table("order_items", schema=None) as batch_op: + batch_op.drop_column("unit_price") + batch_op.drop_column("total_price") + batch_op.alter_column("unit_price_cents", existing_type=sa.Integer(), nullable=False) - batch_op.alter_column('total_price_cents', + batch_op.alter_column("total_price_cents", existing_type=sa.Integer(), nullable=False) # === CART_ITEMS TABLE === - with op.batch_alter_table('cart_items', schema=None) as batch_op: - batch_op.add_column(sa.Column('price_at_add_cents', sa.Integer(), nullable=True)) + with op.batch_alter_table("cart_items", schema=None) as batch_op: + batch_op.add_column(sa.Column("price_at_add_cents", sa.Integer(), nullable=True)) # Migrate data for cart_items - op.execute('UPDATE cart_items SET price_at_add_cents = ROUND(COALESCE(price_at_add, 0) * 100)') + op.execute("UPDATE cart_items SET price_at_add_cents = ROUND(COALESCE(price_at_add, 0) * 100)") - with op.batch_alter_table('cart_items', schema=None) as batch_op: - batch_op.drop_column('price_at_add') - batch_op.alter_column('price_at_add_cents', + with op.batch_alter_table("cart_items", schema=None) as batch_op: + batch_op.drop_column("price_at_add") + batch_op.alter_column("price_at_add_cents", 
existing_type=sa.Integer(), nullable=False) # === MARKETPLACE_PRODUCTS TABLE === - with op.batch_alter_table('marketplace_products', schema=None) as batch_op: - batch_op.add_column(sa.Column('price_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('sale_price_cents', sa.Integer(), nullable=True)) - batch_op.add_column(sa.Column('weight_grams', sa.Integer(), nullable=True)) + with op.batch_alter_table("marketplace_products", schema=None) as batch_op: + batch_op.add_column(sa.Column("price_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("sale_price_cents", sa.Integer(), nullable=True)) + batch_op.add_column(sa.Column("weight_grams", sa.Integer(), nullable=True)) # Migrate data for marketplace_products - op.execute('UPDATE marketplace_products SET price_cents = ROUND(price_numeric * 100) WHERE price_numeric IS NOT NULL') - op.execute('UPDATE marketplace_products SET sale_price_cents = ROUND(sale_price_numeric * 100) WHERE sale_price_numeric IS NOT NULL') - op.execute('UPDATE marketplace_products SET weight_grams = ROUND(weight * 1000) WHERE weight IS NOT NULL') + op.execute("UPDATE marketplace_products SET price_cents = ROUND(price_numeric * 100) WHERE price_numeric IS NOT NULL") + op.execute("UPDATE marketplace_products SET sale_price_cents = ROUND(sale_price_numeric * 100) WHERE sale_price_numeric IS NOT NULL") + op.execute("UPDATE marketplace_products SET weight_grams = ROUND(weight * 1000) WHERE weight IS NOT NULL") - with op.batch_alter_table('marketplace_products', schema=None) as batch_op: - batch_op.drop_column('price_numeric') - batch_op.drop_column('sale_price_numeric') - batch_op.drop_column('weight') + with op.batch_alter_table("marketplace_products", schema=None) as batch_op: + batch_op.drop_column("price_numeric") + batch_op.drop_column("sale_price_numeric") + batch_op.drop_column("weight") def downgrade() -> None: # === MARKETPLACE_PRODUCTS TABLE === - with op.batch_alter_table('marketplace_products', 
schema=None) as batch_op: - batch_op.add_column(sa.Column('price_numeric', sa.Float(), nullable=True)) - batch_op.add_column(sa.Column('sale_price_numeric', sa.Float(), nullable=True)) - batch_op.add_column(sa.Column('weight', sa.Float(), nullable=True)) + with op.batch_alter_table("marketplace_products", schema=None) as batch_op: + batch_op.add_column(sa.Column("price_numeric", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("sale_price_numeric", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("weight", sa.Float(), nullable=True)) - op.execute('UPDATE marketplace_products SET price_numeric = price_cents / 100.0 WHERE price_cents IS NOT NULL') - op.execute('UPDATE marketplace_products SET sale_price_numeric = sale_price_cents / 100.0 WHERE sale_price_cents IS NOT NULL') - op.execute('UPDATE marketplace_products SET weight = weight_grams / 1000.0 WHERE weight_grams IS NOT NULL') + op.execute("UPDATE marketplace_products SET price_numeric = price_cents / 100.0 WHERE price_cents IS NOT NULL") + op.execute("UPDATE marketplace_products SET sale_price_numeric = sale_price_cents / 100.0 WHERE sale_price_cents IS NOT NULL") + op.execute("UPDATE marketplace_products SET weight = weight_grams / 1000.0 WHERE weight_grams IS NOT NULL") - with op.batch_alter_table('marketplace_products', schema=None) as batch_op: - batch_op.drop_column('price_cents') - batch_op.drop_column('sale_price_cents') - batch_op.drop_column('weight_grams') + with op.batch_alter_table("marketplace_products", schema=None) as batch_op: + batch_op.drop_column("price_cents") + batch_op.drop_column("sale_price_cents") + batch_op.drop_column("weight_grams") # === CART_ITEMS TABLE === - with op.batch_alter_table('cart_items', schema=None) as batch_op: - batch_op.add_column(sa.Column('price_at_add', sa.Float(), nullable=True)) + with op.batch_alter_table("cart_items", schema=None) as batch_op: + batch_op.add_column(sa.Column("price_at_add", sa.Float(), nullable=True)) - op.execute('UPDATE 
cart_items SET price_at_add = price_at_add_cents / 100.0') + op.execute("UPDATE cart_items SET price_at_add = price_at_add_cents / 100.0") - with op.batch_alter_table('cart_items', schema=None) as batch_op: - batch_op.drop_column('price_at_add_cents') - batch_op.alter_column('price_at_add', + with op.batch_alter_table("cart_items", schema=None) as batch_op: + batch_op.drop_column("price_at_add_cents") + batch_op.alter_column("price_at_add", existing_type=sa.Float(), nullable=False) # === ORDER_ITEMS TABLE === - with op.batch_alter_table('order_items', schema=None) as batch_op: - batch_op.add_column(sa.Column('unit_price', sa.Float(), nullable=True)) - batch_op.add_column(sa.Column('total_price', sa.Float(), nullable=True)) + with op.batch_alter_table("order_items", schema=None) as batch_op: + batch_op.add_column(sa.Column("unit_price", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("total_price", sa.Float(), nullable=True)) - op.execute('UPDATE order_items SET unit_price = unit_price_cents / 100.0') - op.execute('UPDATE order_items SET total_price = total_price_cents / 100.0') + op.execute("UPDATE order_items SET unit_price = unit_price_cents / 100.0") + op.execute("UPDATE order_items SET total_price = total_price_cents / 100.0") - with op.batch_alter_table('order_items', schema=None) as batch_op: - batch_op.drop_column('unit_price_cents') - batch_op.drop_column('total_price_cents') - batch_op.alter_column('unit_price', + with op.batch_alter_table("order_items", schema=None) as batch_op: + batch_op.drop_column("unit_price_cents") + batch_op.drop_column("total_price_cents") + batch_op.alter_column("unit_price", existing_type=sa.Float(), nullable=False) - batch_op.alter_column('total_price', + batch_op.alter_column("total_price", existing_type=sa.Float(), nullable=False) # === ORDERS TABLE === - with op.batch_alter_table('orders', schema=None) as batch_op: - batch_op.add_column(sa.Column('subtotal', sa.Float(), nullable=True)) - 
batch_op.add_column(sa.Column('tax_amount', sa.Float(), nullable=True)) - batch_op.add_column(sa.Column('shipping_amount', sa.Float(), nullable=True)) - batch_op.add_column(sa.Column('discount_amount', sa.Float(), nullable=True)) - batch_op.add_column(sa.Column('total_amount', sa.Float(), nullable=True)) + with op.batch_alter_table("orders", schema=None) as batch_op: + batch_op.add_column(sa.Column("subtotal", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("tax_amount", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("shipping_amount", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("discount_amount", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("total_amount", sa.Float(), nullable=True)) - op.execute('UPDATE orders SET subtotal = subtotal_cents / 100.0') - op.execute('UPDATE orders SET tax_amount = tax_amount_cents / 100.0') - op.execute('UPDATE orders SET shipping_amount = shipping_amount_cents / 100.0') - op.execute('UPDATE orders SET discount_amount = discount_amount_cents / 100.0') - op.execute('UPDATE orders SET total_amount = total_amount_cents / 100.0') + op.execute("UPDATE orders SET subtotal = subtotal_cents / 100.0") + op.execute("UPDATE orders SET tax_amount = tax_amount_cents / 100.0") + op.execute("UPDATE orders SET shipping_amount = shipping_amount_cents / 100.0") + op.execute("UPDATE orders SET discount_amount = discount_amount_cents / 100.0") + op.execute("UPDATE orders SET total_amount = total_amount_cents / 100.0") - with op.batch_alter_table('orders', schema=None) as batch_op: - batch_op.drop_column('subtotal_cents') - batch_op.drop_column('tax_amount_cents') - batch_op.drop_column('shipping_amount_cents') - batch_op.drop_column('discount_amount_cents') - batch_op.drop_column('total_amount_cents') - batch_op.alter_column('total_amount', + with op.batch_alter_table("orders", schema=None) as batch_op: + batch_op.drop_column("subtotal_cents") + batch_op.drop_column("tax_amount_cents") + 
batch_op.drop_column("shipping_amount_cents") + batch_op.drop_column("discount_amount_cents") + batch_op.drop_column("total_amount_cents") + batch_op.alter_column("total_amount", existing_type=sa.Float(), nullable=False) # === PRODUCTS TABLE === - with op.batch_alter_table('products', schema=None) as batch_op: - batch_op.add_column(sa.Column('price', sa.Float(), nullable=True)) - batch_op.add_column(sa.Column('sale_price', sa.Float(), nullable=True)) - batch_op.add_column(sa.Column('supplier_cost', sa.Float(), nullable=True)) - batch_op.add_column(sa.Column('margin_percent', sa.Float(), nullable=True)) + with op.batch_alter_table("products", schema=None) as batch_op: + batch_op.add_column(sa.Column("price", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("sale_price", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("supplier_cost", sa.Float(), nullable=True)) + batch_op.add_column(sa.Column("margin_percent", sa.Float(), nullable=True)) - op.execute('UPDATE products SET price = price_cents / 100.0 WHERE price_cents IS NOT NULL') - op.execute('UPDATE products SET sale_price = sale_price_cents / 100.0 WHERE sale_price_cents IS NOT NULL') - op.execute('UPDATE products SET supplier_cost = supplier_cost_cents / 100.0 WHERE supplier_cost_cents IS NOT NULL') - op.execute('UPDATE products SET margin_percent = margin_percent_x100 / 100.0 WHERE margin_percent_x100 IS NOT NULL') + op.execute("UPDATE products SET price = price_cents / 100.0 WHERE price_cents IS NOT NULL") + op.execute("UPDATE products SET sale_price = sale_price_cents / 100.0 WHERE sale_price_cents IS NOT NULL") + op.execute("UPDATE products SET supplier_cost = supplier_cost_cents / 100.0 WHERE supplier_cost_cents IS NOT NULL") + op.execute("UPDATE products SET margin_percent = margin_percent_x100 / 100.0 WHERE margin_percent_x100 IS NOT NULL") - with op.batch_alter_table('products', schema=None) as batch_op: - batch_op.drop_column('price_cents') - 
batch_op.drop_column('sale_price_cents') - batch_op.drop_column('supplier_cost_cents') - batch_op.drop_column('margin_percent_x100') + with op.batch_alter_table("products", schema=None) as batch_op: + batch_op.drop_column("price_cents") + batch_op.drop_column("sale_price_cents") + batch_op.drop_column("supplier_cost_cents") + batch_op.drop_column("margin_percent_x100") diff --git a/alembic/versions_backup/e3f4a5b6c7d8_add_messaging_tables.py b/alembic/versions_backup/e3f4a5b6c7d8_add_messaging_tables.py index 62b9937d..2f84bb9d 100644 --- a/alembic/versions_backup/e3f4a5b6c7d8_add_messaging_tables.py +++ b/alembic/versions_backup/e3f4a5b6c7d8_add_messaging_tables.py @@ -16,18 +16,18 @@ Supports three communication channels: - Admin <-> Customer """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa from sqlalchemy import inspect +from alembic import op # revision identifiers, used by Alembic. revision: str = "e3f4a5b6c7d8" -down_revision: Union[str, None] = "c9e22eadf533" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "c9e22eadf533" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def table_exists(table_name: str) -> bool: diff --git a/alembic/versions_backup/f2b3c4d5e6f7_create_translation_tables.py b/alembic/versions_backup/f2b3c4d5e6f7_create_translation_tables.py index 9db90556..f42573e4 100644 --- a/alembic/versions_backup/f2b3c4d5e6f7_create_translation_tables.py +++ b/alembic/versions_backup/f2b3c4d5e6f7_create_translation_tables.py @@ -13,7 +13,7 @@ language fallback capabilities. Fields in product_translations can be NULL to inherit from marketplace_product_translations. """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa @@ -21,9 +21,9 @@ from alembic import op # revision identifiers, used by Alembic. 
revision: str = "f2b3c4d5e6f7" -down_revision: Union[str, None] = "e1a2b3c4d5e6" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "e1a2b3c4d5e6" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/f4a5b6c7d8e9_add_validator_type_to_code_quality.py b/alembic/versions_backup/f4a5b6c7d8e9_add_validator_type_to_code_quality.py index 35e48b91..cac6fdc2 100644 --- a/alembic/versions_backup/f4a5b6c7d8e9_add_validator_type_to_code_quality.py +++ b/alembic/versions_backup/f4a5b6c7d8e9_add_validator_type_to_code_quality.py @@ -8,17 +8,17 @@ This migration adds validator_type column to architecture scans and violations to support multiple validator types (architecture, security, performance). """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision: str = "f4a5b6c7d8e9" -down_revision: Union[str, None] = "e3f4a5b6c7d8" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "e3f4a5b6c7d8" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/f68d8da5315a_add_template_field_to_content_pages_for_.py b/alembic/versions_backup/f68d8da5315a_add_template_field_to_content_pages_for_.py index b095835d..55df5940 100644 --- a/alembic/versions_backup/f68d8da5315a_add_template_field_to_content_pages_for_.py +++ b/alembic/versions_backup/f68d8da5315a_add_template_field_to_content_pages_for_.py @@ -6,7 +6,7 @@ Create Date: 2025-11-22 23:51:40.694983 """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa @@ -14,9 +14,9 @@ from alembic import op # revision identifiers, used by Alembic. revision: str = "f68d8da5315a" -down_revision: Union[str, None] = "72aa309d4007" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "72aa309d4007" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/fa7d4d10e358_add_rbac_enhancements.py b/alembic/versions_backup/fa7d4d10e358_add_rbac_enhancements.py index 6c0decce..4f61eaf5 100644 --- a/alembic/versions_backup/fa7d4d10e358_add_rbac_enhancements.py +++ b/alembic/versions_backup/fa7d4d10e358_add_rbac_enhancements.py @@ -7,7 +7,7 @@ Create Date: 2025-11-13 16:51:25.010057 SQLite-compatible version """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa @@ -15,9 +15,9 @@ from alembic import op # revision identifiers, used by Alembic. 
revision: str = "fa7d4d10e358" -down_revision: Union[str, None] = "4951b2e50581" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "4951b2e50581" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade(): @@ -80,10 +80,10 @@ def upgrade(): # SQLite-compatible UPDATE with subquery op.execute( """ - UPDATE vendor_users - SET user_type = 'owner' + UPDATE vendor_users + SET user_type = 'owner' WHERE (vendor_id, user_id) IN ( - SELECT id, owner_user_id + SELECT id, owner_user_id FROM vendors ) """ @@ -92,8 +92,8 @@ def upgrade(): # Set existing owners as active op.execute( """ - UPDATE vendor_users - SET is_active = TRUE + UPDATE vendor_users + SET is_active = TRUE WHERE user_type = 'owner' """ ) diff --git a/alembic/versions_backup/fcfdc02d5138_add_language_settings_to_vendor_user_.py b/alembic/versions_backup/fcfdc02d5138_add_language_settings_to_vendor_user_.py index 34d7fcfb..0db4e9b0 100644 --- a/alembic/versions_backup/fcfdc02d5138_add_language_settings_to_vendor_user_.py +++ b/alembic/versions_backup/fcfdc02d5138_add_language_settings_to_vendor_user_.py @@ -11,17 +11,17 @@ This migration adds language preference fields to support multi-language UI: Supported languages: en (English), fr (French), de (German), lb (Luxembourgish) """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
-revision: str = 'fcfdc02d5138' -down_revision: Union[str, None] = 'b412e0b49c2e' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "fcfdc02d5138" +down_revision: str | None = "b412e0b49c2e" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: @@ -30,25 +30,25 @@ def upgrade() -> None: # ======================================================================== # default_language: Default language for vendor content (products, etc.) op.add_column( - 'vendors', - sa.Column('default_language', sa.String(5), nullable=False, server_default='fr') + "vendors", + sa.Column("default_language", sa.String(5), nullable=False, server_default="fr") ) # dashboard_language: Language for vendor team dashboard UI op.add_column( - 'vendors', - sa.Column('dashboard_language', sa.String(5), nullable=False, server_default='fr') + "vendors", + sa.Column("dashboard_language", sa.String(5), nullable=False, server_default="fr") ) # storefront_language: Default language for customer-facing shop op.add_column( - 'vendors', - sa.Column('storefront_language', sa.String(5), nullable=False, server_default='fr') + "vendors", + sa.Column("storefront_language", sa.String(5), nullable=False, server_default="fr") ) # storefront_languages: JSON array of enabled languages for storefront # Allows vendors to enable/disable specific languages op.add_column( - 'vendors', + "vendors", sa.Column( - 'storefront_languages', + "storefront_languages", sa.JSON, nullable=False, server_default='["fr", "de", "en"]' @@ -60,8 +60,8 @@ def upgrade() -> None: # ======================================================================== # preferred_language: User's preferred UI language (NULL = use context default) op.add_column( - 'users', - sa.Column('preferred_language', sa.String(5), nullable=True) + "users", + sa.Column("preferred_language", sa.String(5), nullable=True) ) # 
======================================================================== @@ -69,16 +69,16 @@ def upgrade() -> None: # ======================================================================== # preferred_language: Customer's preferred language (NULL = use storefront default) op.add_column( - 'customers', - sa.Column('preferred_language', sa.String(5), nullable=True) + "customers", + sa.Column("preferred_language", sa.String(5), nullable=True) ) def downgrade() -> None: # Remove columns in reverse order - op.drop_column('customers', 'preferred_language') - op.drop_column('users', 'preferred_language') - op.drop_column('vendors', 'storefront_languages') - op.drop_column('vendors', 'storefront_language') - op.drop_column('vendors', 'dashboard_language') - op.drop_column('vendors', 'default_language') + op.drop_column("customers", "preferred_language") + op.drop_column("users", "preferred_language") + op.drop_column("vendors", "storefront_languages") + op.drop_column("vendors", "storefront_language") + op.drop_column("vendors", "dashboard_language") + op.drop_column("vendors", "default_language") diff --git a/alembic/versions_backup/fef1d20ce8b4_add_content_pages_table_for_cms.py b/alembic/versions_backup/fef1d20ce8b4_add_content_pages_table_for_cms.py index cc24f81a..16ba9f6f 100644 --- a/alembic/versions_backup/fef1d20ce8b4_add_content_pages_table_for_cms.py +++ b/alembic/versions_backup/fef1d20ce8b4_add_content_pages_table_for_cms.py @@ -6,17 +6,15 @@ Create Date: 2025-11-22 13:41:18.069674 """ -from typing import Sequence, Union - -import sqlalchemy as sa +from collections.abc import Sequence from alembic import op # revision identifiers, used by Alembic. 
revision: str = "fef1d20ce8b4" -down_revision: Union[str, None] = "fa7d4d10e358" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "fa7d4d10e358" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/g5b6c7d8e9f0_add_scan_status_fields.py b/alembic/versions_backup/g5b6c7d8e9f0_add_scan_status_fields.py index 1558f49c..128bf12b 100644 --- a/alembic/versions_backup/g5b6c7d8e9f0_add_scan_status_fields.py +++ b/alembic/versions_backup/g5b6c7d8e9f0_add_scan_status_fields.py @@ -12,6 +12,7 @@ Create Date: 2024-12-21 from collections.abc import Sequence import sqlalchemy as sa + from alembic import op # revision identifiers, used by Alembic. diff --git a/alembic/versions_backup/h6c7d8e9f0a1_add_invoice_tables.py b/alembic/versions_backup/h6c7d8e9f0a1_add_invoice_tables.py index b88cabc2..14842707 100644 --- a/alembic/versions_backup/h6c7d8e9f0a1_add_invoice_tables.py +++ b/alembic/versions_backup/h6c7d8e9f0a1_add_invoice_tables.py @@ -9,16 +9,17 @@ This migration adds: - invoices: Invoice records with seller/buyer snapshots """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa + from alembic import op # revision identifiers, used by Alembic. 
revision: str = "h6c7d8e9f0a1" -down_revision: Union[str, None] = "g5b6c7d8e9f0" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "g5b6c7d8e9f0" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/i7d8e9f0a1b2_add_vendor_subscriptions.py b/alembic/versions_backup/i7d8e9f0a1b2_add_vendor_subscriptions.py index cff6e188..c936bc2e 100644 --- a/alembic/versions_backup/i7d8e9f0a1b2_add_vendor_subscriptions.py +++ b/alembic/versions_backup/i7d8e9f0a1b2_add_vendor_subscriptions.py @@ -8,16 +8,17 @@ This migration adds: - vendor_subscriptions: Per-vendor subscription tracking with tier limits """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa + from alembic import op # revision identifiers, used by Alembic. revision: str = "i7d8e9f0a1b2" -down_revision: Union[str, None] = "h6c7d8e9f0a1" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "h6c7d8e9f0a1" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/j8e9f0a1b2c3_product_independence_populate_fields.py b/alembic/versions_backup/j8e9f0a1b2c3_product_independence_populate_fields.py index e03e6244..70ccf112 100644 --- a/alembic/versions_backup/j8e9f0a1b2c3_product_independence_populate_fields.py +++ b/alembic/versions_backup/j8e9f0a1b2c3_product_independence_populate_fields.py @@ -15,16 +15,17 @@ After this migration: - The marketplace_product_id FK is kept for "view original source" feature """ -from typing import Sequence, Union +from collections.abc import Sequence + +from sqlalchemy import text from alembic import op -from sqlalchemy import text # revision identifiers, used by Alembic. 
revision: str = "j8e9f0a1b2c3" -down_revision: Union[str, None] = "i7d8e9f0a1b2" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "i7d8e9f0a1b2" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: @@ -259,4 +260,3 @@ def downgrade() -> None: 1. It would lose any vendor customizations made after migration 2. The model code may still work with populated fields """ - pass diff --git a/alembic/versions_backup/k9f0a1b2c3d4_add_tier_id_fk_to_subscriptions.py b/alembic/versions_backup/k9f0a1b2c3d4_add_tier_id_fk_to_subscriptions.py index b7d74ac5..a185eba2 100644 --- a/alembic/versions_backup/k9f0a1b2c3d4_add_tier_id_fk_to_subscriptions.py +++ b/alembic/versions_backup/k9f0a1b2c3d4_add_tier_id_fk_to_subscriptions.py @@ -8,9 +8,9 @@ Adds tier_id column to vendor_subscriptions table with FK to subscription_tiers. Backfills tier_id based on existing tier (code) values. """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision = "k9f0a1b2c3d4" diff --git a/alembic/versions_backup/l0a1b2c3d4e5_add_capacity_snapshots_table.py b/alembic/versions_backup/l0a1b2c3d4e5_add_capacity_snapshots_table.py index a87beb08..4a2e6f5b 100644 --- a/alembic/versions_backup/l0a1b2c3d4e5_add_capacity_snapshots_table.py +++ b/alembic/versions_backup/l0a1b2c3d4e5_add_capacity_snapshots_table.py @@ -7,9 +7,9 @@ Create Date: 2025-12-26 Adds table for tracking daily platform capacity metrics for growth forecasting. """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision = "l0a1b2c3d4e5" diff --git a/alembic/versions_backup/m1b2c3d4e5f6_add_vendor_onboarding_table.py b/alembic/versions_backup/m1b2c3d4e5f6_add_vendor_onboarding_table.py index 849da006..2f9b5f31 100644 --- a/alembic/versions_backup/m1b2c3d4e5f6_add_vendor_onboarding_table.py +++ b/alembic/versions_backup/m1b2c3d4e5f6_add_vendor_onboarding_table.py @@ -5,67 +5,67 @@ Revises: d7a4a3f06394 Create Date: 2025-12-27 22:00:00.000000 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = 'm1b2c3d4e5f6' -down_revision: Union[str, None] = 'd7a4a3f06394' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "m1b2c3d4e5f6" +down_revision: str | None = "d7a4a3f06394" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: - op.create_table('vendor_onboarding', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('vendor_id', sa.Integer(), nullable=False), + op.create_table("vendor_onboarding", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("vendor_id", sa.Integer(), nullable=False), # Overall status - sa.Column('status', sa.String(length=20), nullable=False, server_default='not_started'), - sa.Column('current_step', sa.String(length=30), nullable=False, server_default='company_profile'), + sa.Column("status", sa.String(length=20), nullable=False, server_default="not_started"), + sa.Column("current_step", sa.String(length=30), nullable=False, server_default="company_profile"), # Step 1: Company Profile - sa.Column('step_company_profile_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')), - sa.Column('step_company_profile_completed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('step_company_profile_data', sa.JSON(), nullable=True), 
+ sa.Column("step_company_profile_completed", sa.Boolean(), nullable=False, server_default=sa.text("false")), + sa.Column("step_company_profile_completed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("step_company_profile_data", sa.JSON(), nullable=True), # Step 2: Letzshop API Configuration - sa.Column('step_letzshop_api_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')), - sa.Column('step_letzshop_api_completed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('step_letzshop_api_connection_verified', sa.Boolean(), nullable=False, server_default=sa.text('false')), + sa.Column("step_letzshop_api_completed", sa.Boolean(), nullable=False, server_default=sa.text("false")), + sa.Column("step_letzshop_api_completed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("step_letzshop_api_connection_verified", sa.Boolean(), nullable=False, server_default=sa.text("false")), # Step 3: Product Import - sa.Column('step_product_import_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')), - sa.Column('step_product_import_completed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('step_product_import_csv_url_set', sa.Boolean(), nullable=False, server_default=sa.text('false')), + sa.Column("step_product_import_completed", sa.Boolean(), nullable=False, server_default=sa.text("false")), + sa.Column("step_product_import_completed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("step_product_import_csv_url_set", sa.Boolean(), nullable=False, server_default=sa.text("false")), # Step 4: Order Sync - sa.Column('step_order_sync_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')), - sa.Column('step_order_sync_completed_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('step_order_sync_job_id', sa.Integer(), nullable=True), + sa.Column("step_order_sync_completed", sa.Boolean(), nullable=False, server_default=sa.text("false")), + 
sa.Column("step_order_sync_completed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("step_order_sync_job_id", sa.Integer(), nullable=True), # Completion tracking - sa.Column('started_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True), + sa.Column("started_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True), # Admin override - sa.Column('skipped_by_admin', sa.Boolean(), nullable=False, server_default=sa.text('false')), - sa.Column('skipped_at', sa.DateTime(timezone=True), nullable=True), - sa.Column('skipped_reason', sa.Text(), nullable=True), - sa.Column('skipped_by_user_id', sa.Integer(), nullable=True), + sa.Column("skipped_by_admin", sa.Boolean(), nullable=False, server_default=sa.text("false")), + sa.Column("skipped_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("skipped_reason", sa.Text(), nullable=True), + sa.Column("skipped_by_user_id", sa.Integer(), nullable=True), # Timestamps - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), # Constraints - sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['skipped_by_user_id'], ['users.id']), - sa.PrimaryKeyConstraint('id'), + sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["skipped_by_user_id"], ["users.id"]), + sa.PrimaryKeyConstraint("id"), ) - op.create_index(op.f('ix_vendor_onboarding_id'), 'vendor_onboarding', ['id'], unique=False) - op.create_index(op.f('ix_vendor_onboarding_vendor_id'), 'vendor_onboarding', ['vendor_id'], unique=True) - op.create_index(op.f('ix_vendor_onboarding_status'), 'vendor_onboarding', ['status'], unique=False) - 
op.create_index('idx_onboarding_vendor_status', 'vendor_onboarding', ['vendor_id', 'status'], unique=False) + op.create_index(op.f("ix_vendor_onboarding_id"), "vendor_onboarding", ["id"], unique=False) + op.create_index(op.f("ix_vendor_onboarding_vendor_id"), "vendor_onboarding", ["vendor_id"], unique=True) + op.create_index(op.f("ix_vendor_onboarding_status"), "vendor_onboarding", ["status"], unique=False) + op.create_index("idx_onboarding_vendor_status", "vendor_onboarding", ["vendor_id", "status"], unique=False) def downgrade() -> None: - op.drop_index('idx_onboarding_vendor_status', table_name='vendor_onboarding') - op.drop_index(op.f('ix_vendor_onboarding_status'), table_name='vendor_onboarding') - op.drop_index(op.f('ix_vendor_onboarding_vendor_id'), table_name='vendor_onboarding') - op.drop_index(op.f('ix_vendor_onboarding_id'), table_name='vendor_onboarding') - op.drop_table('vendor_onboarding') + op.drop_index("idx_onboarding_vendor_status", table_name="vendor_onboarding") + op.drop_index(op.f("ix_vendor_onboarding_status"), table_name="vendor_onboarding") + op.drop_index(op.f("ix_vendor_onboarding_vendor_id"), table_name="vendor_onboarding") + op.drop_index(op.f("ix_vendor_onboarding_id"), table_name="vendor_onboarding") + op.drop_table("vendor_onboarding") diff --git a/alembic/versions_backup/module_billing/billing_001_merchant_subscriptions_and_feature_limits.py b/alembic/versions_backup/module_billing/billing_001_merchant_subscriptions_and_feature_limits.py index 2f7612ee..533d07d5 100644 --- a/alembic/versions_backup/module_billing/billing_001_merchant_subscriptions_and_feature_limits.py +++ b/alembic/versions_backup/module_billing/billing_001_merchant_subscriptions_and_feature_limits.py @@ -17,9 +17,9 @@ Alters: Revision ID: billing_001 """ -from alembic import op import sqlalchemy as sa +from alembic import op # Revision identifiers revision = "billing_001" diff --git a/alembic/versions_backup/module_loyalty/loyalty_001_add_loyalty_module_tables.py 
b/alembic/versions_backup/module_loyalty/loyalty_001_add_loyalty_module_tables.py index 3b3a54f5..b66f8b8b 100644 --- a/alembic/versions_backup/module_loyalty/loyalty_001_add_loyalty_module_tables.py +++ b/alembic/versions_backup/module_loyalty/loyalty_001_add_loyalty_module_tables.py @@ -5,646 +5,646 @@ Revises: zd3n4o5p6q7r8 Create Date: 2026-01-28 22:55:34.074321 """ -from typing import Sequence, Union +from collections.abc import Sequence + +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql, sqlite from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from sqlalchemy.dialects import sqlite # revision identifiers, used by Alembic. -revision: str = '0fb5d6d6ff97' -down_revision: Union[str, None] = 'zd3n4o5p6q7r8' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "0fb5d6d6ff97" +down_revision: str | None = "zd3n4o5p6q7r8" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('loyalty_programs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('store_id', sa.Integer(), nullable=False), - sa.Column('loyalty_type', sa.String(length=20), nullable=False), - sa.Column('stamps_target', sa.Integer(), nullable=False, comment='Number of stamps needed for reward'), - sa.Column('stamps_reward_description', sa.String(length=255), nullable=False, comment='Description of stamp reward'), - sa.Column('stamps_reward_value_cents', sa.Integer(), nullable=True, comment='Value of stamp reward in cents (for analytics)'), - sa.Column('points_per_euro', sa.Integer(), nullable=False, comment='Points earned per euro spent'), - sa.Column('points_rewards', sqlite.JSON(), nullable=False, comment='List of point rewards: [{id, name, points_required, description}]'), - sa.Column('cooldown_minutes', sa.Integer(), nullable=False, comment='Minutes between stamps for same card'), - sa.Column('max_daily_stamps', sa.Integer(), nullable=False, comment='Maximum stamps per card per day'), - sa.Column('require_staff_pin', sa.Boolean(), nullable=False, comment='Require staff PIN for stamp/points operations'), - sa.Column('card_name', sa.String(length=100), nullable=True, comment='Display name for loyalty card'), - sa.Column('card_color', sa.String(length=7), nullable=False, comment='Primary color for card (hex)'), - sa.Column('card_secondary_color', sa.String(length=7), nullable=True, comment='Secondary color for card (hex)'), - sa.Column('logo_url', sa.String(length=500), nullable=True, comment='URL to store logo for card'), - sa.Column('hero_image_url', sa.String(length=500), nullable=True, comment='URL to hero image for card'), - sa.Column('google_issuer_id', sa.String(length=100), nullable=True, comment='Google Wallet Issuer ID'), - sa.Column('google_class_id', sa.String(length=200), nullable=True, comment='Google Wallet Loyalty Class ID'), - sa.Column('apple_pass_type_id', sa.String(length=100), nullable=True, comment='Apple Wallet Pass 
Type ID'), - sa.Column('terms_text', sa.Text(), nullable=True, comment='Loyalty program terms and conditions'), - sa.Column('privacy_url', sa.String(length=500), nullable=True, comment='URL to privacy policy'), - sa.Column('is_active', sa.Boolean(), nullable=False), - sa.Column('activated_at', sa.DateTime(timezone=True), nullable=True, comment='When program was first activated'), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['store_id'], ['stores.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + op.create_table("loyalty_programs", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("store_id", sa.Integer(), nullable=False), + sa.Column("loyalty_type", sa.String(length=20), nullable=False), + sa.Column("stamps_target", sa.Integer(), nullable=False, comment="Number of stamps needed for reward"), + sa.Column("stamps_reward_description", sa.String(length=255), nullable=False, comment="Description of stamp reward"), + sa.Column("stamps_reward_value_cents", sa.Integer(), nullable=True, comment="Value of stamp reward in cents (for analytics)"), + sa.Column("points_per_euro", sa.Integer(), nullable=False, comment="Points earned per euro spent"), + sa.Column("points_rewards", sqlite.JSON(), nullable=False, comment="List of point rewards: [{id, name, points_required, description}]"), + sa.Column("cooldown_minutes", sa.Integer(), nullable=False, comment="Minutes between stamps for same card"), + sa.Column("max_daily_stamps", sa.Integer(), nullable=False, comment="Maximum stamps per card per day"), + sa.Column("require_staff_pin", sa.Boolean(), nullable=False, comment="Require staff PIN for stamp/points operations"), + sa.Column("card_name", sa.String(length=100), nullable=True, comment="Display name for loyalty card"), + sa.Column("card_color", sa.String(length=7), nullable=False, comment="Primary color for card (hex)"), + sa.Column("card_secondary_color", 
sa.String(length=7), nullable=True, comment="Secondary color for card (hex)"), + sa.Column("logo_url", sa.String(length=500), nullable=True, comment="URL to store logo for card"), + sa.Column("hero_image_url", sa.String(length=500), nullable=True, comment="URL to hero image for card"), + sa.Column("google_issuer_id", sa.String(length=100), nullable=True, comment="Google Wallet Issuer ID"), + sa.Column("google_class_id", sa.String(length=200), nullable=True, comment="Google Wallet Loyalty Class ID"), + sa.Column("apple_pass_type_id", sa.String(length=100), nullable=True, comment="Apple Wallet Pass Type ID"), + sa.Column("terms_text", sa.Text(), nullable=True, comment="Loyalty program terms and conditions"), + sa.Column("privacy_url", sa.String(length=500), nullable=True, comment="URL to privacy policy"), + sa.Column("is_active", sa.Boolean(), nullable=False), + sa.Column("activated_at", sa.DateTime(timezone=True), nullable=True, comment="When program was first activated"), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["store_id"], ["stores.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_loyalty_program_store_active', 'loyalty_programs', ['store_id', 'is_active'], unique=False) - op.create_index(op.f('ix_loyalty_programs_id'), 'loyalty_programs', ['id'], unique=False) - op.create_index(op.f('ix_loyalty_programs_is_active'), 'loyalty_programs', ['is_active'], unique=False) - op.create_index(op.f('ix_loyalty_programs_store_id'), 'loyalty_programs', ['store_id'], unique=True) - op.create_table('loyalty_cards', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('customer_id', sa.Integer(), nullable=False), - sa.Column('program_id', sa.Integer(), nullable=False), - sa.Column('store_id', sa.Integer(), nullable=False, comment='Denormalized for query performance'), - sa.Column('card_number', sa.String(length=20), nullable=False, 
comment='Human-readable card number'), - sa.Column('qr_code_data', sa.String(length=50), nullable=False, comment='Data encoded in QR code for scanning'), - sa.Column('stamp_count', sa.Integer(), nullable=False, comment='Current stamps toward next reward'), - sa.Column('total_stamps_earned', sa.Integer(), nullable=False, comment='Lifetime stamps earned'), - sa.Column('stamps_redeemed', sa.Integer(), nullable=False, comment='Total rewards redeemed (stamps reset on redemption)'), - sa.Column('points_balance', sa.Integer(), nullable=False, comment='Current available points'), - sa.Column('total_points_earned', sa.Integer(), nullable=False, comment='Lifetime points earned'), - sa.Column('points_redeemed', sa.Integer(), nullable=False, comment='Lifetime points redeemed'), - sa.Column('google_object_id', sa.String(length=200), nullable=True, comment='Google Wallet Loyalty Object ID'), - sa.Column('google_object_jwt', sa.String(length=2000), nullable=True, comment="JWT for Google Wallet 'Add to Wallet' button"), - sa.Column('apple_serial_number', sa.String(length=100), nullable=True, comment='Apple Wallet pass serial number'), - sa.Column('apple_auth_token', sa.String(length=100), nullable=True, comment='Apple Wallet authentication token for updates'), - sa.Column('last_stamp_at', sa.DateTime(timezone=True), nullable=True, comment='Last stamp added (for cooldown)'), - sa.Column('last_points_at', sa.DateTime(timezone=True), nullable=True, comment='Last points earned'), - sa.Column('last_redemption_at', sa.DateTime(timezone=True), nullable=True, comment='Last reward redemption'), - sa.Column('is_active', sa.Boolean(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['customer_id'], ['customers.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['program_id'], ['loyalty_programs.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['store_id'], ['stores.id'], 
ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + op.create_index("idx_loyalty_program_store_active", "loyalty_programs", ["store_id", "is_active"], unique=False) + op.create_index(op.f("ix_loyalty_programs_id"), "loyalty_programs", ["id"], unique=False) + op.create_index(op.f("ix_loyalty_programs_is_active"), "loyalty_programs", ["is_active"], unique=False) + op.create_index(op.f("ix_loyalty_programs_store_id"), "loyalty_programs", ["store_id"], unique=True) + op.create_table("loyalty_cards", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("customer_id", sa.Integer(), nullable=False), + sa.Column("program_id", sa.Integer(), nullable=False), + sa.Column("store_id", sa.Integer(), nullable=False, comment="Denormalized for query performance"), + sa.Column("card_number", sa.String(length=20), nullable=False, comment="Human-readable card number"), + sa.Column("qr_code_data", sa.String(length=50), nullable=False, comment="Data encoded in QR code for scanning"), + sa.Column("stamp_count", sa.Integer(), nullable=False, comment="Current stamps toward next reward"), + sa.Column("total_stamps_earned", sa.Integer(), nullable=False, comment="Lifetime stamps earned"), + sa.Column("stamps_redeemed", sa.Integer(), nullable=False, comment="Total rewards redeemed (stamps reset on redemption)"), + sa.Column("points_balance", sa.Integer(), nullable=False, comment="Current available points"), + sa.Column("total_points_earned", sa.Integer(), nullable=False, comment="Lifetime points earned"), + sa.Column("points_redeemed", sa.Integer(), nullable=False, comment="Lifetime points redeemed"), + sa.Column("google_object_id", sa.String(length=200), nullable=True, comment="Google Wallet Loyalty Object ID"), + sa.Column("google_object_jwt", sa.String(length=2000), nullable=True, comment="JWT for Google Wallet 'Add to Wallet' button"), + sa.Column("apple_serial_number", sa.String(length=100), nullable=True, comment="Apple Wallet pass serial number"), + 
sa.Column("apple_auth_token", sa.String(length=100), nullable=True, comment="Apple Wallet authentication token for updates"), + sa.Column("last_stamp_at", sa.DateTime(timezone=True), nullable=True, comment="Last stamp added (for cooldown)"), + sa.Column("last_points_at", sa.DateTime(timezone=True), nullable=True, comment="Last points earned"), + sa.Column("last_redemption_at", sa.DateTime(timezone=True), nullable=True, comment="Last reward redemption"), + sa.Column("is_active", sa.Boolean(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["customer_id"], ["customers.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["program_id"], ["loyalty_programs.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["store_id"], ["stores.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_loyalty_card_customer_program', 'loyalty_cards', ['customer_id', 'program_id'], unique=True) - op.create_index('idx_loyalty_card_store_active', 'loyalty_cards', ['store_id', 'is_active'], unique=False) - op.create_index(op.f('ix_loyalty_cards_apple_serial_number'), 'loyalty_cards', ['apple_serial_number'], unique=True) - op.create_index(op.f('ix_loyalty_cards_card_number'), 'loyalty_cards', ['card_number'], unique=True) - op.create_index(op.f('ix_loyalty_cards_customer_id'), 'loyalty_cards', ['customer_id'], unique=False) - op.create_index(op.f('ix_loyalty_cards_google_object_id'), 'loyalty_cards', ['google_object_id'], unique=False) - op.create_index(op.f('ix_loyalty_cards_id'), 'loyalty_cards', ['id'], unique=False) - op.create_index(op.f('ix_loyalty_cards_is_active'), 'loyalty_cards', ['is_active'], unique=False) - op.create_index(op.f('ix_loyalty_cards_program_id'), 'loyalty_cards', ['program_id'], unique=False) - op.create_index(op.f('ix_loyalty_cards_qr_code_data'), 'loyalty_cards', ['qr_code_data'], unique=True) - 
op.create_index(op.f('ix_loyalty_cards_store_id'), 'loyalty_cards', ['store_id'], unique=False) - op.create_table('staff_pins', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('program_id', sa.Integer(), nullable=False), - sa.Column('store_id', sa.Integer(), nullable=False, comment='Denormalized for query performance'), - sa.Column('name', sa.String(length=100), nullable=False, comment='Staff member name'), - sa.Column('staff_id', sa.String(length=50), nullable=True, comment='Optional staff ID/employee number'), - sa.Column('pin_hash', sa.String(length=255), nullable=False, comment='bcrypt hash of PIN'), - sa.Column('failed_attempts', sa.Integer(), nullable=False, comment='Consecutive failed PIN attempts'), - sa.Column('locked_until', sa.DateTime(timezone=True), nullable=True, comment='Lockout expires at this time'), - sa.Column('last_used_at', sa.DateTime(timezone=True), nullable=True, comment='Last successful use of PIN'), - sa.Column('is_active', sa.Boolean(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['program_id'], ['loyalty_programs.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['store_id'], ['stores.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + op.create_index("idx_loyalty_card_customer_program", "loyalty_cards", ["customer_id", "program_id"], unique=True) + op.create_index("idx_loyalty_card_store_active", "loyalty_cards", ["store_id", "is_active"], unique=False) + op.create_index(op.f("ix_loyalty_cards_apple_serial_number"), "loyalty_cards", ["apple_serial_number"], unique=True) + op.create_index(op.f("ix_loyalty_cards_card_number"), "loyalty_cards", ["card_number"], unique=True) + op.create_index(op.f("ix_loyalty_cards_customer_id"), "loyalty_cards", ["customer_id"], unique=False) + op.create_index(op.f("ix_loyalty_cards_google_object_id"), "loyalty_cards", ["google_object_id"], unique=False) + 
op.create_index(op.f("ix_loyalty_cards_id"), "loyalty_cards", ["id"], unique=False) + op.create_index(op.f("ix_loyalty_cards_is_active"), "loyalty_cards", ["is_active"], unique=False) + op.create_index(op.f("ix_loyalty_cards_program_id"), "loyalty_cards", ["program_id"], unique=False) + op.create_index(op.f("ix_loyalty_cards_qr_code_data"), "loyalty_cards", ["qr_code_data"], unique=True) + op.create_index(op.f("ix_loyalty_cards_store_id"), "loyalty_cards", ["store_id"], unique=False) + op.create_table("staff_pins", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("program_id", sa.Integer(), nullable=False), + sa.Column("store_id", sa.Integer(), nullable=False, comment="Denormalized for query performance"), + sa.Column("name", sa.String(length=100), nullable=False, comment="Staff member name"), + sa.Column("staff_id", sa.String(length=50), nullable=True, comment="Optional staff ID/employee number"), + sa.Column("pin_hash", sa.String(length=255), nullable=False, comment="bcrypt hash of PIN"), + sa.Column("failed_attempts", sa.Integer(), nullable=False, comment="Consecutive failed PIN attempts"), + sa.Column("locked_until", sa.DateTime(timezone=True), nullable=True, comment="Lockout expires at this time"), + sa.Column("last_used_at", sa.DateTime(timezone=True), nullable=True, comment="Last successful use of PIN"), + sa.Column("is_active", sa.Boolean(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["program_id"], ["loyalty_programs.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["store_id"], ["stores.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_staff_pin_program_active', 'staff_pins', ['program_id', 'is_active'], unique=False) - op.create_index('idx_staff_pin_store_active', 'staff_pins', ['store_id', 'is_active'], unique=False) - op.create_index(op.f('ix_staff_pins_id'), 'staff_pins', ['id'], 
unique=False) - op.create_index(op.f('ix_staff_pins_is_active'), 'staff_pins', ['is_active'], unique=False) - op.create_index(op.f('ix_staff_pins_program_id'), 'staff_pins', ['program_id'], unique=False) - op.create_index(op.f('ix_staff_pins_staff_id'), 'staff_pins', ['staff_id'], unique=False) - op.create_index(op.f('ix_staff_pins_store_id'), 'staff_pins', ['store_id'], unique=False) - op.create_table('apple_device_registrations', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('card_id', sa.Integer(), nullable=False), - sa.Column('device_library_identifier', sa.String(length=100), nullable=False, comment='Unique identifier for the device/library'), - sa.Column('push_token', sa.String(length=100), nullable=False, comment='APNs push token for this device'), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['card_id'], ['loyalty_cards.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + op.create_index("idx_staff_pin_program_active", "staff_pins", ["program_id", "is_active"], unique=False) + op.create_index("idx_staff_pin_store_active", "staff_pins", ["store_id", "is_active"], unique=False) + op.create_index(op.f("ix_staff_pins_id"), "staff_pins", ["id"], unique=False) + op.create_index(op.f("ix_staff_pins_is_active"), "staff_pins", ["is_active"], unique=False) + op.create_index(op.f("ix_staff_pins_program_id"), "staff_pins", ["program_id"], unique=False) + op.create_index(op.f("ix_staff_pins_staff_id"), "staff_pins", ["staff_id"], unique=False) + op.create_index(op.f("ix_staff_pins_store_id"), "staff_pins", ["store_id"], unique=False) + op.create_table("apple_device_registrations", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("card_id", sa.Integer(), nullable=False), + sa.Column("device_library_identifier", sa.String(length=100), nullable=False, comment="Unique identifier for the device/library"), + sa.Column("push_token", 
sa.String(length=100), nullable=False, comment="APNs push token for this device"), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["card_id"], ["loyalty_cards.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_apple_device_card', 'apple_device_registrations', ['device_library_identifier', 'card_id'], unique=True) - op.create_index(op.f('ix_apple_device_registrations_card_id'), 'apple_device_registrations', ['card_id'], unique=False) - op.create_index(op.f('ix_apple_device_registrations_device_library_identifier'), 'apple_device_registrations', ['device_library_identifier'], unique=False) - op.create_index(op.f('ix_apple_device_registrations_id'), 'apple_device_registrations', ['id'], unique=False) - op.create_table('loyalty_transactions', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('card_id', sa.Integer(), nullable=False), - sa.Column('store_id', sa.Integer(), nullable=False, comment='Denormalized for query performance'), - sa.Column('staff_pin_id', sa.Integer(), nullable=True, comment='Staff PIN used for this operation'), - sa.Column('transaction_type', sa.String(length=30), nullable=False), - sa.Column('stamps_delta', sa.Integer(), nullable=False, comment='Change in stamps (+1 for earn, -N for redeem)'), - sa.Column('points_delta', sa.Integer(), nullable=False, comment='Change in points (+N for earn, -N for redeem)'), - sa.Column('stamps_balance_after', sa.Integer(), nullable=True, comment='Stamp count after this transaction'), - sa.Column('points_balance_after', sa.Integer(), nullable=True, comment='Points balance after this transaction'), - sa.Column('purchase_amount_cents', sa.Integer(), nullable=True, comment='Purchase amount in cents (for points calculation)'), - sa.Column('order_reference', sa.String(length=100), nullable=True, comment='Reference to order that triggered points'), - sa.Column('reward_id', 
sa.String(length=50), nullable=True, comment='ID of redeemed reward (from program.points_rewards)'), - sa.Column('reward_description', sa.String(length=255), nullable=True, comment='Description of redeemed reward'), - sa.Column('ip_address', sa.String(length=45), nullable=True, comment='IP address of requester (IPv4 or IPv6)'), - sa.Column('user_agent', sa.String(length=500), nullable=True, comment='User agent string'), - sa.Column('notes', sa.Text(), nullable=True, comment='Additional notes (e.g., reason for adjustment)'), - sa.Column('transaction_at', sa.DateTime(timezone=True), nullable=False, comment='When the transaction occurred (may differ from created_at)'), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['card_id'], ['loyalty_cards.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['staff_pin_id'], ['staff_pins.id'], ondelete='SET NULL'), - sa.ForeignKeyConstraint(['store_id'], ['stores.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') + op.create_index("idx_apple_device_card", "apple_device_registrations", ["device_library_identifier", "card_id"], unique=True) + op.create_index(op.f("ix_apple_device_registrations_card_id"), "apple_device_registrations", ["card_id"], unique=False) + op.create_index(op.f("ix_apple_device_registrations_device_library_identifier"), "apple_device_registrations", ["device_library_identifier"], unique=False) + op.create_index(op.f("ix_apple_device_registrations_id"), "apple_device_registrations", ["id"], unique=False) + op.create_table("loyalty_transactions", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("card_id", sa.Integer(), nullable=False), + sa.Column("store_id", sa.Integer(), nullable=False, comment="Denormalized for query performance"), + sa.Column("staff_pin_id", sa.Integer(), nullable=True, comment="Staff PIN used for this operation"), + sa.Column("transaction_type", sa.String(length=30), 
nullable=False), + sa.Column("stamps_delta", sa.Integer(), nullable=False, comment="Change in stamps (+1 for earn, -N for redeem)"), + sa.Column("points_delta", sa.Integer(), nullable=False, comment="Change in points (+N for earn, -N for redeem)"), + sa.Column("stamps_balance_after", sa.Integer(), nullable=True, comment="Stamp count after this transaction"), + sa.Column("points_balance_after", sa.Integer(), nullable=True, comment="Points balance after this transaction"), + sa.Column("purchase_amount_cents", sa.Integer(), nullable=True, comment="Purchase amount in cents (for points calculation)"), + sa.Column("order_reference", sa.String(length=100), nullable=True, comment="Reference to order that triggered points"), + sa.Column("reward_id", sa.String(length=50), nullable=True, comment="ID of redeemed reward (from program.points_rewards)"), + sa.Column("reward_description", sa.String(length=255), nullable=True, comment="Description of redeemed reward"), + sa.Column("ip_address", sa.String(length=45), nullable=True, comment="IP address of requester (IPv4 or IPv6)"), + sa.Column("user_agent", sa.String(length=500), nullable=True, comment="User agent string"), + sa.Column("notes", sa.Text(), nullable=True, comment="Additional notes (e.g., reason for adjustment)"), + sa.Column("transaction_at", sa.DateTime(timezone=True), nullable=False, comment="When the transaction occurred (may differ from created_at)"), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["card_id"], ["loyalty_cards.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["staff_pin_id"], ["staff_pins.id"], ondelete="SET NULL"), + sa.ForeignKeyConstraint(["store_id"], ["stores.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id") ) - op.create_index('idx_loyalty_tx_card_type', 'loyalty_transactions', ['card_id', 'transaction_type'], unique=False) - op.create_index('idx_loyalty_tx_type_date', 
'loyalty_transactions', ['transaction_type', 'transaction_at'], unique=False) - op.create_index('idx_loyalty_tx_store_date', 'loyalty_transactions', ['store_id', 'transaction_at'], unique=False) - op.create_index(op.f('ix_loyalty_transactions_card_id'), 'loyalty_transactions', ['card_id'], unique=False) - op.create_index(op.f('ix_loyalty_transactions_id'), 'loyalty_transactions', ['id'], unique=False) - op.create_index(op.f('ix_loyalty_transactions_order_reference'), 'loyalty_transactions', ['order_reference'], unique=False) - op.create_index(op.f('ix_loyalty_transactions_staff_pin_id'), 'loyalty_transactions', ['staff_pin_id'], unique=False) - op.create_index(op.f('ix_loyalty_transactions_transaction_at'), 'loyalty_transactions', ['transaction_at'], unique=False) - op.create_index(op.f('ix_loyalty_transactions_transaction_type'), 'loyalty_transactions', ['transaction_type'], unique=False) - op.create_index(op.f('ix_loyalty_transactions_store_id'), 'loyalty_transactions', ['store_id'], unique=False) - op.alter_column('admin_menu_configs', 'platform_id', + op.create_index("idx_loyalty_tx_card_type", "loyalty_transactions", ["card_id", "transaction_type"], unique=False) + op.create_index("idx_loyalty_tx_type_date", "loyalty_transactions", ["transaction_type", "transaction_at"], unique=False) + op.create_index("idx_loyalty_tx_store_date", "loyalty_transactions", ["store_id", "transaction_at"], unique=False) + op.create_index(op.f("ix_loyalty_transactions_card_id"), "loyalty_transactions", ["card_id"], unique=False) + op.create_index(op.f("ix_loyalty_transactions_id"), "loyalty_transactions", ["id"], unique=False) + op.create_index(op.f("ix_loyalty_transactions_order_reference"), "loyalty_transactions", ["order_reference"], unique=False) + op.create_index(op.f("ix_loyalty_transactions_staff_pin_id"), "loyalty_transactions", ["staff_pin_id"], unique=False) + op.create_index(op.f("ix_loyalty_transactions_transaction_at"), "loyalty_transactions", ["transaction_at"], 
unique=False) + op.create_index(op.f("ix_loyalty_transactions_transaction_type"), "loyalty_transactions", ["transaction_type"], unique=False) + op.create_index(op.f("ix_loyalty_transactions_store_id"), "loyalty_transactions", ["store_id"], unique=False) + op.alter_column("admin_menu_configs", "platform_id", existing_type=sa.INTEGER(), - comment='Platform scope - applies to users/stores of this platform', - existing_comment='Platform scope - applies to all platform admins of this platform', + comment="Platform scope - applies to users/stores of this platform", + existing_comment="Platform scope - applies to all platform admins of this platform", existing_nullable=True) - op.alter_column('admin_menu_configs', 'user_id', + op.alter_column("admin_menu_configs", "user_id", existing_type=sa.INTEGER(), - comment='User scope - applies to this specific super admin (admin frontend only)', - existing_comment='User scope - applies to this specific super admin', + comment="User scope - applies to this specific super admin (admin frontend only)", + existing_comment="User scope - applies to this specific super admin", existing_nullable=True) - op.alter_column('admin_menu_configs', 'created_at', + op.alter_column("admin_menu_configs", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('admin_menu_configs', 'updated_at', + existing_server_default=sa.text("now()")) + op.alter_column("admin_menu_configs", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.drop_index('idx_admin_menu_configs_frontend_type', table_name='admin_menu_configs') - op.drop_index('idx_admin_menu_configs_menu_item_id', table_name='admin_menu_configs') - op.drop_index('idx_admin_menu_configs_platform_id', table_name='admin_menu_configs') - 
op.drop_index('idx_admin_menu_configs_user_id', table_name='admin_menu_configs') - op.create_index(op.f('ix_admin_menu_configs_frontend_type'), 'admin_menu_configs', ['frontend_type'], unique=False) - op.create_index(op.f('ix_admin_menu_configs_id'), 'admin_menu_configs', ['id'], unique=False) - op.create_index(op.f('ix_admin_menu_configs_menu_item_id'), 'admin_menu_configs', ['menu_item_id'], unique=False) - op.create_index(op.f('ix_admin_menu_configs_platform_id'), 'admin_menu_configs', ['platform_id'], unique=False) - op.create_index(op.f('ix_admin_menu_configs_user_id'), 'admin_menu_configs', ['user_id'], unique=False) - op.alter_column('admin_platforms', 'created_at', + existing_server_default=sa.text("now()")) + op.drop_index("idx_admin_menu_configs_frontend_type", table_name="admin_menu_configs") + op.drop_index("idx_admin_menu_configs_menu_item_id", table_name="admin_menu_configs") + op.drop_index("idx_admin_menu_configs_platform_id", table_name="admin_menu_configs") + op.drop_index("idx_admin_menu_configs_user_id", table_name="admin_menu_configs") + op.create_index(op.f("ix_admin_menu_configs_frontend_type"), "admin_menu_configs", ["frontend_type"], unique=False) + op.create_index(op.f("ix_admin_menu_configs_id"), "admin_menu_configs", ["id"], unique=False) + op.create_index(op.f("ix_admin_menu_configs_menu_item_id"), "admin_menu_configs", ["menu_item_id"], unique=False) + op.create_index(op.f("ix_admin_menu_configs_platform_id"), "admin_menu_configs", ["platform_id"], unique=False) + op.create_index(op.f("ix_admin_menu_configs_user_id"), "admin_menu_configs", ["user_id"], unique=False) + op.alter_column("admin_platforms", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('admin_platforms', 'updated_at', + existing_server_default=sa.text("now()")) + op.alter_column("admin_platforms", "updated_at", 
existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.drop_index('idx_admin_platforms_platform_id', table_name='admin_platforms') - op.drop_index('idx_admin_platforms_user_id', table_name='admin_platforms') - op.create_index(op.f('ix_admin_platforms_id'), 'admin_platforms', ['id'], unique=False) - op.create_index(op.f('ix_admin_platforms_platform_id'), 'admin_platforms', ['platform_id'], unique=False) - op.create_index(op.f('ix_admin_platforms_user_id'), 'admin_platforms', ['user_id'], unique=False) - op.alter_column('content_pages', 'platform_id', + existing_server_default=sa.text("now()")) + op.drop_index("idx_admin_platforms_platform_id", table_name="admin_platforms") + op.drop_index("idx_admin_platforms_user_id", table_name="admin_platforms") + op.create_index(op.f("ix_admin_platforms_id"), "admin_platforms", ["id"], unique=False) + op.create_index(op.f("ix_admin_platforms_platform_id"), "admin_platforms", ["platform_id"], unique=False) + op.create_index(op.f("ix_admin_platforms_user_id"), "admin_platforms", ["user_id"], unique=False) + op.alter_column("content_pages", "platform_id", existing_type=sa.INTEGER(), - comment='Platform this page belongs to', + comment="Platform this page belongs to", existing_nullable=False) - op.alter_column('content_pages', 'store_id', + op.alter_column("content_pages", "store_id", existing_type=sa.INTEGER(), - comment='Store this page belongs to (NULL for platform/default pages)', + comment="Store this page belongs to (NULL for platform/default pages)", existing_nullable=True) - op.alter_column('content_pages', 'is_platform_page', + op.alter_column("content_pages", "is_platform_page", existing_type=sa.BOOLEAN(), - comment='True = platform marketing page (homepage, pricing); False = store default or override', + comment="True = platform marketing page (homepage, pricing); False = store default or override", existing_nullable=False, - 
existing_server_default=sa.text('false')) - op.alter_column('platform_modules', 'created_at', + existing_server_default=sa.text("false")) + op.alter_column("platform_modules", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('platform_modules', 'updated_at', + existing_server_default=sa.text("now()")) + op.alter_column("platform_modules", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.create_index(op.f('ix_platform_modules_id'), 'platform_modules', ['id'], unique=False) - op.alter_column('platforms', 'code', + existing_server_default=sa.text("now()")) + op.create_index(op.f("ix_platform_modules_id"), "platform_modules", ["id"], unique=False) + op.alter_column("platforms", "code", existing_type=sa.VARCHAR(length=50), comment="Unique platform identifier (e.g., 'oms', 'loyalty', 'sites')", existing_nullable=False) - op.alter_column('platforms', 'name', + op.alter_column("platforms", "name", existing_type=sa.VARCHAR(length=100), comment="Display name (e.g., 'Wizamart OMS')", existing_nullable=False) - op.alter_column('platforms', 'description', + op.alter_column("platforms", "description", existing_type=sa.TEXT(), - comment='Platform description for admin/marketing purposes', + comment="Platform description for admin/marketing purposes", existing_nullable=True) - op.alter_column('platforms', 'domain', + op.alter_column("platforms", "domain", existing_type=sa.VARCHAR(length=255), comment="Production domain (e.g., 'oms.lu', 'loyalty.lu')", existing_nullable=True) - op.alter_column('platforms', 'path_prefix', + op.alter_column("platforms", "path_prefix", existing_type=sa.VARCHAR(length=50), comment="Development path prefix (e.g., 'oms' for localhost:9999/oms/*)", existing_nullable=True) - op.alter_column('platforms', 'logo', + 
op.alter_column("platforms", "logo", existing_type=sa.VARCHAR(length=500), - comment='Logo URL for light mode', + comment="Logo URL for light mode", existing_nullable=True) - op.alter_column('platforms', 'logo_dark', + op.alter_column("platforms", "logo_dark", existing_type=sa.VARCHAR(length=500), - comment='Logo URL for dark mode', + comment="Logo URL for dark mode", existing_nullable=True) - op.alter_column('platforms', 'favicon', + op.alter_column("platforms", "favicon", existing_type=sa.VARCHAR(length=500), - comment='Favicon URL', + comment="Favicon URL", existing_nullable=True) - op.alter_column('platforms', 'theme_config', + op.alter_column("platforms", "theme_config", existing_type=postgresql.JSON(astext_type=sa.Text()), - comment='Theme configuration (colors, fonts, etc.)', + comment="Theme configuration (colors, fonts, etc.)", existing_nullable=True) - op.alter_column('platforms', 'default_language', + op.alter_column("platforms", "default_language", existing_type=sa.VARCHAR(length=5), comment="Default language code (e.g., 'fr', 'en', 'de')", existing_nullable=False, existing_server_default=sa.text("'fr'::character varying")) - op.alter_column('platforms', 'supported_languages', + op.alter_column("platforms", "supported_languages", existing_type=postgresql.JSON(astext_type=sa.Text()), - comment='List of supported language codes', + comment="List of supported language codes", existing_nullable=False) - op.alter_column('platforms', 'is_active', + op.alter_column("platforms", "is_active", existing_type=sa.BOOLEAN(), - comment='Whether the platform is active and accessible', + comment="Whether the platform is active and accessible", existing_nullable=False, - existing_server_default=sa.text('true')) - op.alter_column('platforms', 'is_public', + existing_server_default=sa.text("true")) + op.alter_column("platforms", "is_public", existing_type=sa.BOOLEAN(), - comment='Whether the platform is visible in public listings', + comment="Whether the platform is 
visible in public listings", existing_nullable=False, - existing_server_default=sa.text('true')) - op.alter_column('platforms', 'settings', + existing_server_default=sa.text("true")) + op.alter_column("platforms", "settings", existing_type=postgresql.JSON(astext_type=sa.Text()), - comment='Platform-specific settings and feature flags', + comment="Platform-specific settings and feature flags", existing_nullable=True) - op.alter_column('platforms', 'created_at', + op.alter_column("platforms", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('platforms', 'updated_at', + existing_server_default=sa.text("now()")) + op.alter_column("platforms", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.create_index(op.f('ix_platforms_id'), 'platforms', ['id'], unique=False) - op.alter_column('subscription_tiers', 'platform_id', + existing_server_default=sa.text("now()")) + op.create_index(op.f("ix_platforms_id"), "platforms", ["id"], unique=False) + op.alter_column("subscription_tiers", "platform_id", existing_type=sa.INTEGER(), - comment='Platform this tier belongs to (NULL = global tier)', + comment="Platform this tier belongs to (NULL = global tier)", existing_nullable=True) - op.alter_column('subscription_tiers', 'cms_pages_limit', + op.alter_column("subscription_tiers", "cms_pages_limit", existing_type=sa.INTEGER(), - comment='Total CMS pages limit (NULL = unlimited)', + comment="Total CMS pages limit (NULL = unlimited)", existing_nullable=True) - op.alter_column('subscription_tiers', 'cms_custom_pages_limit', + op.alter_column("subscription_tiers", "cms_custom_pages_limit", existing_type=sa.INTEGER(), - comment='Custom pages limit, excluding overrides (NULL = unlimited)', + comment="Custom pages limit, excluding overrides (NULL = unlimited)", 
existing_nullable=True) - op.drop_index('ix_subscription_tiers_code', table_name='subscription_tiers') - op.create_index(op.f('ix_subscription_tiers_code'), 'subscription_tiers', ['code'], unique=False) - op.alter_column('users', 'is_super_admin', + op.drop_index("ix_subscription_tiers_code", table_name="subscription_tiers") + op.create_index(op.f("ix_subscription_tiers_code"), "subscription_tiers", ["code"], unique=False) + op.alter_column("users", "is_super_admin", existing_type=sa.BOOLEAN(), comment=None, - existing_comment='Whether this admin has access to all platforms (super admin)', + existing_comment="Whether this admin has access to all platforms (super admin)", existing_nullable=False, - existing_server_default=sa.text('false')) - op.alter_column('store_platforms', 'store_id', + existing_server_default=sa.text("false")) + op.alter_column("store_platforms", "store_id", existing_type=sa.INTEGER(), - comment='Reference to the store', + comment="Reference to the store", existing_nullable=False) - op.alter_column('store_platforms', 'platform_id', + op.alter_column("store_platforms", "platform_id", existing_type=sa.INTEGER(), - comment='Reference to the platform', + comment="Reference to the platform", existing_nullable=False) - op.alter_column('store_platforms', 'tier_id', + op.alter_column("store_platforms", "tier_id", existing_type=sa.INTEGER(), - comment='Platform-specific subscription tier', + comment="Platform-specific subscription tier", existing_nullable=True) - op.alter_column('store_platforms', 'is_active', + op.alter_column("store_platforms", "is_active", existing_type=sa.BOOLEAN(), - comment='Whether the store is active on this platform', + comment="Whether the store is active on this platform", existing_nullable=False, - existing_server_default=sa.text('true')) - op.alter_column('store_platforms', 'is_primary', + existing_server_default=sa.text("true")) + op.alter_column("store_platforms", "is_primary", existing_type=sa.BOOLEAN(), comment="Whether 
this is the store's primary platform", existing_nullable=False, - existing_server_default=sa.text('false')) - op.alter_column('store_platforms', 'custom_subdomain', + existing_server_default=sa.text("false")) + op.alter_column("store_platforms", "custom_subdomain", existing_type=sa.VARCHAR(length=100), - comment='Platform-specific subdomain (if different from main subdomain)', + comment="Platform-specific subdomain (if different from main subdomain)", existing_nullable=True) - op.alter_column('store_platforms', 'settings', + op.alter_column("store_platforms", "settings", existing_type=postgresql.JSON(astext_type=sa.Text()), - comment='Platform-specific store settings', + comment="Platform-specific store settings", existing_nullable=True) - op.alter_column('store_platforms', 'joined_at', + op.alter_column("store_platforms", "joined_at", existing_type=postgresql.TIMESTAMP(timezone=True), - comment='When the store joined this platform', + comment="When the store joined this platform", existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('store_platforms', 'created_at', + existing_server_default=sa.text("now()")) + op.alter_column("store_platforms", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('store_platforms', 'updated_at', + existing_server_default=sa.text("now()")) + op.alter_column("store_platforms", "updated_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.create_index(op.f('ix_store_platforms_id'), 'store_platforms', ['id'], unique=False) + existing_server_default=sa.text("now()")) + op.create_index(op.f("ix_store_platforms_id"), "store_platforms", ["id"], unique=False) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index(op.f('ix_store_platforms_id'), table_name='store_platforms') - op.alter_column('store_platforms', 'updated_at', + op.drop_index(op.f("ix_store_platforms_id"), table_name="store_platforms") + op.alter_column("store_platforms", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('store_platforms', 'created_at', + existing_server_default=sa.text("now()")) + op.alter_column("store_platforms", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('store_platforms', 'joined_at', + existing_server_default=sa.text("now()")) + op.alter_column("store_platforms", "joined_at", existing_type=postgresql.TIMESTAMP(timezone=True), comment=None, - existing_comment='When the store joined this platform', + existing_comment="When the store joined this platform", existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('store_platforms', 'settings', + existing_server_default=sa.text("now()")) + op.alter_column("store_platforms", "settings", existing_type=postgresql.JSON(astext_type=sa.Text()), comment=None, - existing_comment='Platform-specific store settings', + existing_comment="Platform-specific store settings", existing_nullable=True) - op.alter_column('store_platforms', 'custom_subdomain', + op.alter_column("store_platforms", "custom_subdomain", existing_type=sa.VARCHAR(length=100), comment=None, - existing_comment='Platform-specific subdomain (if different from main subdomain)', + existing_comment="Platform-specific subdomain (if different from main subdomain)", existing_nullable=True) - op.alter_column('store_platforms', 'is_primary', + op.alter_column("store_platforms", "is_primary", existing_type=sa.BOOLEAN(), comment=None, existing_comment="Whether this is the store's primary platform", 
existing_nullable=False, - existing_server_default=sa.text('false')) - op.alter_column('store_platforms', 'is_active', + existing_server_default=sa.text("false")) + op.alter_column("store_platforms", "is_active", existing_type=sa.BOOLEAN(), comment=None, - existing_comment='Whether the store is active on this platform', + existing_comment="Whether the store is active on this platform", existing_nullable=False, - existing_server_default=sa.text('true')) - op.alter_column('store_platforms', 'tier_id', + existing_server_default=sa.text("true")) + op.alter_column("store_platforms", "tier_id", existing_type=sa.INTEGER(), comment=None, - existing_comment='Platform-specific subscription tier', + existing_comment="Platform-specific subscription tier", existing_nullable=True) - op.alter_column('store_platforms', 'platform_id', + op.alter_column("store_platforms", "platform_id", existing_type=sa.INTEGER(), comment=None, - existing_comment='Reference to the platform', + existing_comment="Reference to the platform", existing_nullable=False) - op.alter_column('store_platforms', 'store_id', + op.alter_column("store_platforms", "store_id", existing_type=sa.INTEGER(), comment=None, - existing_comment='Reference to the store', + existing_comment="Reference to the store", existing_nullable=False) - op.alter_column('users', 'is_super_admin', + op.alter_column("users", "is_super_admin", existing_type=sa.BOOLEAN(), - comment='Whether this admin has access to all platforms (super admin)', + comment="Whether this admin has access to all platforms (super admin)", existing_nullable=False, - existing_server_default=sa.text('false')) - op.drop_index(op.f('ix_subscription_tiers_code'), table_name='subscription_tiers') - op.create_index('ix_subscription_tiers_code', 'subscription_tiers', ['code'], unique=True) - op.alter_column('subscription_tiers', 'cms_custom_pages_limit', + existing_server_default=sa.text("false")) + op.drop_index(op.f("ix_subscription_tiers_code"), 
table_name="subscription_tiers") + op.create_index("ix_subscription_tiers_code", "subscription_tiers", ["code"], unique=True) + op.alter_column("subscription_tiers", "cms_custom_pages_limit", existing_type=sa.INTEGER(), comment=None, - existing_comment='Custom pages limit, excluding overrides (NULL = unlimited)', + existing_comment="Custom pages limit, excluding overrides (NULL = unlimited)", existing_nullable=True) - op.alter_column('subscription_tiers', 'cms_pages_limit', + op.alter_column("subscription_tiers", "cms_pages_limit", existing_type=sa.INTEGER(), comment=None, - existing_comment='Total CMS pages limit (NULL = unlimited)', + existing_comment="Total CMS pages limit (NULL = unlimited)", existing_nullable=True) - op.alter_column('subscription_tiers', 'platform_id', + op.alter_column("subscription_tiers", "platform_id", existing_type=sa.INTEGER(), comment=None, - existing_comment='Platform this tier belongs to (NULL = global tier)', + existing_comment="Platform this tier belongs to (NULL = global tier)", existing_nullable=True) - op.drop_index(op.f('ix_platforms_id'), table_name='platforms') - op.alter_column('platforms', 'updated_at', + op.drop_index(op.f("ix_platforms_id"), table_name="platforms") + op.alter_column("platforms", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('platforms', 'created_at', + existing_server_default=sa.text("now()")) + op.alter_column("platforms", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('platforms', 'settings', + existing_server_default=sa.text("now()")) + op.alter_column("platforms", "settings", existing_type=postgresql.JSON(astext_type=sa.Text()), comment=None, - existing_comment='Platform-specific settings and feature flags', + existing_comment="Platform-specific settings 
and feature flags", existing_nullable=True) - op.alter_column('platforms', 'is_public', + op.alter_column("platforms", "is_public", existing_type=sa.BOOLEAN(), comment=None, - existing_comment='Whether the platform is visible in public listings', + existing_comment="Whether the platform is visible in public listings", existing_nullable=False, - existing_server_default=sa.text('true')) - op.alter_column('platforms', 'is_active', + existing_server_default=sa.text("true")) + op.alter_column("platforms", "is_active", existing_type=sa.BOOLEAN(), comment=None, - existing_comment='Whether the platform is active and accessible', + existing_comment="Whether the platform is active and accessible", existing_nullable=False, - existing_server_default=sa.text('true')) - op.alter_column('platforms', 'supported_languages', + existing_server_default=sa.text("true")) + op.alter_column("platforms", "supported_languages", existing_type=postgresql.JSON(astext_type=sa.Text()), comment=None, - existing_comment='List of supported language codes', + existing_comment="List of supported language codes", existing_nullable=False) - op.alter_column('platforms', 'default_language', + op.alter_column("platforms", "default_language", existing_type=sa.VARCHAR(length=5), comment=None, existing_comment="Default language code (e.g., 'fr', 'en', 'de')", existing_nullable=False, existing_server_default=sa.text("'fr'::character varying")) - op.alter_column('platforms', 'theme_config', + op.alter_column("platforms", "theme_config", existing_type=postgresql.JSON(astext_type=sa.Text()), comment=None, - existing_comment='Theme configuration (colors, fonts, etc.)', + existing_comment="Theme configuration (colors, fonts, etc.)", existing_nullable=True) - op.alter_column('platforms', 'favicon', + op.alter_column("platforms", "favicon", existing_type=sa.VARCHAR(length=500), comment=None, - existing_comment='Favicon URL', + existing_comment="Favicon URL", existing_nullable=True) - op.alter_column('platforms', 
'logo_dark', + op.alter_column("platforms", "logo_dark", existing_type=sa.VARCHAR(length=500), comment=None, - existing_comment='Logo URL for dark mode', + existing_comment="Logo URL for dark mode", existing_nullable=True) - op.alter_column('platforms', 'logo', + op.alter_column("platforms", "logo", existing_type=sa.VARCHAR(length=500), comment=None, - existing_comment='Logo URL for light mode', + existing_comment="Logo URL for light mode", existing_nullable=True) - op.alter_column('platforms', 'path_prefix', + op.alter_column("platforms", "path_prefix", existing_type=sa.VARCHAR(length=50), comment=None, existing_comment="Development path prefix (e.g., 'oms' for localhost:9999/oms/*)", existing_nullable=True) - op.alter_column('platforms', 'domain', + op.alter_column("platforms", "domain", existing_type=sa.VARCHAR(length=255), comment=None, existing_comment="Production domain (e.g., 'oms.lu', 'loyalty.lu')", existing_nullable=True) - op.alter_column('platforms', 'description', + op.alter_column("platforms", "description", existing_type=sa.TEXT(), comment=None, - existing_comment='Platform description for admin/marketing purposes', + existing_comment="Platform description for admin/marketing purposes", existing_nullable=True) - op.alter_column('platforms', 'name', + op.alter_column("platforms", "name", existing_type=sa.VARCHAR(length=100), comment=None, existing_comment="Display name (e.g., 'Wizamart OMS')", existing_nullable=False) - op.alter_column('platforms', 'code', + op.alter_column("platforms", "code", existing_type=sa.VARCHAR(length=50), comment=None, existing_comment="Unique platform identifier (e.g., 'oms', 'loyalty', 'sites')", existing_nullable=False) - op.drop_index(op.f('ix_platform_modules_id'), table_name='platform_modules') - op.alter_column('platform_modules', 'updated_at', + op.drop_index(op.f("ix_platform_modules_id"), table_name="platform_modules") + op.alter_column("platform_modules", "updated_at", existing_type=sa.DateTime(), 
type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('platform_modules', 'created_at', + existing_server_default=sa.text("now()")) + op.alter_column("platform_modules", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('content_pages', 'is_platform_page', + existing_server_default=sa.text("now()")) + op.alter_column("content_pages", "is_platform_page", existing_type=sa.BOOLEAN(), comment=None, - existing_comment='True = platform marketing page (homepage, pricing); False = store default or override', + existing_comment="True = platform marketing page (homepage, pricing); False = store default or override", existing_nullable=False, - existing_server_default=sa.text('false')) - op.alter_column('content_pages', 'store_id', + existing_server_default=sa.text("false")) + op.alter_column("content_pages", "store_id", existing_type=sa.INTEGER(), comment=None, - existing_comment='Store this page belongs to (NULL for platform/default pages)', + existing_comment="Store this page belongs to (NULL for platform/default pages)", existing_nullable=True) - op.alter_column('content_pages', 'platform_id', + op.alter_column("content_pages", "platform_id", existing_type=sa.INTEGER(), comment=None, - existing_comment='Platform this page belongs to', + existing_comment="Platform this page belongs to", existing_nullable=False) - op.drop_index(op.f('ix_admin_platforms_user_id'), table_name='admin_platforms') - op.drop_index(op.f('ix_admin_platforms_platform_id'), table_name='admin_platforms') - op.drop_index(op.f('ix_admin_platforms_id'), table_name='admin_platforms') - op.create_index('idx_admin_platforms_user_id', 'admin_platforms', ['user_id'], unique=False) - op.create_index('idx_admin_platforms_platform_id', 'admin_platforms', ['platform_id'], unique=False) - 
op.alter_column('admin_platforms', 'updated_at', + op.drop_index(op.f("ix_admin_platforms_user_id"), table_name="admin_platforms") + op.drop_index(op.f("ix_admin_platforms_platform_id"), table_name="admin_platforms") + op.drop_index(op.f("ix_admin_platforms_id"), table_name="admin_platforms") + op.create_index("idx_admin_platforms_user_id", "admin_platforms", ["user_id"], unique=False) + op.create_index("idx_admin_platforms_platform_id", "admin_platforms", ["platform_id"], unique=False) + op.alter_column("admin_platforms", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('admin_platforms', 'created_at', + existing_server_default=sa.text("now()")) + op.alter_column("admin_platforms", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.drop_index(op.f('ix_admin_menu_configs_user_id'), table_name='admin_menu_configs') - op.drop_index(op.f('ix_admin_menu_configs_platform_id'), table_name='admin_menu_configs') - op.drop_index(op.f('ix_admin_menu_configs_menu_item_id'), table_name='admin_menu_configs') - op.drop_index(op.f('ix_admin_menu_configs_id'), table_name='admin_menu_configs') - op.drop_index(op.f('ix_admin_menu_configs_frontend_type'), table_name='admin_menu_configs') - op.create_index('idx_admin_menu_configs_user_id', 'admin_menu_configs', ['user_id'], unique=False) - op.create_index('idx_admin_menu_configs_platform_id', 'admin_menu_configs', ['platform_id'], unique=False) - op.create_index('idx_admin_menu_configs_menu_item_id', 'admin_menu_configs', ['menu_item_id'], unique=False) - op.create_index('idx_admin_menu_configs_frontend_type', 'admin_menu_configs', ['frontend_type'], unique=False) - op.alter_column('admin_menu_configs', 'updated_at', + existing_server_default=sa.text("now()")) + 
op.drop_index(op.f("ix_admin_menu_configs_user_id"), table_name="admin_menu_configs") + op.drop_index(op.f("ix_admin_menu_configs_platform_id"), table_name="admin_menu_configs") + op.drop_index(op.f("ix_admin_menu_configs_menu_item_id"), table_name="admin_menu_configs") + op.drop_index(op.f("ix_admin_menu_configs_id"), table_name="admin_menu_configs") + op.drop_index(op.f("ix_admin_menu_configs_frontend_type"), table_name="admin_menu_configs") + op.create_index("idx_admin_menu_configs_user_id", "admin_menu_configs", ["user_id"], unique=False) + op.create_index("idx_admin_menu_configs_platform_id", "admin_menu_configs", ["platform_id"], unique=False) + op.create_index("idx_admin_menu_configs_menu_item_id", "admin_menu_configs", ["menu_item_id"], unique=False) + op.create_index("idx_admin_menu_configs_frontend_type", "admin_menu_configs", ["frontend_type"], unique=False) + op.alter_column("admin_menu_configs", "updated_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('admin_menu_configs', 'created_at', + existing_server_default=sa.text("now()")) + op.alter_column("admin_menu_configs", "created_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('admin_menu_configs', 'user_id', + existing_server_default=sa.text("now()")) + op.alter_column("admin_menu_configs", "user_id", existing_type=sa.INTEGER(), - comment='User scope - applies to this specific super admin', - existing_comment='User scope - applies to this specific super admin (admin frontend only)', + comment="User scope - applies to this specific super admin", + existing_comment="User scope - applies to this specific super admin (admin frontend only)", existing_nullable=True) - op.alter_column('admin_menu_configs', 'platform_id', + op.alter_column("admin_menu_configs", "platform_id", 
existing_type=sa.INTEGER(), - comment='Platform scope - applies to all platform admins of this platform', - existing_comment='Platform scope - applies to users/stores of this platform', + comment="Platform scope - applies to all platform admins of this platform", + existing_comment="Platform scope - applies to users/stores of this platform", existing_nullable=True) - op.drop_index(op.f('ix_loyalty_transactions_store_id'), table_name='loyalty_transactions') - op.drop_index(op.f('ix_loyalty_transactions_transaction_type'), table_name='loyalty_transactions') - op.drop_index(op.f('ix_loyalty_transactions_transaction_at'), table_name='loyalty_transactions') - op.drop_index(op.f('ix_loyalty_transactions_staff_pin_id'), table_name='loyalty_transactions') - op.drop_index(op.f('ix_loyalty_transactions_order_reference'), table_name='loyalty_transactions') - op.drop_index(op.f('ix_loyalty_transactions_id'), table_name='loyalty_transactions') - op.drop_index(op.f('ix_loyalty_transactions_card_id'), table_name='loyalty_transactions') - op.drop_index('idx_loyalty_tx_store_date', table_name='loyalty_transactions') - op.drop_index('idx_loyalty_tx_type_date', table_name='loyalty_transactions') - op.drop_index('idx_loyalty_tx_card_type', table_name='loyalty_transactions') - op.drop_table('loyalty_transactions') - op.drop_index(op.f('ix_apple_device_registrations_id'), table_name='apple_device_registrations') - op.drop_index(op.f('ix_apple_device_registrations_device_library_identifier'), table_name='apple_device_registrations') - op.drop_index(op.f('ix_apple_device_registrations_card_id'), table_name='apple_device_registrations') - op.drop_index('idx_apple_device_card', table_name='apple_device_registrations') - op.drop_table('apple_device_registrations') - op.drop_index(op.f('ix_staff_pins_store_id'), table_name='staff_pins') - op.drop_index(op.f('ix_staff_pins_staff_id'), table_name='staff_pins') - op.drop_index(op.f('ix_staff_pins_program_id'), table_name='staff_pins') - 
op.drop_index(op.f('ix_staff_pins_is_active'), table_name='staff_pins') - op.drop_index(op.f('ix_staff_pins_id'), table_name='staff_pins') - op.drop_index('idx_staff_pin_store_active', table_name='staff_pins') - op.drop_index('idx_staff_pin_program_active', table_name='staff_pins') - op.drop_table('staff_pins') - op.drop_index(op.f('ix_loyalty_cards_store_id'), table_name='loyalty_cards') - op.drop_index(op.f('ix_loyalty_cards_qr_code_data'), table_name='loyalty_cards') - op.drop_index(op.f('ix_loyalty_cards_program_id'), table_name='loyalty_cards') - op.drop_index(op.f('ix_loyalty_cards_is_active'), table_name='loyalty_cards') - op.drop_index(op.f('ix_loyalty_cards_id'), table_name='loyalty_cards') - op.drop_index(op.f('ix_loyalty_cards_google_object_id'), table_name='loyalty_cards') - op.drop_index(op.f('ix_loyalty_cards_customer_id'), table_name='loyalty_cards') - op.drop_index(op.f('ix_loyalty_cards_card_number'), table_name='loyalty_cards') - op.drop_index(op.f('ix_loyalty_cards_apple_serial_number'), table_name='loyalty_cards') - op.drop_index('idx_loyalty_card_store_active', table_name='loyalty_cards') - op.drop_index('idx_loyalty_card_customer_program', table_name='loyalty_cards') - op.drop_table('loyalty_cards') - op.drop_index(op.f('ix_loyalty_programs_store_id'), table_name='loyalty_programs') - op.drop_index(op.f('ix_loyalty_programs_is_active'), table_name='loyalty_programs') - op.drop_index(op.f('ix_loyalty_programs_id'), table_name='loyalty_programs') - op.drop_index('idx_loyalty_program_store_active', table_name='loyalty_programs') - op.drop_table('loyalty_programs') + op.drop_index(op.f("ix_loyalty_transactions_store_id"), table_name="loyalty_transactions") + op.drop_index(op.f("ix_loyalty_transactions_transaction_type"), table_name="loyalty_transactions") + op.drop_index(op.f("ix_loyalty_transactions_transaction_at"), table_name="loyalty_transactions") + op.drop_index(op.f("ix_loyalty_transactions_staff_pin_id"), table_name="loyalty_transactions") 
+ op.drop_index(op.f("ix_loyalty_transactions_order_reference"), table_name="loyalty_transactions") + op.drop_index(op.f("ix_loyalty_transactions_id"), table_name="loyalty_transactions") + op.drop_index(op.f("ix_loyalty_transactions_card_id"), table_name="loyalty_transactions") + op.drop_index("idx_loyalty_tx_store_date", table_name="loyalty_transactions") + op.drop_index("idx_loyalty_tx_type_date", table_name="loyalty_transactions") + op.drop_index("idx_loyalty_tx_card_type", table_name="loyalty_transactions") + op.drop_table("loyalty_transactions") + op.drop_index(op.f("ix_apple_device_registrations_id"), table_name="apple_device_registrations") + op.drop_index(op.f("ix_apple_device_registrations_device_library_identifier"), table_name="apple_device_registrations") + op.drop_index(op.f("ix_apple_device_registrations_card_id"), table_name="apple_device_registrations") + op.drop_index("idx_apple_device_card", table_name="apple_device_registrations") + op.drop_table("apple_device_registrations") + op.drop_index(op.f("ix_staff_pins_store_id"), table_name="staff_pins") + op.drop_index(op.f("ix_staff_pins_staff_id"), table_name="staff_pins") + op.drop_index(op.f("ix_staff_pins_program_id"), table_name="staff_pins") + op.drop_index(op.f("ix_staff_pins_is_active"), table_name="staff_pins") + op.drop_index(op.f("ix_staff_pins_id"), table_name="staff_pins") + op.drop_index("idx_staff_pin_store_active", table_name="staff_pins") + op.drop_index("idx_staff_pin_program_active", table_name="staff_pins") + op.drop_table("staff_pins") + op.drop_index(op.f("ix_loyalty_cards_store_id"), table_name="loyalty_cards") + op.drop_index(op.f("ix_loyalty_cards_qr_code_data"), table_name="loyalty_cards") + op.drop_index(op.f("ix_loyalty_cards_program_id"), table_name="loyalty_cards") + op.drop_index(op.f("ix_loyalty_cards_is_active"), table_name="loyalty_cards") + op.drop_index(op.f("ix_loyalty_cards_id"), table_name="loyalty_cards") + 
op.drop_index(op.f("ix_loyalty_cards_google_object_id"), table_name="loyalty_cards") + op.drop_index(op.f("ix_loyalty_cards_customer_id"), table_name="loyalty_cards") + op.drop_index(op.f("ix_loyalty_cards_card_number"), table_name="loyalty_cards") + op.drop_index(op.f("ix_loyalty_cards_apple_serial_number"), table_name="loyalty_cards") + op.drop_index("idx_loyalty_card_store_active", table_name="loyalty_cards") + op.drop_index("idx_loyalty_card_customer_program", table_name="loyalty_cards") + op.drop_table("loyalty_cards") + op.drop_index(op.f("ix_loyalty_programs_store_id"), table_name="loyalty_programs") + op.drop_index(op.f("ix_loyalty_programs_is_active"), table_name="loyalty_programs") + op.drop_index(op.f("ix_loyalty_programs_id"), table_name="loyalty_programs") + op.drop_index("idx_loyalty_program_store_active", table_name="loyalty_programs") + op.drop_table("loyalty_programs") # ### end Alembic commands ### diff --git a/alembic/versions_backup/module_loyalty/loyalty_003_phase2_merchant_based.py b/alembic/versions_backup/module_loyalty/loyalty_003_phase2_merchant_based.py index 7040b76d..164c81ee 100644 --- a/alembic/versions_backup/module_loyalty/loyalty_003_phase2_merchant_based.py +++ b/alembic/versions_backup/module_loyalty/loyalty_003_phase2_merchant_based.py @@ -15,17 +15,18 @@ Phase 2 changes: - NEW COLUMN on loyalty_cards: last_activity_at """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa -from alembic import op from sqlalchemy import text +from alembic import op + # revision identifiers, used by Alembic. 
revision: str = "loyalty_003_phase2" -down_revision: Union[str, None] = "0fb5d6d6ff97" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "0fb5d6d6ff97" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/n2c3d4e5f6a7_add_features_table.py b/alembic/versions_backup/n2c3d4e5f6a7_add_features_table.py index 27b36e85..ddfd69e4 100644 --- a/alembic/versions_backup/n2c3d4e5f6a7_add_features_table.py +++ b/alembic/versions_backup/n2c3d4e5f6a7_add_features_table.py @@ -7,16 +7,17 @@ Create Date: 2025-12-31 10:00:00.000000 """ import json -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa + from alembic import op # revision identifiers, used by Alembic. revision: str = "n2c3d4e5f6a7" -down_revision: Union[str, None] = "ba2c0ce78396" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "ba2c0ce78396" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None # ============================================================================ @@ -245,7 +246,7 @@ def upgrade() -> None: tier_ids[row[1]] = row[0] # Insert features - now = sa.func.now() + sa.func.now() for category, code, name, description, ui_location, ui_icon, ui_route, display_order in FEATURES: minimum_tier_code = MINIMUM_TIER.get(code) minimum_tier_id = tier_ids.get(minimum_tier_code) if minimum_tier_code else None diff --git a/alembic/versions_backup/o3c4d5e6f7a8_add_inventory_transactions_table.py b/alembic/versions_backup/o3c4d5e6f7a8_add_inventory_transactions_table.py index fef620d5..10c2279d 100644 --- a/alembic/versions_backup/o3c4d5e6f7a8_add_inventory_transactions_table.py +++ b/alembic/versions_backup/o3c4d5e6f7a8_add_inventory_transactions_table.py @@ 
-10,9 +10,10 @@ Adds an audit trail for inventory movements: - Store quantity snapshots for historical analysis """ -from alembic import op import sqlalchemy as sa +from alembic import op + # revision identifiers, used by Alembic. revision = "o3c4d5e6f7a8" down_revision = "n2c3d4e5f6a7" diff --git a/alembic/versions_backup/p4d5e6f7a8b9_add_shipped_quantity_to_order_items.py b/alembic/versions_backup/p4d5e6f7a8b9_add_shipped_quantity_to_order_items.py index cbd2069b..6c7161af 100644 --- a/alembic/versions_backup/p4d5e6f7a8b9_add_shipped_quantity_to_order_items.py +++ b/alembic/versions_backup/p4d5e6f7a8b9_add_shipped_quantity_to_order_items.py @@ -6,24 +6,24 @@ Revises: o3c4d5e6f7a8 Create Date: 2026-01-01 12:00:00.000000 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = 'p4d5e6f7a8b9' -down_revision: Union[str, None] = 'o3c4d5e6f7a8' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "p4d5e6f7a8b9" +down_revision: str | None = "o3c4d5e6f7a8" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add shipped_quantity column to order_items op.add_column( - 'order_items', - sa.Column('shipped_quantity', sa.Integer(), nullable=False, server_default='0') + "order_items", + sa.Column("shipped_quantity", sa.Integer(), nullable=False, server_default="0") ) # Set shipped_quantity = quantity for already fulfilled items @@ -36,4 +36,4 @@ def upgrade() -> None: def downgrade() -> None: - op.drop_column('order_items', 'shipped_quantity') + op.drop_column("order_items", "shipped_quantity") diff --git a/alembic/versions_backup/q5e6f7a8b9c0_add_vat_fields_to_orders.py b/alembic/versions_backup/q5e6f7a8b9c0_add_vat_fields_to_orders.py index b7926c76..87bc42d8 100644 --- 
a/alembic/versions_backup/q5e6f7a8b9c0_add_vat_fields_to_orders.py +++ b/alembic/versions_backup/q5e6f7a8b9c0_add_vat_fields_to_orders.py @@ -10,42 +10,42 @@ Revises: p4d5e6f7a8b9 Create Date: 2026-01-02 10:00:00.000000 """ -from typing import Sequence, Union +from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision: str = 'q5e6f7a8b9c0' -down_revision: Union[str, None] = 'p4d5e6f7a8b9' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +revision: str = "q5e6f7a8b9c0" +down_revision: str | None = "p4d5e6f7a8b9" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: # Add VAT regime (domestic, oss, reverse_charge, origin, exempt) op.add_column( - 'orders', - sa.Column('vat_regime', sa.String(20), nullable=True) + "orders", + sa.Column("vat_regime", sa.String(20), nullable=True) ) # Add VAT rate as percentage (e.g., 17.00 for 17%) op.add_column( - 'orders', - sa.Column('vat_rate', sa.Numeric(5, 2), nullable=True) + "orders", + sa.Column("vat_rate", sa.Numeric(5, 2), nullable=True) ) # Add human-readable VAT label (e.g., "Luxembourg VAT 17%") op.add_column( - 'orders', - sa.Column('vat_rate_label', sa.String(100), nullable=True) + "orders", + sa.Column("vat_rate_label", sa.String(100), nullable=True) ) # Add destination country for cross-border sales (ISO code) op.add_column( - 'orders', - sa.Column('vat_destination_country', sa.String(2), nullable=True) + "orders", + sa.Column("vat_destination_country", sa.String(2), nullable=True) ) # Populate VAT fields for existing orders based on shipping country @@ -66,7 +66,7 @@ def upgrade() -> None: def downgrade() -> None: - op.drop_column('orders', 'vat_destination_country') - op.drop_column('orders', 'vat_rate_label') - op.drop_column('orders', 'vat_rate') - op.drop_column('orders', 'vat_regime') + 
op.drop_column("orders", "vat_destination_country") + op.drop_column("orders", "vat_rate_label") + op.drop_column("orders", "vat_rate") + op.drop_column("orders", "vat_regime") diff --git a/alembic/versions_backup/r6f7a8b9c0d1_add_country_iso_to_addresses.py b/alembic/versions_backup/r6f7a8b9c0d1_add_country_iso_to_addresses.py index e7af3c97..ea2d862a 100644 --- a/alembic/versions_backup/r6f7a8b9c0d1_add_country_iso_to_addresses.py +++ b/alembic/versions_backup/r6f7a8b9c0d1_add_country_iso_to_addresses.py @@ -11,10 +11,10 @@ This migration is idempotent - it checks for existing columns before making changes. """ -from alembic import op import sqlalchemy as sa from sqlalchemy import text +from alembic import op # revision identifiers, used by Alembic. revision = "r6f7a8b9c0d1" diff --git a/alembic/versions_backup/s7a8b9c0d1e2_add_storefront_locale_to_vendors.py b/alembic/versions_backup/s7a8b9c0d1e2_add_storefront_locale_to_vendors.py index b11cb9a0..ffc01fbf 100644 --- a/alembic/versions_backup/s7a8b9c0d1e2_add_storefront_locale_to_vendors.py +++ b/alembic/versions_backup/s7a8b9c0d1e2_add_storefront_locale_to_vendors.py @@ -10,9 +10,9 @@ NULL means the vendor inherits from platform defaults. Examples: 'fr-LU', 'de-DE', 'en-GB' """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision = "s7a8b9c0d1e2" diff --git a/alembic/versions_backup/t001_rename_company_vendor_to_merchant_store.py b/alembic/versions_backup/t001_rename_company_vendor_to_merchant_store.py index 15644522..254fa619 100644 --- a/alembic/versions_backup/t001_rename_company_vendor_to_merchant_store.py +++ b/alembic/versions_backup/t001_rename_company_vendor_to_merchant_store.py @@ -18,16 +18,17 @@ Major terminology migration: - letzshop_vendor_cache -> letzshop_store_cache """ -from typing import Sequence, Union +from collections.abc import Sequence + +from sqlalchemy import text from alembic import op -from sqlalchemy import text # revision identifiers, used by Alembic. revision: str = "t001_terminology" -down_revision: Union[str, None] = "loyalty_003_phase2" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "loyalty_003_phase2" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def _col_exists(table: str, col: str) -> bool: diff --git a/alembic/versions_backup/t002_rename_vendor_constraints_and_indexes.py b/alembic/versions_backup/t002_rename_vendor_constraints_and_indexes.py index 082b81f7..f884d9e0 100644 --- a/alembic/versions_backup/t002_rename_vendor_constraints_and_indexes.py +++ b/alembic/versions_backup/t002_rename_vendor_constraints_and_indexes.py @@ -8,15 +8,15 @@ Completes the Company/Vendor -> Merchant/Store terminology migration by renaming 4 constraints and 12 indexes that still used "vendor" in their names. """ -from typing import Sequence, Union +from collections.abc import Sequence from alembic import op # revision identifiers, used by Alembic. 
revision: str = "t002_constraints" -down_revision: Union[str, None] = "t001_terminology" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "t001_terminology" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None # (old_name, new_name, table) — table is needed for RENAME CONSTRAINT CONSTRAINTS = [ diff --git a/alembic/versions_backup/t8b9c0d1e2f3_add_password_reset_tokens.py b/alembic/versions_backup/t8b9c0d1e2f3_add_password_reset_tokens.py index 329a3189..16b7d9fc 100644 --- a/alembic/versions_backup/t8b9c0d1e2f3_add_password_reset_tokens.py +++ b/alembic/versions_backup/t8b9c0d1e2f3_add_password_reset_tokens.py @@ -6,9 +6,9 @@ Create Date: 2026-01-03 """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision = "t8b9c0d1e2f3" diff --git a/alembic/versions_backup/u9c0d1e2f3g4_add_vendor_email_templates.py b/alembic/versions_backup/u9c0d1e2f3g4_add_vendor_email_templates.py index 9cf16718..0c633b8e 100644 --- a/alembic/versions_backup/u9c0d1e2f3g4_add_vendor_email_templates.py +++ b/alembic/versions_backup/u9c0d1e2f3g4_add_vendor_email_templates.py @@ -11,9 +11,9 @@ Changes: - Create vendor_email_templates table for vendor-specific template overrides """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision = "u9c0d1e2f3g4" diff --git a/alembic/versions_backup/v0a1b2c3d4e5_add_vendor_email_settings.py b/alembic/versions_backup/v0a1b2c3d4e5_add_vendor_email_settings.py index ac5307a1..67fff26f 100644 --- a/alembic/versions_backup/v0a1b2c3d4e5_add_vendor_email_settings.py +++ b/alembic/versions_backup/v0a1b2c3d4e5_add_vendor_email_settings.py @@ -11,9 +11,9 @@ Changes: - Premium providers (SendGrid, Mailgun, SES) are tier-gated (Business+) """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision = "v0a1b2c3d4e5" diff --git a/alembic/versions_backup/w1b2c3d4e5f6_add_media_library_tables.py b/alembic/versions_backup/w1b2c3d4e5f6_add_media_library_tables.py index 795b0827..5725b683 100644 --- a/alembic/versions_backup/w1b2c3d4e5f6_add_media_library_tables.py +++ b/alembic/versions_backup/w1b2c3d4e5f6_add_media_library_tables.py @@ -5,16 +5,17 @@ Revises: v0a1b2c3d4e5 Create Date: 2026-01-06 10:00:00.000000 """ -from typing import Sequence, Union +from collections.abc import Sequence + +import sqlalchemy as sa from alembic import op -import sqlalchemy as sa # revision identifiers, used by Alembic. 
revision: str = "w1b2c3d4e5f6" -down_revision: Union[str, None] = "v0a1b2c3d4e5" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "v0a1b2c3d4e5" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/x2c3d4e5f6g7_make_marketplace_product_id_nullable.py b/alembic/versions_backup/x2c3d4e5f6g7_make_marketplace_product_id_nullable.py index b8e334bb..c2671077 100644 --- a/alembic/versions_backup/x2c3d4e5f6g7_make_marketplace_product_id_nullable.py +++ b/alembic/versions_backup/x2c3d4e5f6g7_make_marketplace_product_id_nullable.py @@ -9,9 +9,9 @@ Create Date: 2026-01-06 23:15:00.000000 from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision: str = "x2c3d4e5f6g7" diff --git a/alembic/versions_backup/y3d4e5f6g7h8_add_product_type_columns.py b/alembic/versions_backup/y3d4e5f6g7h8_add_product_type_columns.py index a7db7e52..37be39a2 100644 --- a/alembic/versions_backup/y3d4e5f6g7h8_add_product_type_columns.py +++ b/alembic/versions_backup/y3d4e5f6g7h8_add_product_type_columns.py @@ -11,9 +11,9 @@ Create Date: 2026-01-07 10:00:00.000000 from collections.abc import Sequence -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision: str = "y3d4e5f6g7h8" diff --git a/alembic/versions_backup/z4e5f6a7b8c9_add_multi_platform_support.py b/alembic/versions_backup/z4e5f6a7b8c9_add_multi_platform_support.py index ea8b53b4..ff61b342 100644 --- a/alembic/versions_backup/z4e5f6a7b8c9_add_multi_platform_support.py +++ b/alembic/versions_backup/z4e5f6a7b8c9_add_multi_platform_support.py @@ -15,16 +15,17 @@ This migration adds multi-platform support: """ import json -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa + from alembic import op # revision identifiers, used by Alembic. revision: str = "z4e5f6a7b8c9" -down_revision: Union[str, None] = "1b398cf45e85" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "1b398cf45e85" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None # Platform marketing page slugs (is_platform_page=True) PLATFORM_PAGE_SLUGS = [ @@ -303,7 +304,7 @@ def upgrade() -> None: ("cms", "cms_scheduling", "Page Scheduling", "Schedule page publish/unpublish", "settings", "clock", None, tier_ids.get("enterprise"), 6), ] - for category, code, name, description, ui_location, ui_icon, ui_route, minimum_tier_id, display_order in cms_features: + for category, code, name, description, ui_location, ui_icon, _ui_route, minimum_tier_id, display_order in cms_features: min_tier_val = minimum_tier_id if minimum_tier_id else "NULL" conn.execute( sa.text(f""" diff --git a/alembic/versions_backup/z5f6g7h8i9j0_add_loyalty_platform.py b/alembic/versions_backup/z5f6g7h8i9j0_add_loyalty_platform.py index 12e09ac9..644b3151 100644 --- a/alembic/versions_backup/z5f6g7h8i9j0_add_loyalty_platform.py +++ b/alembic/versions_backup/z5f6g7h8i9j0_add_loyalty_platform.py @@ -10,16 +10,17 @@ This migration adds the Loyalty+ platform: 3. 
Creates vendor default pages (about, rewards-catalog, terms, privacy) """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa + from alembic import op # revision identifiers, used by Alembic. revision: str = "z5f6g7h8i9j0" -down_revision: Union[str, None] = "z4e5f6a7b8c9" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "z4e5f6a7b8c9" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/z6g7h8i9j0k1_add_main_platform.py b/alembic/versions_backup/z6g7h8i9j0k1_add_main_platform.py index d25de645..8bef2f61 100644 --- a/alembic/versions_backup/z6g7h8i9j0k1_add_main_platform.py +++ b/alembic/versions_backup/z6g7h8i9j0k1_add_main_platform.py @@ -17,16 +17,17 @@ All other platforms are accessed via: - Production: {code}.lu or custom domain """ -from typing import Sequence, Union +from collections.abc import Sequence import sqlalchemy as sa + from alembic import op # revision identifiers, used by Alembic. revision: str = "z6g7h8i9j0k1" -down_revision: Union[str, None] = "z5f6g7h8i9j0" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None +down_revision: str | None = "z5f6g7h8i9j0" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None def upgrade() -> None: diff --git a/alembic/versions_backup/z7h8i9j0k1l2_fix_content_page_nullable_columns.py b/alembic/versions_backup/z7h8i9j0k1l2_fix_content_page_nullable_columns.py index 12a0fe1f..2b2bbde0 100644 --- a/alembic/versions_backup/z7h8i9j0k1l2_fix_content_page_nullable_columns.py +++ b/alembic/versions_backup/z7h8i9j0k1l2_fix_content_page_nullable_columns.py @@ -9,9 +9,9 @@ This migration: 2. 
Alters columns to be NOT NULL """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision = "z7h8i9j0k1l2" diff --git a/alembic/versions_backup/z8i9j0k1l2m3_add_sections_to_content_pages.py b/alembic/versions_backup/z8i9j0k1l2m3_add_sections_to_content_pages.py index dbf4da8d..89bc703f 100644 --- a/alembic/versions_backup/z8i9j0k1l2m3_add_sections_to_content_pages.py +++ b/alembic/versions_backup/z8i9j0k1l2m3_add_sections_to_content_pages.py @@ -9,9 +9,9 @@ The sections column stores hero, features, pricing, and cta configurations with TranslatableText pattern for i18n. """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision = "z8i9j0k1l2m3" diff --git a/alembic/versions_backup/z9j0k1l2m3n4_add_admin_platform_roles.py b/alembic/versions_backup/z9j0k1l2m3n4_add_admin_platform_roles.py index 0f5ca272..701ab60b 100644 --- a/alembic/versions_backup/z9j0k1l2m3n4_add_admin_platform_roles.py +++ b/alembic/versions_backup/z9j0k1l2m3n4_add_admin_platform_roles.py @@ -13,11 +13,10 @@ Platform admins are assigned to specific platforms via admin_platforms. Existing admins are migrated to super admins for backward compatibility. """ -from datetime import UTC, datetime -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision = "z9j0k1l2m3n4" diff --git a/alembic/versions_backup/za0k1l2m3n4o5_add_admin_menu_config.py b/alembic/versions_backup/za0k1l2m3n4o5_add_admin_menu_config.py index b2c18cba..419f876f 100644 --- a/alembic/versions_backup/za0k1l2m3n4o5_add_admin_menu_config.py +++ b/alembic/versions_backup/za0k1l2m3n4o5_add_admin_menu_config.py @@ -11,9 +11,9 @@ Adds configurable admin sidebar menus: - Mandatory items enforced at application level (companies, vendors, users, settings) """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision = "za0k1l2m3n4o5" diff --git a/alembic/versions_backup/zb1l2m3n4o5p6_add_frontend_type_to_menu_config.py b/alembic/versions_backup/zb1l2m3n4o5p6_add_frontend_type_to_menu_config.py index ded81419..7dbdb455 100644 --- a/alembic/versions_backup/zb1l2m3n4o5p6_add_frontend_type_to_menu_config.py +++ b/alembic/versions_backup/zb1l2m3n4o5p6_add_frontend_type_to_menu_config.py @@ -12,9 +12,9 @@ Also updates unique constraints to include frontend_type and adds a check constraint ensuring user_id scope is only used for admin frontend. """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision = "zb1l2m3n4o5p6" @@ -25,7 +25,7 @@ depends_on = None def upgrade() -> None: # 1. Create the enum type for frontend_type - frontend_type_enum = sa.Enum('admin', 'vendor', name='frontendtype') + frontend_type_enum = sa.Enum("admin", "vendor", name="frontendtype") frontend_type_enum.create(op.get_bind(), checkfirst=True) # 2. 
Add frontend_type column with default value @@ -33,7 +33,7 @@ def upgrade() -> None: "admin_menu_configs", sa.Column( "frontend_type", - sa.Enum('admin', 'vendor', name='frontendtype'), + sa.Enum("admin", "vendor", name="frontendtype"), nullable=False, server_default="admin", comment="Which frontend this config applies to (admin or vendor)", @@ -114,4 +114,4 @@ def downgrade() -> None: op.drop_column("admin_menu_configs", "frontend_type") # Drop the enum type - sa.Enum('admin', 'vendor', name='frontendtype').drop(op.get_bind(), checkfirst=True) + sa.Enum("admin", "vendor", name="frontendtype").drop(op.get_bind(), checkfirst=True) diff --git a/alembic/versions_backup/zc2m3n4o5p6q7_add_platform_modules_table.py b/alembic/versions_backup/zc2m3n4o5p6q7_add_platform_modules_table.py index 4f31fc16..680f78f0 100644 --- a/alembic/versions_backup/zc2m3n4o5p6q7_add_platform_modules_table.py +++ b/alembic/versions_backup/zc2m3n4o5p6q7_add_platform_modules_table.py @@ -13,9 +13,9 @@ This replaces the simpler Platform.settings["enabled_modules"] JSON approach for better auditability and query capabilities. """ -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. revision = "zc2m3n4o5p6q7" diff --git a/alembic/versions_backup/zd3n4o5p6q7r8_promote_cms_customers_to_core.py b/alembic/versions_backup/zd3n4o5p6q7r8_promote_cms_customers_to_core.py index 00af6d27..0aa8a840 100644 --- a/alembic/versions_backup/zd3n4o5p6q7r8_promote_cms_customers_to_core.py +++ b/alembic/versions_backup/zd3n4o5p6q7r8_promote_cms_customers_to_core.py @@ -9,10 +9,11 @@ This migration ensures that CMS and Customers modules are enabled for all platfo since they are now core modules that cannot be disabled. """ -from datetime import datetime, timezone +from datetime import UTC, datetime + +import sqlalchemy as sa from alembic import op -import sqlalchemy as sa # revision identifiers, used by Alembic. 
revision = "zd3n4o5p6q7r8" @@ -30,7 +31,7 @@ def upgrade() -> None: sa.text("SELECT id FROM platforms") ).fetchall() - now = datetime.now(timezone.utc) + now = datetime.now(UTC) core_modules = ["cms", "customers"] for (platform_id,) in platforms: @@ -80,4 +81,3 @@ def downgrade() -> None: break functionality. It just removes the explicit enabling done by upgrade. """ # No-op: We don't want to disable core modules - pass diff --git a/app/api/deps.py b/app/api/deps.py index ff154396..8be273e4 100644 --- a/app/api/deps.py +++ b/app/api/deps.py @@ -44,22 +44,22 @@ from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer from sqlalchemy.orm import Session from app.core.database import get_db +from app.modules.enums import FrontendType from app.modules.tenancy.exceptions import ( AdminRequiredException, InsufficientPermissionsException, InsufficientStorePermissionsException, InvalidTokenException, - UnauthorizedStoreAccessException, StoreNotFoundException, StoreOwnerOnlyException, + UnauthorizedStoreAccessException, ) +from app.modules.tenancy.models import Store +from app.modules.tenancy.models import User as UserModel from app.modules.tenancy.services.store_service import store_service from middleware.auth import AuthManager from middleware.rate_limiter import RateLimiter -from app.modules.tenancy.models import User as UserModel -from app.modules.tenancy.models import Store from models.schema.auth import UserContext -from app.modules.enums import FrontendType # Initialize dependencies security = HTTPBearer(auto_error=False) # auto_error=False prevents automatic 403 @@ -485,10 +485,9 @@ def require_module_access(module_code: str, frontend_type: FrontendType): if user_context.is_super_admin: # Super admins bypass module checks return user_context - else: - platform = getattr(request.state, "admin_platform", None) - if platform: - platform_id = platform.id + platform = getattr(request.state, "admin_platform", None) + if platform: + platform_id = platform.id 
except Exception: pass @@ -572,10 +571,10 @@ def require_menu_access(menu_item_id: str, frontend_type: "FrontendType"): Returns: Dependency function that validates menu access and returns User """ - from app.modules.registry import get_menu_item_module - from app.modules.service import module_service from app.modules.core.services.menu_service import menu_service from app.modules.enums import FrontendType as FT + from app.modules.registry import get_menu_item_module + from app.modules.service import module_service def _check_menu_access( request: Request, @@ -941,52 +940,82 @@ def get_current_merchant_optional( return None -def require_merchant_owner(merchant_id: int): +def get_merchant_for_current_user( + request: Request, + current_user: UserContext = Depends(get_current_merchant_api), + db: Session = Depends(get_db), +): """ - Dependency factory to require ownership of a specific merchant. + Get the active merchant owned by the current API user. - Usage: - @router.get("/merchants/{merchant_id}/subscriptions") - def list_subscriptions( - merchant_id: int, - user: UserContext = Depends(require_merchant_owner(merchant_id)) - ): - ... + Used by merchant API endpoints (header-only auth) that need the Merchant object. + Stores the merchant on request.state.merchant for endpoint use. 
+ + Returns: + Merchant ORM object + + Raises: + MerchantNotFoundException: If user owns no active merchants """ + from app.modules.tenancy.exceptions import MerchantNotFoundException + from app.modules.tenancy.models import Merchant - def _check_merchant_ownership( - request: Request, - credentials: HTTPAuthorizationCredentials | None = Depends(security), - merchant_token: str | None = Cookie(None), - db: Session = Depends(get_db), - ) -> UserContext: - user_context = get_current_merchant_from_cookie_or_header( - request, credentials, merchant_token, db + merchant = ( + db.query(Merchant) + .filter( + Merchant.owner_user_id == current_user.id, + Merchant.is_active == True, # noqa: E712 + ) + .order_by(Merchant.id) + .first() + ) + + if not merchant: + raise MerchantNotFoundException( + str(current_user.id), identifier_type="owner_user_id" ) - # Verify user owns this specific merchant - from app.modules.tenancy.models import Merchant - merchant = ( - db.query(Merchant) - .filter( - Merchant.id == merchant_id, - Merchant.owner_user_id == user_context.id, - Merchant.is_active == True, # noqa: E712 - ) - .first() + request.state.merchant = merchant + return merchant + + +def get_merchant_for_current_user_page( + request: Request, + current_user: UserContext = Depends(get_current_merchant_from_cookie_or_header), + db: Session = Depends(get_db), +): + """ + Get the active merchant owned by the current page user. + + Used by merchant page routes (cookie+header auth) that need the Merchant object. + Stores the merchant on request.state.merchant for endpoint use. 
+ + Returns: + Merchant ORM object + + Raises: + MerchantNotFoundException: If user owns no active merchants + """ + from app.modules.tenancy.exceptions import MerchantNotFoundException + from app.modules.tenancy.models import Merchant + + merchant = ( + db.query(Merchant) + .filter( + Merchant.owner_user_id == current_user.id, + Merchant.is_active == True, # noqa: E712 + ) + .order_by(Merchant.id) + .first() + ) + + if not merchant: + raise MerchantNotFoundException( + str(current_user.id), identifier_type="owner_user_id" ) - if not merchant: - raise InsufficientPermissionsException( - f"You do not own merchant {merchant_id}" - ) - - # Store merchant in request state for endpoint use - request.state.merchant = merchant - - return user_context - - return _check_merchant_ownership + request.state.merchant = merchant + return merchant # ============================================================================ diff --git a/app/api/main.py b/app/api/main.py index 828992d5..58b84d96 100644 --- a/app/api/main.py +++ b/app/api/main.py @@ -10,7 +10,7 @@ This module provides: from fastapi import APIRouter -from app.api.v1 import admin, merchant, platform, storefront, store, webhooks +from app.api.v1 import admin, merchant, platform, store, storefront, webhooks api_router = APIRouter() diff --git a/app/api/v1/__init__.py b/app/api/v1/__init__.py index 4432d581..20f8d69e 100644 --- a/app/api/v1/__init__.py +++ b/app/api/v1/__init__.py @@ -3,6 +3,6 @@ API Version 1 - All endpoints """ -from . import admin, merchant, storefront, store +from . 
import admin, merchant, store, storefront __all__ = ["admin", "merchant", "store", "storefront"] diff --git a/app/api/v1/admin/__init__.py b/app/api/v1/admin/__init__.py index 21f1e950..8be47cb5 100644 --- a/app/api/v1/admin/__init__.py +++ b/app/api/v1/admin/__init__.py @@ -25,7 +25,6 @@ IMPORTANT: from fastapi import APIRouter - # Create admin router router = APIRouter() diff --git a/app/api/v1/merchant/__init__.py b/app/api/v1/merchant/__init__.py index 0bffa2e3..e7b45c25 100644 --- a/app/api/v1/merchant/__init__.py +++ b/app/api/v1/merchant/__init__.py @@ -16,7 +16,6 @@ IMPORTANT: from fastapi import APIRouter - # Create merchant router router = APIRouter() diff --git a/app/api/v1/platform/signup.py b/app/api/v1/platform/signup.py index f0d8e657..234ec2c4 100644 --- a/app/api/v1/platform/signup.py +++ b/app/api/v1/platform/signup.py @@ -20,7 +20,9 @@ from sqlalchemy.orm import Session from app.core.database import get_db from app.core.environment import should_use_secure_cookies -from app.modules.marketplace.services.platform_signup_service import platform_signup_service +from app.modules.marketplace.services.platform_signup_service import ( + platform_signup_service, +) router = APIRouter() logger = logging.getLogger(__name__) diff --git a/app/core/lifespan.py b/app/core/lifespan.py index 0f813283..ce85512e 100644 --- a/app/core/lifespan.py +++ b/app/core/lifespan.py @@ -65,7 +65,7 @@ def get_migration_status(): try: from alembic.config import Config - alembic_cfg = Config("alembic.ini") + Config("alembic.ini") # This would need more implementation to actually check status # For now, just return a placeholder diff --git a/app/core/logging.py b/app/core/logging.py index f58b1b54..9d188927 100644 --- a/app/core/logging.py +++ b/app/core/logging.py @@ -102,7 +102,9 @@ def get_log_level_from_db(): """ try: from app.core.database import SessionLocal - from app.modules.core.services.admin_settings_service import admin_settings_service + from 
app.modules.core.services.admin_settings_service import ( + admin_settings_service, + ) db = SessionLocal() if not db: @@ -127,7 +129,9 @@ def get_rotation_settings_from_db(): """ try: from app.core.database import SessionLocal - from app.modules.core.services.admin_settings_service import admin_settings_service + from app.modules.core.services.admin_settings_service import ( + admin_settings_service, + ) db = SessionLocal() if not db: diff --git a/app/core/observability.py b/app/core/observability.py index f5ae0f29..defff19e 100644 --- a/app/core/observability.py +++ b/app/core/observability.py @@ -30,7 +30,7 @@ import logging import time from collections.abc import Callable from dataclasses import dataclass, field -from datetime import datetime, timezone +from datetime import UTC, datetime from enum import Enum from typing import Any @@ -61,7 +61,7 @@ class HealthCheckResult: message: str = "" latency_ms: float = 0.0 details: dict[str, Any] = field(default_factory=dict) - checked_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc)) + checked_at: datetime = field(default_factory=lambda: datetime.now(UTC)) @dataclass @@ -70,7 +70,7 @@ class AggregatedHealth: status: HealthStatus checks: list[HealthCheckResult] - timestamp: datetime = field(default_factory=lambda: datetime.now(timezone.utc)) + timestamp: datetime = field(default_factory=lambda: datetime.now(UTC)) def to_dict(self) -> dict[str, Any]: """Convert to dictionary for JSON response.""" diff --git a/app/handlers/stripe_webhook.py b/app/handlers/stripe_webhook.py index a93d27c8..e97ac463 100644 --- a/app/handlers/stripe_webhook.py +++ b/app/handlers/stripe_webhook.py @@ -10,7 +10,7 @@ Processes webhook events from Stripe: """ import logging -from datetime import datetime, timezone +from datetime import UTC, datetime import stripe from sqlalchemy.orm import Session @@ -19,10 +19,10 @@ from app.modules.billing.models import ( AddOnProduct, BillingHistory, MerchantSubscription, + StoreAddOn, 
StripeWebhookEvent, SubscriptionStatus, SubscriptionTier, - StoreAddOn, ) from app.modules.tenancy.models import Store, StorePlatform @@ -68,7 +68,7 @@ class StripeWebhookHandler: if existing.status == "processed": logger.info(f"Skipping duplicate event {event_id}") return {"status": "skipped", "reason": "duplicate"} - elif existing.status == "failed": + if existing.status == "failed": logger.info(f"Retrying previously failed event {event_id}") else: # Record the event @@ -86,14 +86,14 @@ class StripeWebhookHandler: if not handler: logger.debug(f"No handler for event type {event_type}") existing.status = "processed" - existing.processed_at = datetime.now(timezone.utc) + existing.processed_at = datetime.now(UTC) db.commit() return {"status": "ignored", "reason": f"no handler for {event_type}"} try: result = handler(db, event) existing.status = "processed" - existing.processed_at = datetime.now(timezone.utc) + existing.processed_at = datetime.now(UTC) db.commit() logger.info(f"Successfully processed event {event_id} ({event_type})") return {"status": "processed", "result": result} @@ -181,15 +181,15 @@ class StripeWebhookHandler: if session.subscription: stripe_sub = stripe.Subscription.retrieve(session.subscription) subscription.period_start = datetime.fromtimestamp( - stripe_sub.current_period_start, tz=timezone.utc + stripe_sub.current_period_start, tz=UTC ) subscription.period_end = datetime.fromtimestamp( - stripe_sub.current_period_end, tz=timezone.utc + stripe_sub.current_period_end, tz=UTC ) if stripe_sub.trial_end: subscription.trial_ends_at = datetime.fromtimestamp( - stripe_sub.trial_end, tz=timezone.utc + stripe_sub.trial_end, tz=UTC ) logger.info(f"Subscription checkout completed for merchant {merchant_id}") @@ -264,10 +264,10 @@ class StripeWebhookHandler: try: stripe_sub = stripe.Subscription.retrieve(session.subscription) period_start = datetime.fromtimestamp( - stripe_sub.current_period_start, tz=timezone.utc + stripe_sub.current_period_start, tz=UTC 
) period_end = datetime.fromtimestamp( - stripe_sub.current_period_end, tz=timezone.utc + stripe_sub.current_period_end, tz=UTC ) except Exception as e: logger.warning(f"Could not retrieve subscription period: {e}") @@ -320,10 +320,10 @@ class StripeWebhookHandler: subscription.stripe_subscription_id = stripe_sub.id subscription.status = self._map_stripe_status(stripe_sub.status) subscription.period_start = datetime.fromtimestamp( - stripe_sub.current_period_start, tz=timezone.utc + stripe_sub.current_period_start, tz=UTC ) subscription.period_end = datetime.fromtimestamp( - stripe_sub.current_period_end, tz=timezone.utc + stripe_sub.current_period_end, tz=UTC ) logger.info(f"Subscription created for merchant {subscription.merchant_id}") @@ -348,15 +348,15 @@ class StripeWebhookHandler: # Update status and period subscription.status = self._map_stripe_status(stripe_sub.status) subscription.period_start = datetime.fromtimestamp( - stripe_sub.current_period_start, tz=timezone.utc + stripe_sub.current_period_start, tz=UTC ) subscription.period_end = datetime.fromtimestamp( - stripe_sub.current_period_end, tz=timezone.utc + stripe_sub.current_period_end, tz=UTC ) # Handle cancellation if stripe_sub.cancel_at_period_end: - subscription.cancelled_at = datetime.now(timezone.utc) + subscription.cancelled_at = datetime.now(UTC) subscription.cancellation_reason = stripe_sub.metadata.get( "cancellation_reason", "user_request" ) @@ -407,7 +407,7 @@ class StripeWebhookHandler: # Cancel the subscription subscription.status = SubscriptionStatus.CANCELLED.value - subscription.cancelled_at = datetime.now(timezone.utc) + subscription.cancelled_at = datetime.now(UTC) # Find all stores for this merchant, then cancel their add-ons store_ids = [ @@ -429,7 +429,7 @@ class StripeWebhookHandler: addon_count = 0 for addon in cancelled_addons: addon.status = "cancelled" - addon.cancelled_at = datetime.now(timezone.utc) + addon.cancelled_at = datetime.now(UTC) addon_count += 1 if addon_count 
> 0: @@ -463,7 +463,7 @@ class StripeWebhookHandler: stripe_invoice_id=invoice.id, stripe_payment_intent_id=invoice.payment_intent, invoice_number=invoice.number, - invoice_date=datetime.fromtimestamp(invoice.created, tz=timezone.utc), + invoice_date=datetime.fromtimestamp(invoice.created, tz=UTC), subtotal_cents=invoice.subtotal, tax_cents=invoice.tax or 0, total_cents=invoice.total, @@ -550,8 +550,8 @@ class StripeWebhookHandler: merchant_id=subscription.merchant_id, stripe_invoice_id=invoice.id, invoice_number=invoice.number, - invoice_date=datetime.fromtimestamp(invoice.created, tz=timezone.utc), - due_date=datetime.fromtimestamp(invoice.due_date, tz=timezone.utc) + invoice_date=datetime.fromtimestamp(invoice.created, tz=UTC), + due_date=datetime.fromtimestamp(invoice.due_date, tz=UTC) if invoice.due_date else None, subtotal_cents=invoice.subtotal, diff --git a/app/modules/__init__.py b/app/modules/__init__.py index 6023549f..49b63de7 100644 --- a/app/modules/__init__.py +++ b/app/modules/__init__.py @@ -54,31 +54,31 @@ Usage: """ from app.modules.base import ModuleDefinition, ScheduledTask -from app.modules.task_base import ModuleTask, DatabaseTask -from app.modules.tasks import ( - discover_module_tasks, - build_beat_schedule, - parse_schedule, - get_module_task_routes, +from app.modules.events import ( + ModuleEvent, + ModuleEventBus, + ModuleEventData, + module_event_bus, ) from app.modules.registry import ( - MODULES, CORE_MODULES, - OPTIONAL_MODULES, INTERNAL_MODULES, + MODULES, + OPTIONAL_MODULES, get_core_module_codes, - get_optional_module_codes, get_internal_module_codes, get_module_tier, + get_optional_module_codes, is_core_module, is_internal_module, ) from app.modules.service import ModuleService, module_service -from app.modules.events import ( - ModuleEvent, - ModuleEventData, - ModuleEventBus, - module_event_bus, +from app.modules.task_base import DatabaseTask, ModuleTask +from app.modules.tasks import ( + build_beat_schedule, + 
discover_module_tasks, + get_module_task_routes, + parse_schedule, ) __all__ = [ diff --git a/app/modules/analytics/__init__.py b/app/modules/analytics/__init__.py index fc20eb1d..7cf1af11 100644 --- a/app/modules/analytics/__init__.py +++ b/app/modules/analytics/__init__.py @@ -25,7 +25,7 @@ def __getattr__(name: str): from app.modules.analytics.definition import analytics_module return analytics_module - elif name == "get_analytics_module_with_routers": + if name == "get_analytics_module_with_routers": from app.modules.analytics.definition import get_analytics_module_with_routers return get_analytics_module_with_routers diff --git a/app/modules/analytics/definition.py b/app/modules/analytics/definition.py index 796a4968..4eb715f6 100644 --- a/app/modules/analytics/definition.py +++ b/app/modules/analytics/definition.py @@ -6,7 +6,12 @@ Defines the analytics module including its features, menu items, route configurations, and self-contained module settings. """ -from app.modules.base import MenuItemDefinition, MenuSectionDefinition, ModuleDefinition, PermissionDefinition +from app.modules.base import ( + MenuItemDefinition, + MenuSectionDefinition, + ModuleDefinition, + PermissionDefinition, +) from app.modules.enums import FrontendType @@ -26,7 +31,9 @@ def _get_store_page_router(): def _get_feature_provider(): """Lazy import of feature provider to avoid circular imports.""" - from app.modules.analytics.services.analytics_features import analytics_feature_provider + from app.modules.analytics.services.analytics_features import ( + analytics_feature_provider, + ) return analytics_feature_provider diff --git a/app/modules/analytics/routes/__init__.py b/app/modules/analytics/routes/__init__.py index b08fa128..2fe42a11 100644 --- a/app/modules/analytics/routes/__init__.py +++ b/app/modules/analytics/routes/__init__.py @@ -24,7 +24,7 @@ def __getattr__(name: str): if name == "store_api_router": from app.modules.analytics.routes.api import store_router return 
store_router - elif name == "store_page_router": + if name == "store_page_router": from app.modules.analytics.routes.pages import store_router return store_router raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/app/modules/analytics/routes/api/store.py b/app/modules/analytics/routes/api/store.py index 2adfeaff..9796f130 100644 --- a/app/modules/analytics/routes/api/store.py +++ b/app/modules/analytics/routes/api/store.py @@ -16,14 +16,14 @@ from fastapi import APIRouter, Depends, Query from sqlalchemy.orm import Session from app.api.deps import get_current_store_api, get_db, require_module_access -from app.modules.billing.dependencies.feature_gate import RequireFeature -from app.modules.analytics.services import stats_service from app.modules.analytics.schemas import ( StoreAnalyticsCatalog, StoreAnalyticsImports, StoreAnalyticsInventory, StoreAnalyticsResponse, ) +from app.modules.analytics.services import stats_service +from app.modules.billing.dependencies.feature_gate import RequireFeature from app.modules.enums import FrontendType from app.modules.tenancy.models import User diff --git a/app/modules/analytics/routes/pages/admin.py b/app/modules/analytics/routes/pages/admin.py index 30b82f9a..c1985196 100644 --- a/app/modules/analytics/routes/pages/admin.py +++ b/app/modules/analytics/routes/pages/admin.py @@ -14,9 +14,9 @@ from sqlalchemy.orm import Session from app.api.deps import get_db, require_menu_access from app.modules.core.utils.page_context import get_admin_context -from app.templates_config import templates from app.modules.enums import FrontendType from app.modules.tenancy.models import User +from app.templates_config import templates router = APIRouter() diff --git a/app/modules/analytics/routes/pages/store.py b/app/modules/analytics/routes/pages/store.py index 22e41588..81126baf 100644 --- a/app/modules/analytics/routes/pages/store.py +++ b/app/modules/analytics/routes/pages/store.py @@ -12,10 +12,11 @@ from 
fastapi.responses import HTMLResponse from sqlalchemy.orm import Session from app.api.deps import get_current_store_from_cookie_or_header, get_db -from app.modules.core.services.platform_settings_service import platform_settings_service # noqa: MOD-004 - shared platform service +from app.modules.core.services.platform_settings_service import ( + platform_settings_service, # noqa: MOD-004 - shared platform service +) +from app.modules.tenancy.models import Store, User from app.templates_config import templates -from app.modules.tenancy.models import User -from app.modules.tenancy.models import Store logger = logging.getLogger(__name__) diff --git a/app/modules/analytics/schemas/__init__.py b/app/modules/analytics/schemas/__init__.py index 9fd08af6..8bcefddc 100644 --- a/app/modules/analytics/schemas/__init__.py +++ b/app/modules/analytics/schemas/__init__.py @@ -6,29 +6,29 @@ This is the canonical location for analytics schemas. """ from app.modules.analytics.schemas.stats import ( - StatsResponse, - MarketplaceStatsResponse, - ImportStatsResponse, - UserStatsResponse, - StoreStatsResponse, - ProductStatsResponse, - PlatformStatsResponse, - OrderStatsBasicResponse, AdminDashboardResponse, - StoreProductStats, - StoreOrderStats, - StoreCustomerStats, - StoreRevenueStats, - StoreInfo, - StoreDashboardStatsResponse, - StoreAnalyticsImports, - StoreAnalyticsCatalog, - StoreAnalyticsInventory, - StoreAnalyticsResponse, - ValidatorStats, CodeQualityDashboardStatsResponse, CustomerStatsResponse, + ImportStatsResponse, + MarketplaceStatsResponse, + OrderStatsBasicResponse, OrderStatsResponse, + PlatformStatsResponse, + ProductStatsResponse, + StatsResponse, + StoreAnalyticsCatalog, + StoreAnalyticsImports, + StoreAnalyticsInventory, + StoreAnalyticsResponse, + StoreCustomerStats, + StoreDashboardStatsResponse, + StoreInfo, + StoreOrderStats, + StoreProductStats, + StoreRevenueStats, + StoreStatsResponse, + UserStatsResponse, + ValidatorStats, ) __all__ = [ diff --git 
a/app/modules/analytics/schemas/stats.py b/app/modules/analytics/schemas/stats.py index 5d63da39..a8106bbc 100644 --- a/app/modules/analytics/schemas/stats.py +++ b/app/modules/analytics/schemas/stats.py @@ -23,7 +23,6 @@ from app.modules.core.schemas.dashboard import ( PlatformStatsResponse, ProductStatsResponse, StatsResponse, - UserStatsResponse, StoreCustomerStats, StoreDashboardStatsResponse, StoreInfo, @@ -31,9 +30,9 @@ from app.modules.core.schemas.dashboard import ( StoreProductStats, StoreRevenueStats, StoreStatsResponse, + UserStatsResponse, ) - # ============================================================================ # Store Analytics (Analytics-specific, not in core) # ============================================================================ diff --git a/app/modules/analytics/services/__init__.py b/app/modules/analytics/services/__init__.py index 1832ff70..8ab28cce 100644 --- a/app/modules/analytics/services/__init__.py +++ b/app/modules/analytics/services/__init__.py @@ -6,8 +6,8 @@ This is the canonical location for analytics services. 
""" from app.modules.analytics.services.stats_service import ( - stats_service, StatsService, + stats_service, ) __all__ = [ diff --git a/app/modules/analytics/services/analytics_features.py b/app/modules/analytics/services/analytics_features.py index cb6f7272..1b6c24bf 100644 --- a/app/modules/analytics/services/analytics_features.py +++ b/app/modules/analytics/services/analytics_features.py @@ -12,11 +12,8 @@ from __future__ import annotations import logging from typing import TYPE_CHECKING -from sqlalchemy import func - from app.modules.contracts.features import ( FeatureDeclaration, - FeatureProviderProtocol, FeatureScope, FeatureType, FeatureUsage, diff --git a/app/modules/analytics/services/stats_service.py b/app/modules/analytics/services/stats_service.py index 935cfb21..03f49362 100644 --- a/app/modules/analytics/services/stats_service.py +++ b/app/modules/analytics/services/stats_service.py @@ -18,14 +18,16 @@ from typing import Any from sqlalchemy import func from sqlalchemy.orm import Session -from app.modules.tenancy.exceptions import AdminOperationException, StoreNotFoundException +from app.modules.catalog.models import Product from app.modules.customers.models.customer import Customer from app.modules.inventory.models import Inventory from app.modules.marketplace.models import MarketplaceImportJob, MarketplaceProduct from app.modules.orders.models import Order -from app.modules.catalog.models import Product -from app.modules.tenancy.models import User -from app.modules.tenancy.models import Store +from app.modules.tenancy.exceptions import ( + AdminOperationException, + StoreNotFoundException, +) +from app.modules.tenancy.models import Store, User logger = logging.getLogger(__name__) diff --git a/app/modules/base.py b/app/modules/base.py index d2645ed3..3d148de7 100644 --- a/app/modules/base.py +++ b/app/modules/base.py @@ -36,9 +36,10 @@ Self-Contained Module Structure: └── locales/ # Translation files """ +from collections.abc import Callable from 
dataclasses import dataclass, field from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from fastapi import APIRouter @@ -52,7 +53,6 @@ if TYPE_CHECKING: from app.modules.enums import FrontendType - # ============================================================================= # Menu Item Definitions # ============================================================================= @@ -805,10 +805,9 @@ class ModuleDefinition: """ if self.is_core: return "core" - elif self.is_internal: + if self.is_internal: return "internal" - else: - return "optional" + return "optional" # ========================================================================= # Context Provider Methods diff --git a/app/modules/billing/__init__.py b/app/modules/billing/__init__.py index db870f67..c66f1d48 100644 --- a/app/modules/billing/__init__.py +++ b/app/modules/billing/__init__.py @@ -39,7 +39,7 @@ def __getattr__(name: str): if name == "billing_module": from app.modules.billing.definition import billing_module return billing_module - elif name == "get_billing_module_with_routers": + if name == "get_billing_module_with_routers": from app.modules.billing.definition import get_billing_module_with_routers return get_billing_module_with_routers raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/app/modules/billing/definition.py b/app/modules/billing/definition.py index 41d1bd54..f8e665ca 100644 --- a/app/modules/billing/definition.py +++ b/app/modules/billing/definition.py @@ -9,7 +9,13 @@ route configurations, and scheduled tasks. 
import logging from typing import Any -from app.modules.base import MenuItemDefinition, MenuSectionDefinition, ModuleDefinition, PermissionDefinition, ScheduledTask +from app.modules.base import ( + MenuItemDefinition, + MenuSectionDefinition, + ModuleDefinition, + PermissionDefinition, + ScheduledTask, +) from app.modules.enums import FrontendType logger = logging.getLogger(__name__) diff --git a/app/modules/billing/dependencies/__init__.py b/app/modules/billing/dependencies/__init__.py index 1d5c1f15..c4897712 100644 --- a/app/modules/billing/dependencies/__init__.py +++ b/app/modules/billing/dependencies/__init__.py @@ -2,9 +2,9 @@ """FastAPI dependencies for the billing module.""" from .feature_gate import ( - require_feature, - RequireFeature, FeatureNotAvailableError, + RequireFeature, + require_feature, ) __all__ = [ diff --git a/app/modules/billing/dependencies/feature_gate.py b/app/modules/billing/dependencies/feature_gate.py index bcd7233d..20750a62 100644 --- a/app/modules/billing/dependencies/feature_gate.py +++ b/app/modules/billing/dependencies/feature_gate.py @@ -37,7 +37,7 @@ Usage: import asyncio import functools import logging -from typing import Callable +from collections.abc import Callable from fastapi import Depends, HTTPException from sqlalchemy.orm import Session @@ -106,7 +106,7 @@ class RequireFeature: for feature_code in self.feature_codes: if feature_service.has_feature_for_store(db, store_id, feature_code): - return None + return # None of the features are available feature_code = self.feature_codes[0] @@ -204,8 +204,7 @@ def require_feature(*feature_codes: str) -> Callable: if asyncio.iscoroutinefunction(func): return async_wrapper - else: - return sync_wrapper + return sync_wrapper return decorator diff --git a/app/modules/billing/migrations/versions/billing_001_initial.py b/app/modules/billing/migrations/versions/billing_001_initial.py index eaa49168..c1ef20aa 100644 --- 
a/app/modules/billing/migrations/versions/billing_001_initial.py +++ b/app/modules/billing/migrations/versions/billing_001_initial.py @@ -4,9 +4,10 @@ Revision ID: billing_001 Revises: core_001 Create Date: 2026-02-07 """ -from alembic import op import sqlalchemy as sa +from alembic import op + revision = "billing_001" down_revision = "core_001" branch_labels = None diff --git a/app/modules/billing/models/subscription.py b/app/modules/billing/models/subscription.py index afe2708c..2e60a992 100644 --- a/app/modules/billing/models/subscription.py +++ b/app/modules/billing/models/subscription.py @@ -14,7 +14,6 @@ Feature limits per tier are in tier_feature_limit.py. """ import enum -from datetime import UTC, datetime from sqlalchemy import ( Boolean, diff --git a/app/modules/billing/routes/api/admin.py b/app/modules/billing/routes/api/admin.py index 203a3785..35a54431 100644 --- a/app/modules/billing/routes/api/admin.py +++ b/app/modules/billing/routes/api/admin.py @@ -17,8 +17,6 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_admin_api, require_module_access from app.core.database import get_db from app.exceptions import ResourceNotFoundException -from app.modules.billing.services import admin_subscription_service, subscription_service -from app.modules.enums import FrontendType from app.modules.billing.schemas import ( BillingHistoryListResponse, BillingHistoryWithMerchant, @@ -33,6 +31,11 @@ from app.modules.billing.schemas import ( SubscriptionTierResponse, SubscriptionTierUpdate, ) +from app.modules.billing.services import ( + admin_subscription_service, + subscription_service, +) +from app.modules.enums import FrontendType from models.schema.auth import UserContext logger = logging.getLogger(__name__) diff --git a/app/modules/billing/routes/api/admin_features.py b/app/modules/billing/routes/api/admin_features.py index 155cd975..7a7e1c06 100644 --- a/app/modules/billing/routes/api/admin_features.py +++ 
b/app/modules/billing/routes/api/admin_features.py @@ -17,16 +17,19 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_admin_api, require_module_access from app.core.database import get_db -from app.modules.billing.services.feature_aggregator import feature_aggregator -from app.modules.billing.models.tier_feature_limit import TierFeatureLimit, MerchantFeatureOverride from app.modules.billing.models import SubscriptionTier +from app.modules.billing.models.tier_feature_limit import ( + MerchantFeatureOverride, + TierFeatureLimit, +) from app.modules.billing.schemas import ( - FeatureDeclarationResponse, FeatureCatalogResponse, - TierFeatureLimitEntry, + FeatureDeclarationResponse, MerchantFeatureOverrideEntry, MerchantFeatureOverrideResponse, + TierFeatureLimitEntry, ) +from app.modules.billing.services.feature_aggregator import feature_aggregator from app.modules.enums import FrontendType from models.schema.auth import UserContext diff --git a/app/modules/billing/routes/api/merchant.py b/app/modules/billing/routes/api/merchant.py index 5ea5c7d0..14332319 100644 --- a/app/modules/billing/routes/api/merchant.py +++ b/app/modules/billing/routes/api/merchant.py @@ -8,9 +8,9 @@ Provides subscription management and billing operations for merchant owners: - Stripe checkout session creation - Invoice history -Authentication: merchant_token cookie or Authorization header. +Authentication: Authorization header (API-only, no cookies for CSRF safety). The user must own at least one active merchant (validated by -get_current_merchant_from_cookie_or_header). +get_merchant_for_current_user). Auto-discovered by the route system (merchant.py in routes/api/ triggers registration under /api/v1/merchants/billing/*). @@ -18,22 +18,26 @@ registration under /api/v1/merchants/billing/*). 
import logging -from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request -from pydantic import BaseModel +from fastapi import APIRouter, Depends, Path, Query, Request from sqlalchemy.orm import Session -from app.api.deps import get_current_merchant_from_cookie_or_header +from app.api.deps import get_merchant_for_current_user from app.core.database import get_db from app.modules.billing.schemas import ( + ChangeTierRequest, + ChangeTierResponse, CheckoutRequest, CheckoutResponse, + MerchantPortalAvailableTiersResponse, + MerchantPortalInvoiceListResponse, + MerchantPortalSubscriptionDetailResponse, + MerchantPortalSubscriptionItem, + MerchantPortalSubscriptionListResponse, MerchantSubscriptionResponse, TierInfo, ) from app.modules.billing.services.billing_service import billing_service from app.modules.billing.services.subscription_service import subscription_service -from app.modules.tenancy.models import Merchant -from models.schema.auth import UserContext logger = logging.getLogger(__name__) @@ -44,49 +48,15 @@ ROUTE_CONFIG = { router = APIRouter() -# ============================================================================ -# Helpers -# ============================================================================ - - -def _get_user_merchant(db: Session, user_context: UserContext) -> Merchant: - """ - Get the first active merchant owned by the current user. 
- - Args: - db: Database session - user_context: Authenticated user context - - Returns: - Merchant: The user's active merchant - - Raises: - HTTPException 404: If the user has no active merchants - """ - merchant = ( - db.query(Merchant) - .filter( - Merchant.owner_user_id == user_context.id, - Merchant.is_active == True, # noqa: E712 - ) - .first() - ) - - if not merchant: - raise HTTPException(status_code=404, detail="No active merchant found") - - return merchant - - # ============================================================================ # Subscription Endpoints # ============================================================================ -@router.get("/subscriptions") +@router.get("/subscriptions", response_model=MerchantPortalSubscriptionListResponse) def list_merchant_subscriptions( request: Request, - current_user: UserContext = Depends(get_current_merchant_from_cookie_or_header), + merchant=Depends(get_merchant_for_current_user), db: Session = Depends(get_db), ): """ @@ -95,7 +65,6 @@ def list_merchant_subscriptions( Returns subscriptions across all platforms the merchant is subscribed to, including tier information and status. 
""" - merchant = _get_user_merchant(db, current_user) subscriptions = subscription_service.get_merchant_subscriptions(db, merchant.id) items = [] @@ -104,16 +73,21 @@ def list_merchant_subscriptions( data["tier"] = sub.tier.code if sub.tier else None data["tier_name"] = sub.tier.name if sub.tier else None data["platform_name"] = sub.platform.name if sub.platform else "" - items.append(data) + items.append(MerchantPortalSubscriptionItem(**data)) - return {"subscriptions": items, "total": len(items)} + return MerchantPortalSubscriptionListResponse( + subscriptions=items, total=len(items) + ) -@router.get("/subscriptions/{platform_id}") +@router.get( + "/subscriptions/{platform_id}", + response_model=MerchantPortalSubscriptionDetailResponse, +) def get_merchant_subscription( request: Request, platform_id: int = Path(..., description="Platform ID"), - current_user: UserContext = Depends(get_current_merchant_from_cookie_or_header), + merchant=Depends(get_merchant_for_current_user), db: Session = Depends(get_db), ): """ @@ -121,21 +95,25 @@ def get_merchant_subscription( Returns the subscription with tier information for the given platform. 
""" - merchant = _get_user_merchant(db, current_user) subscription = subscription_service.get_merchant_subscription( db, merchant.id, platform_id ) if not subscription: - raise HTTPException( - status_code=404, - detail=f"No subscription found for platform {platform_id}", + from app.exceptions.base import ResourceNotFoundException + + raise ResourceNotFoundException( + resource_type="Subscription", + identifier=f"merchant={merchant.id}, platform={platform_id}", + error_code="SUBSCRIPTION_NOT_FOUND", ) sub_data = MerchantSubscriptionResponse.model_validate(subscription).model_dump() sub_data["tier"] = subscription.tier.code if subscription.tier else None sub_data["tier_name"] = subscription.tier.name if subscription.tier else None - sub_data["platform_name"] = subscription.platform.name if subscription.platform else "" + sub_data["platform_name"] = ( + subscription.platform.name if subscription.platform else "" + ) tier_info = None if subscription.tier: @@ -146,20 +124,25 @@ def get_merchant_subscription( description=tier.description, price_monthly_cents=tier.price_monthly_cents, price_annual_cents=tier.price_annual_cents, - feature_codes=tier.get_feature_codes() if hasattr(tier, "get_feature_codes") else [], + feature_codes=( + tier.get_feature_codes() if hasattr(tier, "get_feature_codes") else [] + ), ) - return { - "subscription": sub_data, - "tier": tier_info, - } + return MerchantPortalSubscriptionDetailResponse( + subscription=MerchantPortalSubscriptionItem(**sub_data), + tier=tier_info, + ) -@router.get("/subscriptions/{platform_id}/tiers") +@router.get( + "/subscriptions/{platform_id}/tiers", + response_model=MerchantPortalAvailableTiersResponse, +) def get_available_tiers( request: Request, platform_id: int = Path(..., description="Platform ID"), - current_user: UserContext = Depends(get_current_merchant_from_cookie_or_header), + merchant=Depends(get_merchant_for_current_user), db: Session = Depends(get_db), ): """ @@ -168,7 +151,6 @@ def 
get_available_tiers( Returns all public tiers with upgrade/downgrade flags relative to the merchant's current tier. """ - merchant = _get_user_merchant(db, current_user) subscription = subscription_service.get_merchant_subscription( db, merchant.id, platform_id ) @@ -182,25 +164,21 @@ def get_available_tiers( if subscription and subscription.tier: current_tier_code = subscription.tier.code - return { - "tiers": tier_list, - "current_tier": current_tier_code, - } + return MerchantPortalAvailableTiersResponse( + tiers=tier_list, + current_tier=current_tier_code, + ) -class ChangeTierRequest(BaseModel): - """Request for changing subscription tier.""" - - tier_code: str - is_annual: bool = False - - -@router.post("/subscriptions/{platform_id}/change-tier") +@router.post( + "/subscriptions/{platform_id}/change-tier", + response_model=ChangeTierResponse, +) def change_subscription_tier( request: Request, tier_data: ChangeTierRequest, platform_id: int = Path(..., description="Platform ID"), - current_user: UserContext = Depends(get_current_merchant_from_cookie_or_header), + merchant=Depends(get_merchant_for_current_user), db: Session = Depends(get_db), ): """ @@ -208,7 +186,6 @@ def change_subscription_tier( Handles both Stripe-connected and non-Stripe subscriptions. """ - merchant = _get_user_merchant(db, current_user) result = billing_service.change_tier( db, merchant.id, platform_id, tier_data.tier_code, tier_data.is_annual ) @@ -230,7 +207,7 @@ def create_checkout_session( request: Request, checkout_data: CheckoutRequest, platform_id: int = Path(..., description="Platform ID"), - current_user: UserContext = Depends(get_current_merchant_from_cookie_or_header), + merchant=Depends(get_merchant_for_current_user), db: Session = Depends(get_db), ): """ @@ -239,12 +216,14 @@ def create_checkout_session( Starts a new subscription or upgrades an existing one to the requested tier. 
""" - merchant = _get_user_merchant(db, current_user) - # Build success/cancel URLs from request base_url = str(request.base_url).rstrip("/") - success_url = f"{base_url}/merchants/billing/subscriptions/{platform_id}?checkout=success" - cancel_url = f"{base_url}/merchants/billing/subscriptions/{platform_id}?checkout=cancelled" + success_url = ( + f"{base_url}/merchants/billing/subscriptions/{platform_id}?checkout=success" + ) + cancel_url = ( + f"{base_url}/merchants/billing/subscriptions/{platform_id}?checkout=cancelled" + ) result = billing_service.create_checkout_session( db=db, @@ -274,12 +253,12 @@ def create_checkout_session( # ============================================================================ -@router.get("/invoices") +@router.get("/invoices", response_model=MerchantPortalInvoiceListResponse) def get_invoices( request: Request, skip: int = Query(0, ge=0, description="Number of records to skip"), limit: int = Query(20, ge=1, le=100, description="Max records to return"), - current_user: UserContext = Depends(get_current_merchant_from_cookie_or_header), + merchant=Depends(get_merchant_for_current_user), db: Session = Depends(get_db), ): """ @@ -287,14 +266,12 @@ def get_invoices( Returns paginated billing history entries ordered by date descending. 
""" - merchant = _get_user_merchant(db, current_user) - invoices, total = billing_service.get_invoices( db, merchant.id, skip=skip, limit=limit ) - return { - "invoices": [ + return MerchantPortalInvoiceListResponse( + invoices=[ { "id": inv.id, "invoice_number": inv.invoice_number, @@ -309,11 +286,13 @@ def get_invoices( "pdf_url": inv.invoice_pdf_url, "hosted_url": inv.hosted_invoice_url, "description": inv.description, - "created_at": inv.created_at.isoformat() if inv.created_at else None, + "created_at": ( + inv.created_at.isoformat() if inv.created_at else None + ), } for inv in invoices ], - "total": total, - "skip": skip, - "limit": limit, - } + total=total, + skip=skip, + limit=limit, + ) diff --git a/app/modules/billing/routes/api/platform.py b/app/modules/billing/routes/api/platform.py index 8ad1238b..586108d6 100644 --- a/app/modules/billing/routes/api/platform.py +++ b/app/modules/billing/routes/api/platform.py @@ -14,8 +14,10 @@ from sqlalchemy.orm import Session from app.core.database import get_db from app.exceptions import ResourceNotFoundException -from app.modules.billing.services.platform_pricing_service import platform_pricing_service -from app.modules.billing.models import TierCode, SubscriptionTier +from app.modules.billing.models import SubscriptionTier, TierCode +from app.modules.billing.services.platform_pricing_service import ( + platform_pricing_service, +) router = APIRouter(prefix="/pricing") diff --git a/app/modules/billing/routes/api/store.py b/app/modules/billing/routes/api/store.py index cb523219..ce94f2ce 100644 --- a/app/modules/billing/routes/api/store.py +++ b/app/modules/billing/routes/api/store.py @@ -14,7 +14,6 @@ from pydantic import BaseModel from sqlalchemy.orm import Session from app.api.deps import get_current_store_api, require_module_access -from app.core.config import settings from app.core.database import get_db from app.modules.billing.services import billing_service, subscription_service from app.modules.enums 
import FrontendType @@ -218,9 +217,9 @@ def get_invoices( # ============================================================================ # Include all billing-related store sub-routers -from app.modules.billing.routes.api.store_features import store_features_router -from app.modules.billing.routes.api.store_checkout import store_checkout_router from app.modules.billing.routes.api.store_addons import store_addons_router +from app.modules.billing.routes.api.store_checkout import store_checkout_router +from app.modules.billing.routes.api.store_features import store_features_router from app.modules.billing.routes.api.store_usage import store_usage_router store_router.include_router(store_features_router, tags=["store-features"]) diff --git a/app/modules/billing/routes/api/store_checkout.py b/app/modules/billing/routes/api/store_checkout.py index d79b2a91..616850f8 100644 --- a/app/modules/billing/routes/api/store_checkout.py +++ b/app/modules/billing/routes/api/store_checkout.py @@ -22,7 +22,7 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_store_api, require_module_access from app.core.config import settings from app.core.database import get_db -from app.modules.billing.services import billing_service, subscription_service +from app.modules.billing.services import billing_service from app.modules.enums import FrontendType from models.schema.auth import UserContext diff --git a/app/modules/billing/routes/pages/admin.py b/app/modules/billing/routes/pages/admin.py index f1c97553..24e94920 100644 --- a/app/modules/billing/routes/pages/admin.py +++ b/app/modules/billing/routes/pages/admin.py @@ -14,9 +14,9 @@ from sqlalchemy.orm import Session from app.api.deps import get_db, require_menu_access from app.modules.core.utils.page_context import get_admin_context -from app.templates_config import templates from app.modules.enums import FrontendType from app.modules.tenancy.models import User +from app.templates_config import templates router = 
APIRouter() diff --git a/app/modules/billing/routes/pages/merchant.py b/app/modules/billing/routes/pages/merchant.py index 0b9174d6..40893846 100644 --- a/app/modules/billing/routes/pages/merchant.py +++ b/app/modules/billing/routes/pages/merchant.py @@ -124,7 +124,7 @@ async def merchant_subscription_detail_page( # ============================================================================ -@router.get("/billing", response_class=HTMLResponse, include_in_schema=False) +@router.get("/invoices", response_class=HTMLResponse, include_in_schema=False) async def merchant_billing_history_page( request: Request, current_user: UserContext = Depends(get_current_merchant_from_cookie_or_header), diff --git a/app/modules/billing/routes/pages/store.py b/app/modules/billing/routes/pages/store.py index c6e70370..b5e4a555 100644 --- a/app/modules/billing/routes/pages/store.py +++ b/app/modules/billing/routes/pages/store.py @@ -13,8 +13,8 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_store_from_cookie_or_header, get_db from app.modules.core.utils.page_context import get_store_context -from app.templates_config import templates from app.modules.tenancy.models import User +from app.templates_config import templates router = APIRouter() diff --git a/app/modules/billing/schemas/__init__.py b/app/modules/billing/schemas/__init__.py index 244b6490..7a2522f8 100644 --- a/app/modules/billing/schemas/__init__.py +++ b/app/modules/billing/schemas/__init__.py @@ -12,51 +12,59 @@ Usage: ) """ +from app.modules.billing.schemas.billing import ( + BillingHistoryListResponse, + # Billing History schemas + BillingHistoryResponse, + BillingHistoryWithMerchant, + # Checkout & Portal schemas + CheckoutRequest, + CheckoutResponse, + FeatureCatalogResponse, + # Feature Catalog schemas + FeatureDeclarationResponse, + # Merchant Feature Override schemas + MerchantFeatureOverrideEntry, + MerchantFeatureOverrideResponse, + MerchantSubscriptionAdminCreate, + # Merchant 
Subscription Admin schemas + MerchantSubscriptionAdminResponse, + MerchantSubscriptionAdminUpdate, + MerchantSubscriptionListResponse, + MerchantSubscriptionWithMerchant, + PortalSessionResponse, + # Stats schemas + SubscriptionStatsResponse, + SubscriptionTierBase, + SubscriptionTierCreate, + SubscriptionTierListResponse, + SubscriptionTierResponse, + SubscriptionTierUpdate, + # Subscription Tier Admin schemas + TierFeatureLimitEntry, +) from app.modules.billing.schemas.subscription import ( - # Tier schemas - TierFeatureLimitResponse, - TierInfo, - # Subscription schemas - MerchantSubscriptionCreate, - MerchantSubscriptionUpdate, - MerchantSubscriptionResponse, - MerchantSubscriptionStatusResponse, + ChangeTierRequest, + ChangeTierResponse, + FeatureCheckResponse, # Feature summary schemas FeatureSummaryResponse, # Limit check schemas LimitCheckResult, - FeatureCheckResponse, -) -from app.modules.billing.schemas.billing import ( - # Subscription Tier Admin schemas - TierFeatureLimitEntry, - SubscriptionTierBase, - SubscriptionTierCreate, - SubscriptionTierUpdate, - SubscriptionTierResponse, - SubscriptionTierListResponse, - # Merchant Subscription Admin schemas - MerchantSubscriptionAdminResponse, - MerchantSubscriptionWithMerchant, - MerchantSubscriptionListResponse, - MerchantSubscriptionAdminCreate, - MerchantSubscriptionAdminUpdate, - # Merchant Feature Override schemas - MerchantFeatureOverrideEntry, - MerchantFeatureOverrideResponse, - # Billing History schemas - BillingHistoryResponse, - BillingHistoryWithMerchant, - BillingHistoryListResponse, - # Checkout & Portal schemas - CheckoutRequest, - CheckoutResponse, - PortalSessionResponse, - # Stats schemas - SubscriptionStatsResponse, - # Feature Catalog schemas - FeatureDeclarationResponse, - FeatureCatalogResponse, + MerchantPortalAvailableTiersResponse, + MerchantPortalInvoiceListResponse, + MerchantPortalSubscriptionDetailResponse, + # Merchant portal schemas + MerchantPortalSubscriptionItem, + 
MerchantPortalSubscriptionListResponse, + # Subscription schemas + MerchantSubscriptionCreate, + MerchantSubscriptionResponse, + MerchantSubscriptionStatusResponse, + MerchantSubscriptionUpdate, + # Tier schemas + TierFeatureLimitResponse, + TierInfo, ) __all__ = [ @@ -73,6 +81,14 @@ __all__ = [ # Limit check schemas (subscription.py) "LimitCheckResult", "FeatureCheckResponse", + # Merchant portal schemas (subscription.py) + "MerchantPortalSubscriptionItem", + "MerchantPortalSubscriptionListResponse", + "MerchantPortalSubscriptionDetailResponse", + "MerchantPortalAvailableTiersResponse", + "ChangeTierRequest", + "ChangeTierResponse", + "MerchantPortalInvoiceListResponse", # Subscription Tier Admin schemas (billing.py) "TierFeatureLimitEntry", "SubscriptionTierBase", diff --git a/app/modules/billing/schemas/billing.py b/app/modules/billing/schemas/billing.py index 580c4323..79a365c4 100644 --- a/app/modules/billing/schemas/billing.py +++ b/app/modules/billing/schemas/billing.py @@ -9,7 +9,6 @@ from datetime import datetime from pydantic import BaseModel, ConfigDict, Field, field_validator - # ============================================================================ # Subscription Tier Schemas # ============================================================================ diff --git a/app/modules/billing/schemas/subscription.py b/app/modules/billing/schemas/subscription.py index c1f3c31d..017cb716 100644 --- a/app/modules/billing/schemas/subscription.py +++ b/app/modules/billing/schemas/subscription.py @@ -9,7 +9,6 @@ from datetime import datetime from pydantic import BaseModel, ConfigDict, Field - # ============================================================================ # Tier Information Schemas # ============================================================================ @@ -141,3 +140,82 @@ class FeatureCheckResponse(BaseModel): message: str | None = None +# ============================================================================ +# Merchant Portal 
Schemas (for merchant-facing routes) +# ============================================================================ + + +class MerchantPortalSubscriptionItem(BaseModel): + """Subscription item with tier and platform names for merchant portal list.""" + + model_config = ConfigDict(from_attributes=True) + + # Base subscription fields (mirror MerchantSubscriptionResponse) + id: int + merchant_id: int + platform_id: int + tier_id: int | None + status: str + is_annual: bool + period_start: datetime + period_end: datetime + trial_ends_at: datetime | None + stripe_customer_id: str | None = None + cancelled_at: datetime | None = None + is_active: bool + is_trial: bool + trial_days_remaining: int | None + created_at: datetime + updated_at: datetime + + # Enrichment fields + tier: str | None = None + tier_name: str | None = None + platform_name: str = "" + + +class MerchantPortalSubscriptionListResponse(BaseModel): + """Paginated subscription list for merchant portal.""" + + subscriptions: list[MerchantPortalSubscriptionItem] + total: int + + +class MerchantPortalSubscriptionDetailResponse(BaseModel): + """Subscription detail with tier info for merchant portal.""" + + subscription: MerchantPortalSubscriptionItem + tier: TierInfo | None = None + + +class MerchantPortalAvailableTiersResponse(BaseModel): + """Available tiers for a platform.""" + + tiers: list[dict] + current_tier: str | None = None + + +class ChangeTierRequest(BaseModel): + """Request for changing subscription tier.""" + + tier_code: str + is_annual: bool = False + + +class ChangeTierResponse(BaseModel): + """Response after tier change.""" + + message: str + new_tier: str | None = None + effective_immediately: bool = False + + +class MerchantPortalInvoiceListResponse(BaseModel): + """Paginated invoice list for merchant portal.""" + + invoices: list[dict] + total: int + skip: int + limit: int + + diff --git a/app/modules/billing/services/__init__.py b/app/modules/billing/services/__init__.py index 
b7c7ed3f..dc19b3df 100644 --- a/app/modules/billing/services/__init__.py +++ b/app/modules/billing/services/__init__.py @@ -5,13 +5,13 @@ Billing module services. Provides subscription management, Stripe integration, and admin operations. """ -from app.modules.billing.services.subscription_service import ( - SubscriptionService, - subscription_service, -) -from app.modules.billing.services.stripe_service import ( - StripeService, - stripe_service, +from app.modules.billing.exceptions import ( + BillingServiceError, + NoActiveSubscriptionError, + PaymentSystemNotConfiguredError, + StripePriceNotConfiguredError, + SubscriptionNotCancelledError, + TierNotFoundError, ) from app.modules.billing.services.admin_subscription_service import ( AdminSubscriptionService, @@ -21,34 +21,34 @@ from app.modules.billing.services.billing_service import ( BillingService, billing_service, ) -from app.modules.billing.exceptions import ( - BillingServiceError, - PaymentSystemNotConfiguredError, - TierNotFoundError, - StripePriceNotConfiguredError, - NoActiveSubscriptionError, - SubscriptionNotCancelledError, +from app.modules.billing.services.capacity_forecast_service import ( + CapacityForecastService, + capacity_forecast_service, ) from app.modules.billing.services.feature_service import ( FeatureService, feature_service, ) -from app.modules.billing.services.capacity_forecast_service import ( - CapacityForecastService, - capacity_forecast_service, -) from app.modules.billing.services.platform_pricing_service import ( PlatformPricingService, platform_pricing_service, ) +from app.modules.billing.services.stripe_service import ( + StripeService, + stripe_service, +) +from app.modules.billing.services.subscription_service import ( + SubscriptionService, + subscription_service, +) from app.modules.billing.services.usage_service import ( - UsageService, - usage_service, - UsageData, - UsageMetricData, + LimitCheckData, TierInfoData, UpgradeTierData, - LimitCheckData, + UsageData, + 
UsageMetricData, + UsageService, + usage_service, ) __all__ = [ diff --git a/app/modules/billing/services/admin_subscription_service.py b/app/modules/billing/services/admin_subscription_service.py index 8357a0ad..105d1b1a 100644 --- a/app/modules/billing/services/admin_subscription_service.py +++ b/app/modules/billing/services/admin_subscription_service.py @@ -324,7 +324,7 @@ class AdminSubscriptionService: .all() ) - tier_distribution = {tier_name: count for tier_name, count in tier_counts} + tier_distribution = dict(tier_counts) # Calculate MRR (Monthly Recurring Revenue) mrr_cents = 0 diff --git a/app/modules/billing/services/billing_features.py b/app/modules/billing/services/billing_features.py index 92118eb8..6d13cff2 100644 --- a/app/modules/billing/services/billing_features.py +++ b/app/modules/billing/services/billing_features.py @@ -13,7 +13,6 @@ from typing import TYPE_CHECKING from app.modules.contracts.features import ( FeatureDeclaration, - FeatureProviderProtocol, FeatureScope, FeatureType, FeatureUsage, diff --git a/app/modules/billing/services/billing_service.py b/app/modules/billing/services/billing_service.py index b3e108d3..f7db848e 100644 --- a/app/modules/billing/services/billing_service.py +++ b/app/modules/billing/services/billing_service.py @@ -15,15 +15,6 @@ from datetime import datetime from sqlalchemy.orm import Session -from app.modules.billing.services.stripe_service import stripe_service -from app.modules.billing.services.subscription_service import subscription_service -from app.modules.billing.models import ( - AddOnProduct, - BillingHistory, - MerchantSubscription, - SubscriptionTier, - StoreAddOn, -) from app.modules.billing.exceptions import ( BillingServiceError, NoActiveSubscriptionError, @@ -32,6 +23,15 @@ from app.modules.billing.exceptions import ( SubscriptionNotCancelledError, TierNotFoundError, ) +from app.modules.billing.models import ( + AddOnProduct, + BillingHistory, + MerchantSubscription, + StoreAddOn, + 
SubscriptionTier, +) +from app.modules.billing.services.stripe_service import stripe_service +from app.modules.billing.services.subscription_service import subscription_service logger = logging.getLogger(__name__) diff --git a/app/modules/billing/services/capacity_forecast_service.py b/app/modules/billing/services/capacity_forecast_service.py index fa4a73bb..54d8e180 100644 --- a/app/modules/billing/services/capacity_forecast_service.py +++ b/app/modules/billing/services/capacity_forecast_service.py @@ -49,7 +49,9 @@ class CapacityForecastService: Should be called by a daily background job. """ from app.modules.cms.services.media_service import media_service - from app.modules.monitoring.services.platform_health_service import platform_health_service + from app.modules.monitoring.services.platform_health_service import ( + platform_health_service, + ) now = datetime.now(UTC) today = now.replace(hour=0, minute=0, second=0, microsecond=0) @@ -234,7 +236,9 @@ class CapacityForecastService: Returns prioritized list of recommendations. 
""" - from app.modules.monitoring.services.platform_health_service import platform_health_service + from app.modules.monitoring.services.platform_health_service import ( + platform_health_service, + ) recommendations = [] diff --git a/app/modules/billing/services/feature_aggregator.py b/app/modules/billing/services/feature_aggregator.py index 7192d092..d51e4da9 100644 --- a/app/modules/billing/services/feature_aggregator.py +++ b/app/modules/billing/services/feature_aggregator.py @@ -229,7 +229,7 @@ class FeatureAggregatorService: if decl.scope == FeatureScope.STORE and store_id is not None: usage = self.get_store_usage(db, store_id) return usage.get(feature_code) - elif decl.scope == FeatureScope.MERCHANT and merchant_id is not None and platform_id is not None: + if decl.scope == FeatureScope.MERCHANT and merchant_id is not None and platform_id is not None: usage = self.get_merchant_usage(db, merchant_id, platform_id) return usage.get(feature_code) diff --git a/app/modules/billing/services/feature_service.py b/app/modules/billing/services/feature_service.py index 85c30a25..02409cc6 100644 --- a/app/modules/billing/services/feature_service.py +++ b/app/modules/billing/services/feature_service.py @@ -33,9 +33,8 @@ from app.modules.billing.models import ( MerchantFeatureOverride, MerchantSubscription, SubscriptionTier, - TierFeatureLimit, ) -from app.modules.contracts.features import FeatureScope, FeatureType +from app.modules.contracts.features import FeatureType logger = logging.getLogger(__name__) @@ -397,7 +396,6 @@ class FeatureService: } # Get all usage at once - store_usage = {} merchant_usage = feature_aggregator.get_merchant_usage(db, merchant_id, platform_id) summaries = [] diff --git a/app/modules/billing/services/stripe_service.py b/app/modules/billing/services/stripe_service.py index a58d63a4..b5630a68 100644 --- a/app/modules/billing/services/stripe_service.py +++ b/app/modules/billing/services/stripe_service.py @@ -11,7 +11,6 @@ Provides: """ import 
logging -from datetime import datetime import stripe from sqlalchemy.orm import Session @@ -22,10 +21,7 @@ from app.modules.billing.exceptions import ( WebhookVerificationException, ) from app.modules.billing.models import ( - BillingHistory, MerchantSubscription, - SubscriptionStatus, - SubscriptionTier, ) from app.modules.tenancy.models import Store diff --git a/app/modules/billing/services/subscription_service.py b/app/modules/billing/services/subscription_service.py index a0d7fb76..7024fd26 100644 --- a/app/modules/billing/services/subscription_service.py +++ b/app/modules/billing/services/subscription_service.py @@ -29,8 +29,7 @@ from datetime import UTC, datetime, timedelta from sqlalchemy.orm import Session, joinedload from app.modules.billing.exceptions import ( - SubscriptionNotFoundException, - TierLimitExceededException, # Re-exported for backward compatibility + SubscriptionNotFoundException, # Re-exported for backward compatibility ) from app.modules.billing.models import ( MerchantSubscription, diff --git a/app/modules/billing/services/usage_service.py b/app/modules/billing/services/usage_service.py index 9724c9d2..125bd655 100644 --- a/app/modules/billing/services/usage_service.py +++ b/app/modules/billing/services/usage_service.py @@ -92,7 +92,9 @@ class UsageService: self, db: Session, store_id: int ) -> MerchantSubscription | None: """Resolve store_id to MerchantSubscription.""" - from app.modules.billing.services.subscription_service import subscription_service + from app.modules.billing.services.subscription_service import ( + subscription_service, + ) return subscription_service.get_subscription_for_store(db, store_id) def get_store_usage(self, db: Session, store_id: int) -> UsageData: diff --git a/app/modules/billing/tasks/__init__.py b/app/modules/billing/tasks/__init__.py index 4efa7ef3..20e98c1b 100644 --- a/app/modules/billing/tasks/__init__.py +++ b/app/modules/billing/tasks/__init__.py @@ -12,10 +12,10 @@ Note: capture_capacity_snapshot 
moved to monitoring module. """ from app.modules.billing.tasks.subscription import ( - reset_period_counters, check_trial_expirations, - sync_stripe_status, cleanup_stale_subscriptions, + reset_period_counters, + sync_stripe_status, ) __all__ = [ diff --git a/app/modules/billing/tests/integration/test_admin_routes.py b/app/modules/billing/tests/integration/test_admin_routes.py index 86023941..508f9d28 100644 --- a/app/modules/billing/tests/integration/test_admin_routes.py +++ b/app/modules/billing/tests/integration/test_admin_routes.py @@ -21,7 +21,6 @@ from app.modules.billing.models import ( ) from app.modules.tenancy.models import Merchant, Platform, User - # ============================================================================ # Fixtures # ============================================================================ diff --git a/app/modules/billing/tests/integration/test_merchant_routes.py b/app/modules/billing/tests/integration/test_merchant_routes.py index 434cf444..5aa0dc12 100644 --- a/app/modules/billing/tests/integration/test_merchant_routes.py +++ b/app/modules/billing/tests/integration/test_merchant_routes.py @@ -15,7 +15,7 @@ from unittest.mock import patch import pytest -from app.api.deps import get_current_merchant_from_cookie_or_header +from app.api.deps import get_current_merchant_api, get_merchant_for_current_user from app.modules.billing.models import ( BillingHistory, MerchantSubscription, @@ -26,7 +26,6 @@ from app.modules.tenancy.models import Merchant, Platform, User from main import app from models.schema.auth import UserContext - # ============================================================================ # Fixtures # ============================================================================ @@ -156,7 +155,7 @@ def merch_invoices(db, merch_merchant): @pytest.fixture def merch_auth_headers(merch_owner, merch_merchant): - """Override auth dependency to return a UserContext for the merchant owner.""" + """Override auth dependencies 
to return merchant/user for the merchant owner.""" user_context = UserContext( id=merch_owner.id, email=merch_owner.email, @@ -165,13 +164,17 @@ def merch_auth_headers(merch_owner, merch_merchant): is_active=True, ) - def _override(): + def _override_merchant(): + return merch_merchant + + def _override_user(): return user_context - app.dependency_overrides[get_current_merchant_from_cookie_or_header] = _override + app.dependency_overrides[get_merchant_for_current_user] = _override_merchant + app.dependency_overrides[get_current_merchant_api] = _override_user yield {"Authorization": "Bearer fake-token"} - if get_current_merchant_from_cookie_or_header in app.dependency_overrides: - del app.dependency_overrides[get_current_merchant_from_cookie_or_header] + app.dependency_overrides.pop(get_merchant_for_current_user, None) + app.dependency_overrides.pop(get_current_merchant_api, None) # ============================================================================ diff --git a/app/modules/billing/tests/integration/test_platform_routes.py b/app/modules/billing/tests/integration/test_platform_routes.py index 5d29af77..409acd46 100644 --- a/app/modules/billing/tests/integration/test_platform_routes.py +++ b/app/modules/billing/tests/integration/test_platform_routes.py @@ -19,7 +19,6 @@ from app.modules.billing.models import ( ) from app.modules.tenancy.models import Platform - # ============================================================================ # Fixtures # ============================================================================ diff --git a/app/modules/billing/tests/integration/test_store_routes.py b/app/modules/billing/tests/integration/test_store_routes.py index fb7479e7..4673df77 100644 --- a/app/modules/billing/tests/integration/test_store_routes.py +++ b/app/modules/billing/tests/integration/test_store_routes.py @@ -27,7 +27,6 @@ from app.modules.tenancy.models import Merchant, Platform, Store, User from app.modules.tenancy.models.store import StoreUser, 
StoreUserType from app.modules.tenancy.models.store_platform import StorePlatform - # ============================================================================ # Fixtures # ============================================================================ diff --git a/app/modules/billing/tests/unit/test_admin_subscription_service.py b/app/modules/billing/tests/unit/test_admin_subscription_service.py index 74411309..1c7bec11 100644 --- a/app/modules/billing/tests/unit/test_admin_subscription_service.py +++ b/app/modules/billing/tests/unit/test_admin_subscription_service.py @@ -22,7 +22,6 @@ from app.modules.billing.services.admin_subscription_service import ( ) from app.modules.tenancy.models import Merchant - # ============================================================================ # Tier Management # ============================================================================ diff --git a/app/modules/billing/tests/unit/test_billing_service.py b/app/modules/billing/tests/unit/test_billing_service.py index 579bb9c8..cd9a926a 100644 --- a/app/modules/billing/tests/unit/test_billing_service.py +++ b/app/modules/billing/tests/unit/test_billing_service.py @@ -6,6 +6,13 @@ from unittest.mock import MagicMock, patch import pytest +from app.modules.billing.models import ( + AddOnProduct, + BillingHistory, + MerchantSubscription, + SubscriptionStatus, + SubscriptionTier, +) from app.modules.billing.services.billing_service import ( BillingService, NoActiveSubscriptionError, @@ -14,15 +21,6 @@ from app.modules.billing.services.billing_service import ( SubscriptionNotCancelledError, TierNotFoundError, ) -from app.modules.billing.models import ( - AddOnProduct, - BillingHistory, - MerchantSubscription, - SubscriptionStatus, - SubscriptionTier, - StoreAddOn, -) - # ============================================================================ # Tier Lookup diff --git a/app/modules/billing/tests/unit/test_capacity_forecast_service.py 
b/app/modules/billing/tests/unit/test_capacity_forecast_service.py index f3b01c87..258614f4 100644 --- a/app/modules/billing/tests/unit/test_capacity_forecast_service.py +++ b/app/modules/billing/tests/unit/test_capacity_forecast_service.py @@ -11,16 +11,15 @@ Tests cover: from datetime import UTC, datetime, timedelta from decimal import Decimal -from unittest.mock import MagicMock, patch import pytest +from app.modules.billing.models import CapacitySnapshot from app.modules.billing.services.capacity_forecast_service import ( INFRASTRUCTURE_SCALING, CapacityForecastService, capacity_forecast_service, ) -from app.modules.billing.models import CapacitySnapshot @pytest.mark.unit diff --git a/app/modules/billing/tests/unit/test_stripe_webhook_handler.py b/app/modules/billing/tests/unit/test_stripe_webhook_handler.py index 9ea2a4a0..ec265995 100644 --- a/app/modules/billing/tests/unit/test_stripe_webhook_handler.py +++ b/app/modules/billing/tests/unit/test_stripe_webhook_handler.py @@ -1,14 +1,13 @@ # tests/unit/services/test_stripe_webhook_handler.py """Unit tests for StripeWebhookHandler.""" -from datetime import datetime, timezone -from unittest.mock import MagicMock, patch +from datetime import UTC, datetime +from unittest.mock import MagicMock import pytest from app.handlers.stripe_webhook import StripeWebhookHandler from app.modules.billing.models import ( - BillingHistory, MerchantSubscription, StripeWebhookEvent, SubscriptionStatus, @@ -175,8 +174,8 @@ def test_subscription(db, test_store): store_id=test_store.id, tier="essential", status=SubscriptionStatus.TRIAL, - period_start=datetime.now(timezone.utc), - period_end=datetime.now(timezone.utc), + period_start=datetime.now(UTC), + period_end=datetime.now(UTC), ) db.add(subscription) db.commit() @@ -207,8 +206,8 @@ def test_active_subscription(db, test_store): status=SubscriptionStatus.ACTIVE, stripe_customer_id="cus_test123", stripe_subscription_id="sub_test123", - period_start=datetime.now(timezone.utc), - 
period_end=datetime.now(timezone.utc), + period_start=datetime.now(UTC), + period_end=datetime.now(UTC), ) db.add(subscription) db.commit() @@ -248,8 +247,8 @@ def mock_subscription_updated_event(): event.data.object.id = "sub_test123" event.data.object.customer = "cus_test123" event.data.object.status = "active" - event.data.object.current_period_start = int(datetime.now(timezone.utc).timestamp()) - event.data.object.current_period_end = int(datetime.now(timezone.utc).timestamp()) + event.data.object.current_period_start = int(datetime.now(UTC).timestamp()) + event.data.object.current_period_end = int(datetime.now(UTC).timestamp()) event.data.object.cancel_at_period_end = False event.data.object.items.data = [] event.data.object.metadata = {} @@ -277,7 +276,7 @@ def mock_invoice_paid_event(): event.data.object.customer = "cus_test123" event.data.object.payment_intent = "pi_test123" event.data.object.number = "INV-001" - event.data.object.created = int(datetime.now(timezone.utc).timestamp()) + event.data.object.created = int(datetime.now(UTC).timestamp()) event.data.object.subtotal = 4900 event.data.object.tax = 0 event.data.object.total = 4900 diff --git a/app/modules/billing/tests/unit/test_subscription_service.py b/app/modules/billing/tests/unit/test_subscription_service.py index 019275e8..5139ec2b 100644 --- a/app/modules/billing/tests/unit/test_subscription_service.py +++ b/app/modules/billing/tests/unit/test_subscription_service.py @@ -10,11 +10,9 @@ from app.modules.billing.models import ( MerchantSubscription, SubscriptionStatus, SubscriptionTier, - TierCode, ) from app.modules.billing.services.subscription_service import SubscriptionService - # ============================================================================ # Tier Information # ============================================================================ diff --git a/app/modules/cart/definition.py b/app/modules/cart/definition.py index a21477de..13261e89 100644 --- 
a/app/modules/cart/definition.py +++ b/app/modules/cart/definition.py @@ -8,7 +8,6 @@ It is session-based and does not require customer authentication. from app.modules.base import ModuleDefinition, PermissionDefinition - # ============================================================================= # Router Lazy Imports # ============================================================================= diff --git a/app/modules/cart/migrations/versions/cart_001_initial.py b/app/modules/cart/migrations/versions/cart_001_initial.py index 4366c5b9..3d631f30 100644 --- a/app/modules/cart/migrations/versions/cart_001_initial.py +++ b/app/modules/cart/migrations/versions/cart_001_initial.py @@ -4,9 +4,10 @@ Revision ID: cart_001 Revises: inventory_001 Create Date: 2026-02-07 """ -from alembic import op import sqlalchemy as sa +from alembic import op + revision = "cart_001" down_revision = "inventory_001" branch_labels = None diff --git a/app/modules/cart/routes/api/storefront.py b/app/modules/cart/routes/api/storefront.py index 241c1d42..80b9b858 100644 --- a/app/modules/cart/routes/api/storefront.py +++ b/app/modules/cart/routes/api/storefront.py @@ -15,7 +15,6 @@ from fastapi import APIRouter, Body, Depends, Path from sqlalchemy.orm import Session from app.core.database import get_db -from app.modules.cart.services import cart_service from app.modules.cart.schemas import ( AddToCartRequest, CartOperationResponse, @@ -23,8 +22,9 @@ from app.modules.cart.schemas import ( ClearCartResponse, UpdateCartItemRequest, ) -from middleware.store_context import require_store_context +from app.modules.cart.services import cart_service from app.modules.tenancy.models import Store +from middleware.store_context import require_store_context router = APIRouter() logger = logging.getLogger(__name__) diff --git a/app/modules/cart/schemas/__init__.py b/app/modules/cart/schemas/__init__.py index 2601c1a6..b30800b6 100644 --- a/app/modules/cart/schemas/__init__.py +++ 
b/app/modules/cart/schemas/__init__.py @@ -3,11 +3,11 @@ from app.modules.cart.schemas.cart import ( AddToCartRequest, - UpdateCartItemRequest, CartItemResponse, - CartResponse, CartOperationResponse, + CartResponse, ClearCartResponse, + UpdateCartItemRequest, ) __all__ = [ diff --git a/app/modules/cart/services/__init__.py b/app/modules/cart/services/__init__.py index c9f3be33..c2a75b17 100644 --- a/app/modules/cart/services/__init__.py +++ b/app/modules/cart/services/__init__.py @@ -1,6 +1,6 @@ # app/modules/cart/services/__init__.py """Cart module services.""" -from app.modules.cart.services.cart_service import cart_service, CartService +from app.modules.cart.services.cart_service import CartService, cart_service __all__ = ["cart_service", "CartService"] diff --git a/app/modules/cart/services/cart_service.py b/app/modules/cart/services/cart_service.py index b40a15e2..ff8ae53a 100644 --- a/app/modules/cart/services/cart_service.py +++ b/app/modules/cart/services/cart_service.py @@ -21,10 +21,10 @@ from app.modules.cart.exceptions import ( InsufficientInventoryForCartException, InvalidCartQuantityException, ) -from app.modules.catalog.exceptions import ProductNotFoundException -from app.utils.money import cents_to_euros from app.modules.cart.models.cart import CartItem +from app.modules.catalog.exceptions import ProductNotFoundException from app.modules.catalog.models import Product +from app.utils.money import cents_to_euros logger = logging.getLogger(__name__) diff --git a/app/modules/catalog/definition.py b/app/modules/catalog/definition.py index b4717af3..cb4b1cfa 100644 --- a/app/modules/catalog/definition.py +++ b/app/modules/catalog/definition.py @@ -9,7 +9,6 @@ from app.modules.base import ( ) from app.modules.enums import FrontendType - # ============================================================================= # Router Lazy Imports # ============================================================================= diff --git 
a/app/modules/catalog/migrations/versions/catalog_001_initial.py b/app/modules/catalog/migrations/versions/catalog_001_initial.py index be745718..9b730dcb 100644 --- a/app/modules/catalog/migrations/versions/catalog_001_initial.py +++ b/app/modules/catalog/migrations/versions/catalog_001_initial.py @@ -4,9 +4,10 @@ Revision ID: catalog_001 Revises: cms_001 Create Date: 2026-02-07 """ -from alembic import op import sqlalchemy as sa +from alembic import op + revision = "catalog_001" down_revision = "cms_001" branch_labels = None diff --git a/app/modules/catalog/routes/api/__init__.py b/app/modules/catalog/routes/api/__init__.py index b8500a66..6c5b62a9 100644 --- a/app/modules/catalog/routes/api/__init__.py +++ b/app/modules/catalog/routes/api/__init__.py @@ -19,7 +19,7 @@ def __getattr__(name: str): if name == "admin_router": from app.modules.catalog.routes.api.admin import admin_router return admin_router - elif name == "store_router": + if name == "store_router": from app.modules.catalog.routes.api.store import store_router return store_router raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/app/modules/catalog/routes/api/admin.py b/app/modules/catalog/routes/api/admin.py index bdb1d29c..b872ae94 100644 --- a/app/modules/catalog/routes/api/admin.py +++ b/app/modules/catalog/routes/api/admin.py @@ -18,9 +18,6 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_admin_api, require_module_access from app.core.database import get_db from app.modules.billing.services.subscription_service import subscription_service -from app.modules.catalog.services.store_product_service import store_product_service -from app.modules.enums import FrontendType -from models.schema.auth import UserContext from app.modules.catalog.schemas import ( CatalogStore, CatalogStoresResponse, @@ -33,6 +30,9 @@ from app.modules.catalog.schemas import ( StoreProductStats, StoreProductUpdate, ) +from 
app.modules.catalog.services.store_product_service import store_product_service +from app.modules.enums import FrontendType +from models.schema.auth import UserContext admin_router = APIRouter( prefix="/store-products", diff --git a/app/modules/catalog/routes/api/store.py b/app/modules/catalog/routes/api/store.py index 1e5a2764..4b5c6398 100644 --- a/app/modules/catalog/routes/api/store.py +++ b/app/modules/catalog/routes/api/store.py @@ -15,11 +15,7 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_store_api, require_module_access from app.core.database import get_db -from app.modules.catalog.services.product_service import product_service from app.modules.billing.services.subscription_service import subscription_service -from app.modules.catalog.services.store_product_service import store_product_service -from app.modules.enums import FrontendType -from models.schema.auth import UserContext from app.modules.catalog.schemas import ( ProductCreate, ProductDeleteResponse, @@ -31,6 +27,10 @@ from app.modules.catalog.schemas import ( StoreDirectProductCreate, StoreProductCreateResponse, ) +from app.modules.catalog.services.product_service import product_service +from app.modules.catalog.services.store_product_service import store_product_service +from app.modules.enums import FrontendType +from models.schema.auth import UserContext store_router = APIRouter( prefix="/products", diff --git a/app/modules/catalog/routes/api/storefront.py b/app/modules/catalog/routes/api/storefront.py index 2f142b30..44fca02f 100644 --- a/app/modules/catalog/routes/api/storefront.py +++ b/app/modules/catalog/routes/api/storefront.py @@ -15,14 +15,14 @@ from fastapi import APIRouter, Depends, Path, Query, Request from sqlalchemy.orm import Session from app.core.database import get_db -from app.modules.catalog.services import catalog_service from app.modules.catalog.schemas import ( ProductDetailResponse, ProductListResponse, ProductResponse, ) -from 
middleware.store_context import require_store_context +from app.modules.catalog.services import catalog_service from app.modules.tenancy.models import Store +from middleware.store_context import require_store_context router = APIRouter() logger = logging.getLogger(__name__) diff --git a/app/modules/catalog/routes/pages/admin.py b/app/modules/catalog/routes/pages/admin.py index 55edbd7b..c8ddc1ca 100644 --- a/app/modules/catalog/routes/pages/admin.py +++ b/app/modules/catalog/routes/pages/admin.py @@ -14,9 +14,9 @@ from sqlalchemy.orm import Session from app.api.deps import get_db, require_menu_access from app.modules.core.utils.page_context import get_admin_context -from app.templates_config import templates from app.modules.enums import FrontendType from app.modules.tenancy.models import User +from app.templates_config import templates router = APIRouter() diff --git a/app/modules/catalog/routes/pages/store.py b/app/modules/catalog/routes/pages/store.py index c04a07f5..082c23f7 100644 --- a/app/modules/catalog/routes/pages/store.py +++ b/app/modules/catalog/routes/pages/store.py @@ -13,8 +13,8 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_store_from_cookie_or_header, get_db from app.modules.core.utils.page_context import get_store_context -from app.templates_config import templates from app.modules.tenancy.models import User +from app.templates_config import templates router = APIRouter() diff --git a/app/modules/catalog/schemas/__init__.py b/app/modules/catalog/schemas/__init__.py index ab644bf5..c2506d4f 100644 --- a/app/modules/catalog/schemas/__init__.py +++ b/app/modules/catalog/schemas/__init__.py @@ -3,34 +3,38 @@ from app.modules.catalog.schemas.catalog import ( ProductDetailResponse as CatalogProductDetailResponse, +) +from app.modules.catalog.schemas.catalog import ( ProductListResponse as CatalogProductListResponse, +) +from app.modules.catalog.schemas.catalog import ( ProductResponse as CatalogProductResponse, ) from 
app.modules.catalog.schemas.product import ( ProductCreate, - ProductUpdate, - ProductResponse, + ProductDeleteResponse, ProductDetailResponse, ProductListResponse, - ProductDeleteResponse, + ProductResponse, ProductToggleResponse, + ProductUpdate, ) from app.modules.catalog.schemas.store_product import ( + # Catalog store schemas + CatalogStore, + CatalogStoresResponse, + RemoveProductResponse, + StoreDirectProductCreate, + StoreProductCreate, + StoreProductCreateResponse, + StoreProductDetail, # List/Detail schemas StoreProductListItem, StoreProductListResponse, StoreProductStats, - StoreProductDetail, - # Catalog store schemas - CatalogStore, - CatalogStoresResponse, + StoreProductUpdate, # CRUD schemas TranslationUpdate, - StoreProductCreate, - StoreDirectProductCreate, - StoreProductUpdate, - StoreProductCreateResponse, - RemoveProductResponse, ) __all__ = [ diff --git a/app/modules/catalog/schemas/catalog.py b/app/modules/catalog/schemas/catalog.py index 70161c04..94c2b8c5 100644 --- a/app/modules/catalog/schemas/catalog.py +++ b/app/modules/catalog/schemas/catalog.py @@ -8,7 +8,7 @@ For store product management, see the products module. 
from datetime import datetime -from pydantic import BaseModel, ConfigDict, Field +from pydantic import BaseModel, ConfigDict from app.modules.inventory.schemas import InventoryLocationResponse from app.modules.marketplace.schemas import MarketplaceProductResponse diff --git a/app/modules/catalog/services/catalog_features.py b/app/modules/catalog/services/catalog_features.py index d366f2fa..72dec672 100644 --- a/app/modules/catalog/services/catalog_features.py +++ b/app/modules/catalog/services/catalog_features.py @@ -14,7 +14,6 @@ from sqlalchemy import func from app.modules.contracts.features import ( FeatureDeclaration, - FeatureProviderProtocol, FeatureScope, FeatureType, FeatureUsage, diff --git a/app/modules/catalog/services/catalog_metrics.py b/app/modules/catalog/services/catalog_metrics.py index ab947d5b..dd23e6a4 100644 --- a/app/modules/catalog/services/catalog_metrics.py +++ b/app/modules/catalog/services/catalog_metrics.py @@ -16,9 +16,8 @@ from sqlalchemy import func from sqlalchemy.orm import Session from app.modules.contracts.metrics import ( - MetricValue, MetricsContext, - MetricsProviderProtocol, + MetricValue, ) if TYPE_CHECKING: @@ -95,7 +94,7 @@ class CatalogMetricsProvider: new_products = new_products_query.count() # Products with translations - products_with_translations = ( + ( db.query(func.count(func.distinct(Product.id))) .filter(Product.store_id == store_id) .join(Product.translations) diff --git a/app/modules/catalog/services/product_service.py b/app/modules/catalog/services/product_service.py index e3f7555d..632aadd0 100644 --- a/app/modules/catalog/services/product_service.py +++ b/app/modules/catalog/services/product_service.py @@ -18,9 +18,9 @@ from app.modules.catalog.exceptions import ( ProductAlreadyExistsException, ProductNotFoundException, ) -from app.modules.marketplace.models import MarketplaceProduct from app.modules.catalog.models import Product from app.modules.catalog.schemas import ProductCreate, ProductUpdate +from 
app.modules.marketplace.models import MarketplaceProduct logger = logging.getLogger(__name__) diff --git a/app/modules/checkout/definition.py b/app/modules/checkout/definition.py index c7b91c62..11c19803 100644 --- a/app/modules/checkout/definition.py +++ b/app/modules/checkout/definition.py @@ -8,7 +8,6 @@ Orchestrates payment processing and order creation. from app.modules.base import ModuleDefinition, PermissionDefinition - # ============================================================================= # Router Lazy Imports # ============================================================================= diff --git a/app/modules/checkout/routes/api/storefront.py b/app/modules/checkout/routes/api/storefront.py index 6e254ad8..8138c340 100644 --- a/app/modules/checkout/routes/api/storefront.py +++ b/app/modules/checkout/routes/api/storefront.py @@ -18,7 +18,6 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_customer_api from app.core.database import get_db -from app.modules.tenancy.exceptions import StoreNotFoundException from app.modules.cart.services import cart_service from app.modules.checkout.schemas import ( CheckoutRequest, @@ -27,11 +26,14 @@ from app.modules.checkout.schemas import ( ) from app.modules.checkout.services import checkout_service from app.modules.customers.schemas import CustomerContext -from app.modules.orders.services import order_service -from app.modules.messaging.services.email_service import EmailService # noqa: MOD-004 - Core email service -from middleware.store_context import require_store_context -from app.modules.tenancy.models import Store +from app.modules.messaging.services.email_service import ( + EmailService, # noqa: MOD-004 - Core email service +) from app.modules.orders.schemas import OrderCreate, OrderResponse +from app.modules.orders.services import order_service +from app.modules.tenancy.exceptions import StoreNotFoundException +from app.modules.tenancy.models import Store +from 
middleware.store_context import require_store_context router = APIRouter() logger = logging.getLogger(__name__) diff --git a/app/modules/cms/definition.py b/app/modules/cms/definition.py index 98b21176..d3abe850 100644 --- a/app/modules/cms/definition.py +++ b/app/modules/cms/definition.py @@ -15,7 +15,12 @@ This is a self-contained module with: import logging from typing import Any -from app.modules.base import MenuItemDefinition, MenuSectionDefinition, ModuleDefinition, PermissionDefinition +from app.modules.base import ( + MenuItemDefinition, + MenuSectionDefinition, + ModuleDefinition, + PermissionDefinition, +) from app.modules.enums import FrontendType logger = logging.getLogger(__name__) diff --git a/app/modules/cms/migrations/versions/cms_001_initial.py b/app/modules/cms/migrations/versions/cms_001_initial.py index ee64db47..28c53c61 100644 --- a/app/modules/cms/migrations/versions/cms_001_initial.py +++ b/app/modules/cms/migrations/versions/cms_001_initial.py @@ -4,9 +4,10 @@ Revision ID: cms_001 Revises: marketplace_001 Create Date: 2026-02-07 """ -from alembic import op import sqlalchemy as sa +from alembic import op + revision = "cms_001" down_revision = "marketplace_001" branch_labels = None diff --git a/app/modules/cms/models/content_page.py b/app/modules/cms/models/content_page.py index 6200f188..f3e253de 100644 --- a/app/modules/cms/models/content_page.py +++ b/app/modules/cms/models/content_page.py @@ -191,10 +191,9 @@ class ContentPage(Base): """Get the tier level of this page for display purposes.""" if self.is_platform_page: return "platform" - elif self.store_id is None: + if self.store_id is None: return "store_default" - else: - return "store_override" + return "store_override" def to_dict(self): """Convert to dictionary for API responses.""" diff --git a/app/modules/cms/routes/__init__.py b/app/modules/cms/routes/__init__.py index a3b3c2f1..ed292a5a 100644 --- a/app/modules/cms/routes/__init__.py +++ b/app/modules/cms/routes/__init__.py @@ 
-22,10 +22,10 @@ def __getattr__(name: str): if name == "admin_router": from app.modules.cms.routes.admin import admin_router return admin_router - elif name == "store_router": + if name == "store_router": from app.modules.cms.routes.store import store_router return store_router - elif name == "store_media_router": + if name == "store_media_router": from app.modules.cms.routes.store import store_media_router return store_media_router raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/app/modules/cms/routes/api/admin_content_pages.py b/app/modules/cms/routes/api/admin_content_pages.py index e445ea0f..7c4a0c5f 100644 --- a/app/modules/cms/routes/api/admin_content_pages.py +++ b/app/modules/cms/routes/api/admin_content_pages.py @@ -17,8 +17,8 @@ from app.api.deps import get_current_admin_api, get_db from app.exceptions import ValidationException from app.modules.cms.schemas import ( ContentPageCreate, - ContentPageUpdate, ContentPageResponse, + ContentPageUpdate, HomepageSectionsResponse, SectionUpdateResponse, ) diff --git a/app/modules/cms/routes/api/admin_images.py b/app/modules/cms/routes/api/admin_images.py index 379c3b62..4478bf29 100644 --- a/app/modules/cms/routes/api/admin_images.py +++ b/app/modules/cms/routes/api/admin_images.py @@ -13,9 +13,9 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_admin_api from app.core.database import get_db +from app.modules.cms.schemas.image import ImageStorageStats from app.modules.cms.services.media_service import media_service from models.schema.auth import UserContext -from app.modules.cms.schemas.image import ImageStorageStats admin_images_router = APIRouter(prefix="/images") logger = logging.getLogger(__name__) diff --git a/app/modules/cms/routes/api/admin_media.py b/app/modules/cms/routes/api/admin_media.py index 3c7485dc..65051508 100644 --- a/app/modules/cms/routes/api/admin_media.py +++ b/app/modules/cms/routes/api/admin_media.py @@ -12,14 +12,14 @@ from 
sqlalchemy.orm import Session from app.api.deps import get_current_admin_api from app.core.database import get_db -from app.modules.cms.services.media_service import media_service -from models.schema.auth import UserContext from app.modules.cms.schemas.media import ( MediaDetailResponse, MediaItemResponse, MediaListResponse, MediaUploadResponse, ) +from app.modules.cms.services.media_service import media_service +from models.schema.auth import UserContext admin_media_router = APIRouter(prefix="/media") logger = logging.getLogger(__name__) diff --git a/app/modules/cms/routes/api/admin_store_themes.py b/app/modules/cms/routes/api/admin_store_themes.py index 512027c5..aeb0b948 100644 --- a/app/modules/cms/routes/api/admin_store_themes.py +++ b/app/modules/cms/routes/api/admin_store_themes.py @@ -18,15 +18,15 @@ from fastapi import APIRouter, Depends, Path from sqlalchemy.orm import Session from app.api.deps import get_current_admin_api, get_db -from app.modules.cms.services.store_theme_service import store_theme_service -from models.schema.auth import UserContext from app.modules.cms.schemas.store_theme import ( + StoreThemeResponse, + StoreThemeUpdate, ThemeDeleteResponse, ThemePresetListResponse, ThemePresetResponse, - StoreThemeResponse, - StoreThemeUpdate, ) +from app.modules.cms.services.store_theme_service import store_theme_service +from models.schema.auth import UserContext admin_store_themes_router = APIRouter(prefix="/store-themes") logger = logging.getLogger(__name__) diff --git a/app/modules/cms/routes/api/store_content_pages.py b/app/modules/cms/routes/api/store_content_pages.py index f0ec17a9..802cd8b1 100644 --- a/app/modules/cms/routes/api/store_content_pages.py +++ b/app/modules/cms/routes/api/store_content_pages.py @@ -19,14 +19,16 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_store_api, get_db from app.modules.cms.exceptions import ContentPageNotFoundException from app.modules.cms.schemas import ( + CMSUsageResponse, + 
ContentPageResponse, StoreContentPageCreate, StoreContentPageUpdate, - ContentPageResponse, - CMSUsageResponse, ) from app.modules.cms.services import content_page_service -from app.modules.tenancy.services.store_service import StoreService # noqa: MOD-004 - shared platform service from app.modules.tenancy.models import User +from app.modules.tenancy.services.store_service import ( + StoreService, # noqa: MOD-004 - shared platform service +) store_service = StoreService() diff --git a/app/modules/cms/routes/api/store_media.py b/app/modules/cms/routes/api/store_media.py index 6cecba75..71b4eaaa 100644 --- a/app/modules/cms/routes/api/store_media.py +++ b/app/modules/cms/routes/api/store_media.py @@ -14,9 +14,8 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_store_api from app.core.database import get_db from app.modules.cms.exceptions import MediaOptimizationException -from app.modules.cms.services.media_service import media_service -from models.schema.auth import UserContext from app.modules.cms.schemas.media import ( + FailedFileInfo, MediaDetailResponse, MediaItemResponse, MediaListResponse, @@ -26,8 +25,9 @@ from app.modules.cms.schemas.media import ( MultipleUploadResponse, OptimizationResultResponse, UploadedFileInfo, - FailedFileInfo, ) +from app.modules.cms.services.media_service import media_service +from models.schema.auth import UserContext store_media_router = APIRouter(prefix="/media") logger = logging.getLogger(__name__) diff --git a/app/modules/cms/routes/api/storefront.py b/app/modules/cms/routes/api/storefront.py index 26b68398..97c82a00 100644 --- a/app/modules/cms/routes/api/storefront.py +++ b/app/modules/cms/routes/api/storefront.py @@ -13,8 +13,8 @@ from sqlalchemy.orm import Session from app.core.database import get_db from app.modules.cms.schemas import ( - PublicContentPageResponse, ContentPageListItem, + PublicContentPageResponse, ) from app.modules.cms.services import content_page_service diff --git 
a/app/modules/cms/routes/pages/admin.py b/app/modules/cms/routes/pages/admin.py index d03afa54..37b239a2 100644 --- a/app/modules/cms/routes/pages/admin.py +++ b/app/modules/cms/routes/pages/admin.py @@ -10,9 +10,9 @@ from fastapi.responses import HTMLResponse, RedirectResponse from sqlalchemy.orm import Session from app.api.deps import get_db, require_menu_access -from app.templates_config import templates from app.modules.enums import FrontendType from app.modules.tenancy.models import User +from app.templates_config import templates router = APIRouter() diff --git a/app/modules/cms/routes/pages/store.py b/app/modules/cms/routes/pages/store.py index d161cbbf..d27a3b50 100644 --- a/app/modules/cms/routes/pages/store.py +++ b/app/modules/cms/routes/pages/store.py @@ -13,10 +13,11 @@ from sqlalchemy.orm import Session from app.api.deps import get_current_store_from_cookie_or_header, get_db from app.modules.cms.services import content_page_service -from app.modules.core.services.platform_settings_service import platform_settings_service # noqa: MOD-004 - shared platform service +from app.modules.core.services.platform_settings_service import ( + platform_settings_service, # noqa: MOD-004 - shared platform service +) +from app.modules.tenancy.models import Store, User from app.templates_config import templates -from app.modules.tenancy.models import User -from app.modules.tenancy.models import Store logger = logging.getLogger(__name__) diff --git a/app/modules/cms/schemas/__init__.py b/app/modules/cms/schemas/__init__.py index e192c164..de04a9ec 100644 --- a/app/modules/cms/schemas/__init__.py +++ b/app/modules/cms/schemas/__init__.py @@ -4,35 +4,42 @@ CMS module Pydantic schemas for API request/response validation. 
""" from app.modules.cms.schemas.content_page import ( + CMSUsageResponse, # Admin schemas ContentPageCreate, - ContentPageUpdate, + ContentPageListItem, ContentPageResponse, - HomepageSectionsResponse as ContentPageHomepageSectionsResponse, + ContentPageUpdate, + # Public/Shop schemas + PublicContentPageResponse, SectionUpdateResponse, # Store schemas StoreContentPageCreate, StoreContentPageUpdate, - CMSUsageResponse, - # Public/Shop schemas - PublicContentPageResponse, - ContentPageListItem, +) +from app.modules.cms.schemas.content_page import ( + HomepageSectionsResponse as ContentPageHomepageSectionsResponse, ) from app.modules.cms.schemas.homepage_sections import ( - # Translatable text - TranslatableText, + CTASection, + FeatureCard, + FeaturesSection, # Section components HeroButton, HeroSection, - FeatureCard, - FeaturesSection, - PricingSection, - CTASection, # Main structure HomepageSections, + HomepageSectionsResponse, + PricingSection, # API schemas SectionUpdateRequest, - HomepageSectionsResponse, + # Translatable text + TranslatableText, +) + +# Image schemas +from app.modules.cms.schemas.image import ( + ImageStorageStats, ) # Media schemas @@ -51,23 +58,18 @@ from app.modules.cms.schemas.media import ( UploadedFileInfo, ) -# Image schemas -from app.modules.cms.schemas.image import ( - ImageStorageStats, -) - # Theme schemas from app.modules.cms.schemas.store_theme import ( - ThemeDeleteResponse, - ThemePresetListResponse, - ThemePresetPreview, - ThemePresetResponse, StoreThemeBranding, StoreThemeColors, StoreThemeFonts, StoreThemeLayout, StoreThemeResponse, StoreThemeUpdate, + ThemeDeleteResponse, + ThemePresetListResponse, + ThemePresetPreview, + ThemePresetResponse, ) __all__ = [ diff --git a/app/modules/cms/schemas/content_page.py b/app/modules/cms/schemas/content_page.py index ccf166c0..8fd7205b 100644 --- a/app/modules/cms/schemas/content_page.py +++ b/app/modules/cms/schemas/content_page.py @@ -10,7 +10,6 @@ Schemas are organized by context: 
from pydantic import BaseModel, Field - # ============================================================================ # ADMIN SCHEMAS # ============================================================================ diff --git a/app/modules/cms/schemas/homepage_sections.py b/app/modules/cms/schemas/homepage_sections.py index 283c3be7..68dde068 100644 --- a/app/modules/cms/schemas/homepage_sections.py +++ b/app/modules/cms/schemas/homepage_sections.py @@ -18,8 +18,8 @@ Example JSON structure: } """ + from pydantic import BaseModel, Field -from typing import Optional class TranslatableText(BaseModel): @@ -59,13 +59,13 @@ class HeroSection(BaseModel): """Hero section configuration.""" enabled: bool = True - badge_text: Optional[TranslatableText] = None + badge_text: TranslatableText | None = None title: TranslatableText = Field(default_factory=TranslatableText) subtitle: TranslatableText = Field(default_factory=TranslatableText) background_type: str = Field( default="gradient", description="gradient, image, solid" ) - background_image: Optional[str] = None + background_image: str | None = None buttons: list[HeroButton] = Field(default_factory=list) @@ -82,7 +82,7 @@ class FeaturesSection(BaseModel): enabled: bool = True title: TranslatableText = Field(default_factory=TranslatableText) - subtitle: Optional[TranslatableText] = None + subtitle: TranslatableText | None = None features: list[FeatureCard] = Field(default_factory=list) layout: str = Field(default="grid", description="grid, list, cards") @@ -92,7 +92,7 @@ class PricingSection(BaseModel): enabled: bool = True title: TranslatableText = Field(default_factory=TranslatableText) - subtitle: Optional[TranslatableText] = None + subtitle: TranslatableText | None = None use_subscription_tiers: bool = Field( default=True, description="Pull pricing from subscription_tiers table dynamically" ) @@ -103,7 +103,7 @@ class CTASection(BaseModel): enabled: bool = True title: TranslatableText = 
Field(default_factory=TranslatableText) - subtitle: Optional[TranslatableText] = None + subtitle: TranslatableText | None = None buttons: list[HeroButton] = Field(default_factory=list) background_type: str = Field( default="gradient", description="gradient, image, solid" @@ -113,10 +113,10 @@ class CTASection(BaseModel): class HomepageSections(BaseModel): """Complete homepage sections structure.""" - hero: Optional[HeroSection] = None - features: Optional[FeaturesSection] = None - pricing: Optional[PricingSection] = None - cta: Optional[CTASection] = None + hero: HeroSection | None = None + features: FeaturesSection | None = None + pricing: PricingSection | None = None + cta: CTASection | None = None @classmethod def get_empty_structure(cls, languages: list[str]) -> "HomepageSections": @@ -169,6 +169,6 @@ class SectionUpdateRequest(BaseModel): class HomepageSectionsResponse(BaseModel): """Response containing all homepage sections with platform language info.""" - sections: Optional[HomepageSections] = None + sections: HomepageSections | None = None supported_languages: list[str] = Field(default_factory=lambda: ["fr", "de", "en"]) default_language: str = "fr" diff --git a/app/modules/cms/services/__init__.py b/app/modules/cms/services/__init__.py index c809d007..8acad576 100644 --- a/app/modules/cms/services/__init__.py +++ b/app/modules/cms/services/__init__.py @@ -13,15 +13,15 @@ from app.modules.cms.services.media_service import ( MediaService, media_service, ) +from app.modules.cms.services.store_email_settings_service import ( + StoreEmailSettingsService, + get_store_email_settings_service, # Deprecated: use store_email_settings_service + store_email_settings_service, +) from app.modules.cms.services.store_theme_service import ( StoreThemeService, store_theme_service, ) -from app.modules.cms.services.store_email_settings_service import ( - StoreEmailSettingsService, - store_email_settings_service, - get_store_email_settings_service, # Deprecated: use 
store_email_settings_service -) __all__ = [ "ContentPageService", diff --git a/app/modules/cms/services/cms_features.py b/app/modules/cms/services/cms_features.py index d0d102a1..5cb1542f 100644 --- a/app/modules/cms/services/cms_features.py +++ b/app/modules/cms/services/cms_features.py @@ -16,7 +16,6 @@ from sqlalchemy import func from app.modules.contracts.features import ( FeatureDeclaration, - FeatureProviderProtocol, FeatureScope, FeatureType, FeatureUsage, diff --git a/app/modules/cms/services/cms_metrics.py b/app/modules/cms/services/cms_metrics.py index e751d8bc..5222c608 100644 --- a/app/modules/cms/services/cms_metrics.py +++ b/app/modules/cms/services/cms_metrics.py @@ -15,9 +15,8 @@ from sqlalchemy import func from sqlalchemy.orm import Session from app.modules.contracts.metrics import ( - MetricValue, MetricsContext, - MetricsProviderProtocol, + MetricValue, ) if TYPE_CHECKING: diff --git a/app/modules/cms/services/content_page_service.py b/app/modules/cms/services/content_page_service.py index 73553ae5..690cefe2 100644 --- a/app/modules/cms/services/content_page_service.py +++ b/app/modules/cms/services/content_page_service.py @@ -96,7 +96,7 @@ class ContentPageService: db.query(ContentPage) .filter( and_( - ContentPage.store_id == None, + ContentPage.store_id.is_(None), ContentPage.is_platform_page == False, *base_filters, ) @@ -136,7 +136,7 @@ class ContentPageService: filters = [ ContentPage.platform_id == platform_id, ContentPage.slug == slug, - ContentPage.store_id == None, + ContentPage.store_id.is_(None), ContentPage.is_platform_page == True, ] @@ -209,7 +209,7 @@ class ContentPageService: db.query(ContentPage) .filter( and_( - ContentPage.store_id == None, + ContentPage.store_id.is_(None), ContentPage.is_platform_page == False, *base_filters, ) @@ -252,7 +252,7 @@ class ContentPageService: """ filters = [ ContentPage.platform_id == platform_id, - ContentPage.store_id == None, + ContentPage.store_id.is_(None), ContentPage.is_platform_page == True, ]
@@ -291,7 +291,7 @@ class ContentPageService: """ filters = [ ContentPage.platform_id == platform_id, - ContentPage.store_id == None, + ContentPage.store_id.is_(None), ContentPage.is_platform_page == False, ] @@ -760,12 +760,12 @@ class ContentPageService: if page_tier == "platform": filters.append(ContentPage.is_platform_page == True) - filters.append(ContentPage.store_id == None) + filters.append(ContentPage.store_id.is_(None)) elif page_tier == "store_default": filters.append(ContentPage.is_platform_page == False) - filters.append(ContentPage.store_id == None) + filters.append(ContentPage.store_id.is_(None)) elif page_tier == "store_override": - filters.append(ContentPage.store_id != None) + filters.append(ContentPage.store_id.is_not(None)) return ( db.query(ContentPage) @@ -942,10 +942,10 @@ class ContentPageService: ValueError: If section name is invalid """ from app.modules.cms.schemas import ( - HeroSection, - FeaturesSection, - PricingSection, CTASection, + FeaturesSection, + HeroSection, + PricingSection, ) SECTION_SCHEMAS = { diff --git a/app/modules/cms/services/media_service.py b/app/modules/cms/services/media_service.py index d96c52f0..d8e1bbef 100644 --- a/app/modules/cms/services/media_service.py +++ b/app/modules/cms/services/media_service.py @@ -11,7 +11,6 @@ This module provides: import logging import mimetypes -import os import shutil import uuid from datetime import UTC, datetime @@ -22,11 +21,10 @@ from sqlalchemy import func, or_ from sqlalchemy.orm import Session from app.modules.cms.exceptions import ( + MediaFileTooLargeException, MediaNotFoundException, - MediaUploadException, MediaValidationException, UnsupportedMediaTypeException, - MediaFileTooLargeException, ) from app.modules.cms.models import MediaFile diff --git a/app/modules/cms/services/store_email_settings_service.py b/app/modules/cms/services/store_email_settings_service.py index 10bc1b00..e5582800 100644 --- a/app/modules/cms/services/store_email_settings_service.py +++ 
b/app/modules/cms/services/store_email_settings_service.py @@ -20,17 +20,16 @@ from sqlalchemy.orm import Session from app.exceptions import ( AuthorizationException, + ExternalServiceException, ResourceNotFoundException, ValidationException, - ExternalServiceException, -) -from app.modules.tenancy.models import Store -from app.modules.messaging.models import ( - StoreEmailSettings, - EmailProvider, - PREMIUM_EMAIL_PROVIDERS, ) from app.modules.billing.models import TierCode +from app.modules.messaging.models import ( + PREMIUM_EMAIL_PROVIDERS, + EmailProvider, + StoreEmailSettings, +) logger = logging.getLogger(__name__) @@ -343,7 +342,7 @@ class StoreEmailSettingsService: from_email=(settings.from_email, settings.from_name), to_emails=to_email, subject="Wizamart Email Configuration Test", - html_content=f""" + html_content=f"""
If you received this email, your email settings are working correctly!
- Provider: {provider}
- From: {from_email}
+ Provider: {app_settings.email_provider}
+ From: {app_settings.email_from_address}