feat(subscriptions): migrate subscription management to merchant level and seed tiers

Move subscription create/edit from store detail (broken endpoint) to merchant
detail page with proper modal UI. Seed 4 subscription tiers (Essential,
Professional, Business, Enterprise) in init_production.py. Also includes
cross-module dependency declarations, store domain platform_id migration,
platform context middleware, CMS route fixes, and migration backups.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-09 21:04:04 +01:00
parent 7feacd5af8
commit 68493dc6cb
97 changed files with 13286 additions and 77 deletions

View File

@@ -101,27 +101,35 @@ migrate-squash:
init-prod:
@echo "🔧 Initializing production database..."
@echo ""
@echo "Step 0/5: Ensuring database exists (running migrations)..."
@$(PYTHON) -m alembic upgrade head
@echo "Step 0/6: Ensuring database exists (running migrations)..."
@$(PYTHON) -m alembic upgrade heads
@echo ""
@echo "Step 1/5: Creating admin user and platform settings..."
@echo "Step 1/6: Creating admin user and platform settings..."
$(PYTHON) scripts/init_production.py
@echo ""
@echo "Step 2/5: Initializing log settings..."
@echo "Step 2/6: Initializing log settings..."
$(PYTHON) scripts/init_log_settings.py
@echo ""
@echo "Step 3/5: Creating default CMS content pages..."
@echo "Step 3/6: Creating default CMS content pages..."
$(PYTHON) scripts/create_default_content_pages.py
@echo ""
@echo "Step 4/5: Creating platform pages and landing..."
@echo "Step 4/6: Creating platform pages and landing..."
$(PYTHON) scripts/create_platform_pages.py
@echo ""
@echo "Step 5/5: Seeding email templates..."
@echo "Step 5/6: Seeding email templates..."
$(PYTHON) scripts/seed_email_templates.py
@echo ""
@echo "Step 6/6: Seeding subscription tiers..."
@echo " (Handled by init_production.py Step 6)"
@echo ""
@echo "✅ Production initialization completed"
@echo "✨ Platform is ready for production OR development"
seed-tiers:
@echo "🏷️ Seeding subscription tiers..."
$(PYTHON) -c "from scripts.init_production import *; from app.core.database import SessionLocal; from sqlalchemy import select; db = SessionLocal(); oms = db.execute(select(Platform).where(Platform.code == 'oms')).scalar_one_or_none(); create_subscription_tiers(db, oms) if oms else print('OMS platform not found'); db.commit(); db.close()"
@echo "✅ Subscription tiers seeded"
# First-time installation - Complete setup with configuration validation
platform-install:
@echo "🚀 WIZAMART PLATFORM INSTALLATION"
@@ -497,6 +505,9 @@ urls-dev:
urls-prod:
@$(PYTHON) scripts/show_urls.py --prod
urls-check:
@$(PYTHON) scripts/show_urls.py --check
check-env:
@echo "Checking Python environment..."
@echo "Detected OS: $(DETECTED_OS)"
@@ -541,7 +552,8 @@ help:
@echo " migrate-down - Rollback last migration"
@echo " migrate-status - Show migration status"
@echo " platform-install - First-time setup (validates config + migrate + init)"
@echo " init-prod - Initialize platform (admin, CMS, pages, emails)"
@echo " init-prod - Initialize platform (admin, CMS, pages, emails, tiers)"
@echo " seed-tiers - Seed subscription tiers only"
@echo " seed-demo - Seed demo data (3 merchants + stores)"
@echo " seed-demo-minimal - Seed minimal demo (1 merchant + store)"
@echo " seed-demo-reset - DELETE ALL demo data and reseed"
@@ -600,6 +612,7 @@ help:
@echo " urls - Show all platform/store/storefront URLs"
@echo " urls-dev - Show development URLs only"
@echo " urls-prod - Show production URLs only"
@echo " urls-check - Check dev URLs with curl (server must be running)"
@echo " clean - Clean build artifacts"
@echo " check-env - Check Python environment and OS"
@echo ""
@@ -631,12 +644,13 @@ help-db:
@echo ""
@echo "PLATFORM INITIALIZATION (Production + Development):"
@echo "──────────────────────────────────────────────────────────"
@echo " init-prod - Complete platform setup (5 steps):"
@echo " init-prod - Complete platform setup (6 steps):"
@echo " 1. Create admin user + settings"
@echo " 2. Initialize log settings"
@echo " 3. Create CMS defaults"
@echo " 4. Create platform pages"
@echo " 5. Seed email templates"
@echo " 6. Seed subscription tiers"
@echo ""
@echo "DEMO DATA (Development Only - NEVER in production):"
@echo "──────────────────────────────────────────────────────────"

View File

@@ -0,0 +1,48 @@
"""add platform_id to store_domains
Revision ID: z_store_domain_platform_id
Revises: core_001
Create Date: 2026-02-08
"""
from alembic import op
import sqlalchemy as sa
revision = "z_store_domain_platform_id"
down_revision = "core_001"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add platform_id column
op.add_column(
"store_domains",
sa.Column(
"platform_id",
sa.Integer(),
sa.ForeignKey("platforms.id", ondelete="SET NULL"),
nullable=True,
comment="Platform this domain is associated with (for platform context resolution)",
),
)
op.create_index("idx_store_domain_platform", "store_domains", ["platform_id"])
# Backfill: set platform_id from the store's primary store_platform
op.execute(
"""
UPDATE store_domains sd
SET platform_id = (
SELECT sp.platform_id
FROM store_platforms sp
WHERE sp.store_id = sd.store_id
AND sp.is_primary = true
LIMIT 1
)
WHERE sd.platform_id IS NULL
"""
)
def downgrade() -> None:
op.drop_index("idx_store_domain_platform", table_name="store_domains")
op.drop_column("store_domains", "platform_id")

View File

@@ -0,0 +1,55 @@
"""add celery_task_id to job tables
Revision ID: 09d84a46530f
Revises: y3d4e5f6g7h8
Create Date: 2026-01-11 16:44:59.070110
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '09d84a46530f'
down_revision: Union[str, None] = 'y3d4e5f6g7h8'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Add celery_task_id column to job tracking tables for Celery integration."""
# MarketplaceImportJob
op.add_column('marketplace_import_jobs', sa.Column('celery_task_id', sa.String(length=255), nullable=True))
op.create_index(op.f('ix_marketplace_import_jobs_celery_task_id'), 'marketplace_import_jobs', ['celery_task_id'], unique=False)
# LetzshopHistoricalImportJob
op.add_column('letzshop_historical_import_jobs', sa.Column('celery_task_id', sa.String(length=255), nullable=True))
op.create_index(op.f('ix_letzshop_historical_import_jobs_celery_task_id'), 'letzshop_historical_import_jobs', ['celery_task_id'], unique=False)
# ArchitectureScan
op.add_column('architecture_scans', sa.Column('celery_task_id', sa.String(length=255), nullable=True))
op.create_index(op.f('ix_architecture_scans_celery_task_id'), 'architecture_scans', ['celery_task_id'], unique=False)
# TestRun
op.add_column('test_runs', sa.Column('celery_task_id', sa.String(length=255), nullable=True))
op.create_index(op.f('ix_test_runs_celery_task_id'), 'test_runs', ['celery_task_id'], unique=False)
def downgrade() -> None:
"""Remove celery_task_id column from job tracking tables."""
# TestRun
op.drop_index(op.f('ix_test_runs_celery_task_id'), table_name='test_runs')
op.drop_column('test_runs', 'celery_task_id')
# ArchitectureScan
op.drop_index(op.f('ix_architecture_scans_celery_task_id'), table_name='architecture_scans')
op.drop_column('architecture_scans', 'celery_task_id')
# LetzshopHistoricalImportJob
op.drop_index(op.f('ix_letzshop_historical_import_jobs_celery_task_id'), table_name='letzshop_historical_import_jobs')
op.drop_column('letzshop_historical_import_jobs', 'celery_task_id')
# MarketplaceImportJob
op.drop_index(op.f('ix_marketplace_import_jobs_celery_task_id'), table_name='marketplace_import_jobs')
op.drop_column('marketplace_import_jobs', 'celery_task_id')

View File

@@ -0,0 +1,68 @@
"""add application_logs table for hybrid logging
Revision ID: 0bd9ffaaced1
Revises: 7a7ce92593d5
Create Date: 2025-11-29 12:44:55.427245
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '0bd9ffaaced1'
down_revision: Union[str, None] = '7a7ce92593d5'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create application_logs table
op.create_table(
'application_logs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('timestamp', sa.DateTime(), nullable=False),
sa.Column('level', sa.String(length=20), nullable=False),
sa.Column('logger_name', sa.String(length=200), nullable=False),
sa.Column('module', sa.String(length=200), nullable=True),
sa.Column('function_name', sa.String(length=100), nullable=True),
sa.Column('line_number', sa.Integer(), nullable=True),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('exception_type', sa.String(length=200), nullable=True),
sa.Column('exception_message', sa.Text(), nullable=True),
sa.Column('stack_trace', sa.Text(), nullable=True),
sa.Column('request_id', sa.String(length=100), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('vendor_id', sa.Integer(), nullable=True),
sa.Column('context', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
sa.PrimaryKeyConstraint('id')
)
# Create indexes for better query performance
op.create_index(op.f('ix_application_logs_id'), 'application_logs', ['id'], unique=False)
op.create_index(op.f('ix_application_logs_timestamp'), 'application_logs', ['timestamp'], unique=False)
op.create_index(op.f('ix_application_logs_level'), 'application_logs', ['level'], unique=False)
op.create_index(op.f('ix_application_logs_logger_name'), 'application_logs', ['logger_name'], unique=False)
op.create_index(op.f('ix_application_logs_request_id'), 'application_logs', ['request_id'], unique=False)
op.create_index(op.f('ix_application_logs_user_id'), 'application_logs', ['user_id'], unique=False)
op.create_index(op.f('ix_application_logs_vendor_id'), 'application_logs', ['vendor_id'], unique=False)
def downgrade() -> None:
# Drop indexes
op.drop_index(op.f('ix_application_logs_vendor_id'), table_name='application_logs')
op.drop_index(op.f('ix_application_logs_user_id'), table_name='application_logs')
op.drop_index(op.f('ix_application_logs_request_id'), table_name='application_logs')
op.drop_index(op.f('ix_application_logs_logger_name'), table_name='application_logs')
op.drop_index(op.f('ix_application_logs_level'), table_name='application_logs')
op.drop_index(op.f('ix_application_logs_timestamp'), table_name='application_logs')
op.drop_index(op.f('ix_application_logs_id'), table_name='application_logs')
# Drop table
op.drop_table('application_logs')

View File

@@ -0,0 +1,367 @@
"""add letzshop_vendor_cache table
Revision ID: 1b398cf45e85
Revises: 09d84a46530f
Create Date: 2026-01-13 19:38:45.423378
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision: str = '1b398cf45e85'
down_revision: Union[str, None] = '09d84a46530f'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('letzshop_vendor_cache',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('letzshop_id', sa.String(length=50), nullable=False),
sa.Column('slug', sa.String(length=200), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('company_name', sa.String(length=255), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('description_en', sa.Text(), nullable=True),
sa.Column('description_fr', sa.Text(), nullable=True),
sa.Column('description_de', sa.Text(), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('phone', sa.String(length=50), nullable=True),
sa.Column('fax', sa.String(length=50), nullable=True),
sa.Column('website', sa.String(length=500), nullable=True),
sa.Column('street', sa.String(length=255), nullable=True),
sa.Column('street_number', sa.String(length=50), nullable=True),
sa.Column('city', sa.String(length=100), nullable=True),
sa.Column('zipcode', sa.String(length=20), nullable=True),
sa.Column('country_iso', sa.String(length=5), nullable=True),
sa.Column('latitude', sa.String(length=20), nullable=True),
sa.Column('longitude', sa.String(length=20), nullable=True),
sa.Column('categories', sqlite.JSON(), nullable=True),
sa.Column('background_image_url', sa.String(length=500), nullable=True),
sa.Column('social_media_links', sqlite.JSON(), nullable=True),
sa.Column('opening_hours_en', sa.Text(), nullable=True),
sa.Column('opening_hours_fr', sa.Text(), nullable=True),
sa.Column('opening_hours_de', sa.Text(), nullable=True),
sa.Column('representative_name', sa.String(length=255), nullable=True),
sa.Column('representative_title', sa.String(length=100), nullable=True),
sa.Column('claimed_by_vendor_id', sa.Integer(), nullable=True),
sa.Column('claimed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('last_synced_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('raw_data', sqlite.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['claimed_by_vendor_id'], ['vendors.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_vendor_cache_active', 'letzshop_vendor_cache', ['is_active'], unique=False)
op.create_index('idx_vendor_cache_city', 'letzshop_vendor_cache', ['city'], unique=False)
op.create_index('idx_vendor_cache_claimed', 'letzshop_vendor_cache', ['claimed_by_vendor_id'], unique=False)
op.create_index(op.f('ix_letzshop_vendor_cache_claimed_by_vendor_id'), 'letzshop_vendor_cache', ['claimed_by_vendor_id'], unique=False)
op.create_index(op.f('ix_letzshop_vendor_cache_id'), 'letzshop_vendor_cache', ['id'], unique=False)
op.create_index(op.f('ix_letzshop_vendor_cache_letzshop_id'), 'letzshop_vendor_cache', ['letzshop_id'], unique=True)
op.create_index(op.f('ix_letzshop_vendor_cache_slug'), 'letzshop_vendor_cache', ['slug'], unique=True)
op.drop_constraint('architecture_rules_rule_id_key', 'architecture_rules', type_='unique')
op.alter_column('capacity_snapshots', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('now()'))
op.alter_column('capacity_snapshots', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('now()'))
op.create_index(op.f('ix_features_id'), 'features', ['id'], unique=False)
op.create_index(op.f('ix_features_minimum_tier_id'), 'features', ['minimum_tier_id'], unique=False)
op.create_index('idx_inv_tx_order', 'inventory_transactions', ['order_id'], unique=False)
op.alter_column('invoices', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('invoices', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('letzshop_fulfillment_queue', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('letzshop_fulfillment_queue', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('letzshop_sync_logs', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('letzshop_sync_logs', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('media_files', 'created_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False,
existing_server_default=sa.text('now()'))
op.alter_column('media_files', 'updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
op.alter_column('order_item_exceptions', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('order_item_exceptions', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('order_items', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('order_items', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('orders', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('orders', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.drop_index('ix_password_reset_tokens_customer_id', table_name='password_reset_tokens')
op.create_index(op.f('ix_password_reset_tokens_id'), 'password_reset_tokens', ['id'], unique=False)
op.alter_column('product_media', 'created_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False,
existing_server_default=sa.text('now()'))
op.alter_column('product_media', 'updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
op.alter_column('products', 'is_digital',
existing_type=sa.BOOLEAN(),
nullable=True,
existing_server_default=sa.text('false'))
op.alter_column('products', 'product_type',
existing_type=sa.VARCHAR(length=20),
nullable=True,
existing_server_default=sa.text("'physical'::character varying"))
op.drop_index('idx_product_is_digital', table_name='products')
op.create_index(op.f('ix_products_is_digital'), 'products', ['is_digital'], unique=False)
op.drop_constraint('uq_vendor_email_settings_vendor_id', 'vendor_email_settings', type_='unique')
op.drop_index('ix_vendor_email_templates_lookup', table_name='vendor_email_templates')
op.create_index(op.f('ix_vendor_email_templates_id'), 'vendor_email_templates', ['id'], unique=False)
op.alter_column('vendor_invoice_settings', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('vendor_invoice_settings', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.drop_constraint('vendor_invoice_settings_vendor_id_key', 'vendor_invoice_settings', type_='unique')
op.alter_column('vendor_letzshop_credentials', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('vendor_letzshop_credentials', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.drop_constraint('vendor_letzshop_credentials_vendor_id_key', 'vendor_letzshop_credentials', type_='unique')
op.alter_column('vendor_subscriptions', 'created_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('vendor_subscriptions', 'updated_at',
existing_type=postgresql.TIMESTAMP(timezone=True),
type_=sa.DateTime(),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.drop_constraint('vendor_subscriptions_vendor_id_key', 'vendor_subscriptions', type_='unique')
op.drop_constraint('fk_vendor_subscriptions_tier_id', 'vendor_subscriptions', type_='foreignkey')
op.create_foreign_key(None, 'vendor_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
op.alter_column('vendors', 'storefront_locale',
existing_type=sa.VARCHAR(length=10),
comment=None,
existing_comment='Currency/number formatting locale (NULL = inherit from platform)',
existing_nullable=True)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('vendors', 'storefront_locale',
existing_type=sa.VARCHAR(length=10),
comment='Currency/number formatting locale (NULL = inherit from platform)',
existing_nullable=True)
op.drop_constraint(None, 'vendor_subscriptions', type_='foreignkey')
op.create_foreign_key('fk_vendor_subscriptions_tier_id', 'vendor_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], ondelete='SET NULL')
op.create_unique_constraint('vendor_subscriptions_vendor_id_key', 'vendor_subscriptions', ['vendor_id'])
op.alter_column('vendor_subscriptions', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('vendor_subscriptions', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.create_unique_constraint('vendor_letzshop_credentials_vendor_id_key', 'vendor_letzshop_credentials', ['vendor_id'])
op.alter_column('vendor_letzshop_credentials', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('vendor_letzshop_credentials', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.create_unique_constraint('vendor_invoice_settings_vendor_id_key', 'vendor_invoice_settings', ['vendor_id'])
op.alter_column('vendor_invoice_settings', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('vendor_invoice_settings', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.drop_index(op.f('ix_vendor_email_templates_id'), table_name='vendor_email_templates')
op.create_index('ix_vendor_email_templates_lookup', 'vendor_email_templates', ['vendor_id', 'template_code', 'language'], unique=False)
op.create_unique_constraint('uq_vendor_email_settings_vendor_id', 'vendor_email_settings', ['vendor_id'])
op.drop_index(op.f('ix_products_is_digital'), table_name='products')
op.create_index('idx_product_is_digital', 'products', ['is_digital'], unique=False)
op.alter_column('products', 'product_type',
existing_type=sa.VARCHAR(length=20),
nullable=False,
existing_server_default=sa.text("'physical'::character varying"))
op.alter_column('products', 'is_digital',
existing_type=sa.BOOLEAN(),
nullable=False,
existing_server_default=sa.text('false'))
op.alter_column('product_media', 'updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
op.alter_column('product_media', 'created_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True,
existing_server_default=sa.text('now()'))
op.drop_index(op.f('ix_password_reset_tokens_id'), table_name='password_reset_tokens')
op.create_index('ix_password_reset_tokens_customer_id', 'password_reset_tokens', ['customer_id'], unique=False)
op.alter_column('orders', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('orders', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('order_items', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('order_items', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('order_item_exceptions', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('order_item_exceptions', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('media_files', 'updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
op.alter_column('media_files', 'created_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True,
existing_server_default=sa.text('now()'))
op.alter_column('letzshop_sync_logs', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('letzshop_sync_logs', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('letzshop_fulfillment_queue', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('letzshop_fulfillment_queue', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('invoices', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.alter_column('invoices', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('CURRENT_TIMESTAMP'))
op.drop_index('idx_inv_tx_order', table_name='inventory_transactions')
op.drop_index(op.f('ix_features_minimum_tier_id'), table_name='features')
op.drop_index(op.f('ix_features_id'), table_name='features')
op.alter_column('capacity_snapshots', 'updated_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('now()'))
op.alter_column('capacity_snapshots', 'created_at',
existing_type=sa.DateTime(),
type_=postgresql.TIMESTAMP(timezone=True),
existing_nullable=False,
existing_server_default=sa.text('now()'))
op.create_unique_constraint('architecture_rules_rule_id_key', 'architecture_rules', ['rule_id'])
op.drop_index(op.f('ix_letzshop_vendor_cache_slug'), table_name='letzshop_vendor_cache')
op.drop_index(op.f('ix_letzshop_vendor_cache_letzshop_id'), table_name='letzshop_vendor_cache')
op.drop_index(op.f('ix_letzshop_vendor_cache_id'), table_name='letzshop_vendor_cache')
op.drop_index(op.f('ix_letzshop_vendor_cache_claimed_by_vendor_id'), table_name='letzshop_vendor_cache')
op.drop_index('idx_vendor_cache_claimed', table_name='letzshop_vendor_cache')
op.drop_index('idx_vendor_cache_city', table_name='letzshop_vendor_cache')
op.drop_index('idx_vendor_cache_active', table_name='letzshop_vendor_cache')
op.drop_table('letzshop_vendor_cache')
# ### end Alembic commands ###

View File

@@ -0,0 +1,57 @@
"""add_letzshop_historical_import_jobs_table
Revision ID: 204273a59d73
Revises: cb88bc9b5f86
Create Date: 2025-12-19 05:40:53.463341
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# Removed: from sqlalchemy.dialects import sqlite (using sa.JSON for PostgreSQL)
# revision identifiers, used by Alembic.
revision: str = '204273a59d73'
down_revision: Union[str, None] = 'cb88bc9b5f86'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table('letzshop_historical_import_jobs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('vendor_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('status', sa.String(length=50), nullable=False),
sa.Column('current_phase', sa.String(length=20), nullable=True),
sa.Column('current_page', sa.Integer(), nullable=True),
sa.Column('total_pages', sa.Integer(), nullable=True),
sa.Column('shipments_fetched', sa.Integer(), nullable=True),
sa.Column('orders_processed', sa.Integer(), nullable=True),
sa.Column('orders_imported', sa.Integer(), nullable=True),
sa.Column('orders_updated', sa.Integer(), nullable=True),
sa.Column('orders_skipped', sa.Integer(), nullable=True),
sa.Column('products_matched', sa.Integer(), nullable=True),
sa.Column('products_not_found', sa.Integer(), nullable=True),
sa.Column('confirmed_stats', sa.JSON(), nullable=True),
sa.Column('declined_stats', sa.JSON(), nullable=True),
sa.Column('error_message', sa.Text(), nullable=True),
sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_historical_import_vendor', 'letzshop_historical_import_jobs', ['vendor_id', 'status'], unique=False)
op.create_index(op.f('ix_letzshop_historical_import_jobs_id'), 'letzshop_historical_import_jobs', ['id'], unique=False)
op.create_index(op.f('ix_letzshop_historical_import_jobs_vendor_id'), 'letzshop_historical_import_jobs', ['vendor_id'], unique=False)
def downgrade() -> None:
op.drop_index(op.f('ix_letzshop_historical_import_jobs_vendor_id'), table_name='letzshop_historical_import_jobs')
op.drop_index(op.f('ix_letzshop_historical_import_jobs_id'), table_name='letzshop_historical_import_jobs')
op.drop_index('idx_historical_import_vendor', table_name='letzshop_historical_import_jobs')
op.drop_table('letzshop_historical_import_jobs')

View File

@@ -0,0 +1,27 @@
"""add_order_date_to_letzshop_orders
Revision ID: 2362c2723a93
Revises: 204273a59d73
Create Date: 2025-12-19 08:46:23.731912
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '2362c2723a93'
down_revision: Union[str, None] = '204273a59d73'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Add order_date column to letzshop_orders table
op.add_column('letzshop_orders', sa.Column('order_date', sa.DateTime(timezone=True), nullable=True))
def downgrade() -> None:
op.drop_column('letzshop_orders', 'order_date')

View File

@@ -0,0 +1,37 @@
"""add contact fields to vendor
Revision ID: 28d44d503cac
Revises: 9f3a25ea4991
Create Date: 2025-12-03 22:26:02.161087
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '28d44d503cac'
down_revision: Union[str, None] = '9f3a25ea4991'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add optional contact/identity fields to the ``vendors`` table.

    All columns are nullable: they let a vendor override company-level
    branding defaults without requiring a backfill of existing rows.
    """
    contact_fields = (
        ('contact_email', sa.String(255)),
        ('contact_phone', sa.String(50)),
        ('website', sa.String(255)),
        ('business_address', sa.Text()),
        ('tax_number', sa.String(100)),
    )
    for field_name, field_type in contact_fields:
        op.add_column('vendors', sa.Column(field_name, field_type, nullable=True))
def downgrade() -> None:
    """Drop the vendor contact fields, in reverse of the order they were added."""
    for field_name in (
        'tax_number',
        'business_address',
        'website',
        'contact_phone',
        'contact_email',
    ):
        op.drop_column('vendors', field_name)

View File

@@ -0,0 +1,419 @@
"""add_subscription_billing_tables
Revision ID: 2953ed10d22c
Revises: e1bfb453fbe9
Create Date: 2025-12-25 18:29:34.167773
"""
from datetime import datetime
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# Removed: from sqlalchemy.dialects import sqlite (using sa.JSON for PostgreSQL)
# revision identifiers, used by Alembic.
revision: str = '2953ed10d22c'
down_revision: Union[str, None] = 'e1bfb453fbe9'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the subscription/billing schema and seed its catalog data.

    Creates five tables (subscription_tiers, addon_products, billing_history,
    vendor_addons, stripe_webhook_events), adds Stripe/payment-tracking
    columns to vendor_subscriptions, then bulk-inserts the four launch tiers
    and the initial add-on product catalog.
    """
    # =========================================================================
    # Create new subscription and billing tables
    # =========================================================================
    # subscription_tiers - Database-driven tier definitions
    # NULL limit columns (orders_per_month, products_limit, ...) are treated
    # as "unlimited" by the seed data below — presumably by application code
    # too; verify against the tier-enforcement logic.
    op.create_table('subscription_tiers',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(length=30), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('price_monthly_cents', sa.Integer(), nullable=False),
        sa.Column('price_annual_cents', sa.Integer(), nullable=True),
        sa.Column('orders_per_month', sa.Integer(), nullable=True),
        sa.Column('products_limit', sa.Integer(), nullable=True),
        sa.Column('team_members', sa.Integer(), nullable=True),
        sa.Column('order_history_months', sa.Integer(), nullable=True),
        sa.Column('features', sa.JSON(), nullable=True),
        sa.Column('stripe_product_id', sa.String(length=100), nullable=True),
        sa.Column('stripe_price_monthly_id', sa.String(length=100), nullable=True),
        sa.Column('stripe_price_annual_id', sa.String(length=100), nullable=True),
        sa.Column('display_order', sa.Integer(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('is_public', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_subscription_tiers_code'), 'subscription_tiers', ['code'], unique=True)
    op.create_index(op.f('ix_subscription_tiers_id'), 'subscription_tiers', ['id'], unique=False)
    # addon_products - Purchasable add-ons (domains, SSL, email)
    op.create_table('addon_products',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(length=50), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('category', sa.String(length=50), nullable=False),
        sa.Column('price_cents', sa.Integer(), nullable=False),
        sa.Column('billing_period', sa.String(length=20), nullable=False),
        sa.Column('quantity_unit', sa.String(length=50), nullable=True),
        sa.Column('quantity_value', sa.Integer(), nullable=True),
        sa.Column('stripe_product_id', sa.String(length=100), nullable=True),
        sa.Column('stripe_price_id', sa.String(length=100), nullable=True),
        sa.Column('display_order', sa.Integer(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_addon_products_category'), 'addon_products', ['category'], unique=False)
    op.create_index(op.f('ix_addon_products_code'), 'addon_products', ['code'], unique=True)
    op.create_index(op.f('ix_addon_products_id'), 'addon_products', ['id'], unique=False)
    # billing_history - Invoice and payment history
    # All monetary amounts are stored as integer cents; currency is ISO-4217.
    op.create_table('billing_history',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        sa.Column('stripe_invoice_id', sa.String(length=100), nullable=True),
        sa.Column('stripe_payment_intent_id', sa.String(length=100), nullable=True),
        sa.Column('invoice_number', sa.String(length=50), nullable=True),
        sa.Column('invoice_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('due_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('subtotal_cents', sa.Integer(), nullable=False),
        sa.Column('tax_cents', sa.Integer(), nullable=False),
        sa.Column('total_cents', sa.Integer(), nullable=False),
        sa.Column('amount_paid_cents', sa.Integer(), nullable=False),
        sa.Column('currency', sa.String(length=3), nullable=False),
        sa.Column('status', sa.String(length=20), nullable=False),
        sa.Column('invoice_pdf_url', sa.String(length=500), nullable=True),
        sa.Column('hosted_invoice_url', sa.String(length=500), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('line_items', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_billing_status', 'billing_history', ['vendor_id', 'status'], unique=False)
    op.create_index('idx_billing_vendor_date', 'billing_history', ['vendor_id', 'invoice_date'], unique=False)
    op.create_index(op.f('ix_billing_history_id'), 'billing_history', ['id'], unique=False)
    op.create_index(op.f('ix_billing_history_status'), 'billing_history', ['status'], unique=False)
    op.create_index(op.f('ix_billing_history_stripe_invoice_id'), 'billing_history', ['stripe_invoice_id'], unique=True)
    op.create_index(op.f('ix_billing_history_vendor_id'), 'billing_history', ['vendor_id'], unique=False)
    # vendor_addons - Add-ons purchased by vendor
    op.create_table('vendor_addons',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        sa.Column('addon_product_id', sa.Integer(), nullable=False),
        sa.Column('status', sa.String(length=20), nullable=False),
        sa.Column('domain_name', sa.String(length=255), nullable=True),
        sa.Column('quantity', sa.Integer(), nullable=False),
        sa.Column('stripe_subscription_item_id', sa.String(length=100), nullable=True),
        sa.Column('period_start', sa.DateTime(timezone=True), nullable=True),
        sa.Column('period_end', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['addon_product_id'], ['addon_products.id'], ),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_vendor_addon_product', 'vendor_addons', ['vendor_id', 'addon_product_id'], unique=False)
    op.create_index('idx_vendor_addon_status', 'vendor_addons', ['vendor_id', 'status'], unique=False)
    op.create_index(op.f('ix_vendor_addons_addon_product_id'), 'vendor_addons', ['addon_product_id'], unique=False)
    op.create_index(op.f('ix_vendor_addons_domain_name'), 'vendor_addons', ['domain_name'], unique=False)
    op.create_index(op.f('ix_vendor_addons_id'), 'vendor_addons', ['id'], unique=False)
    op.create_index(op.f('ix_vendor_addons_status'), 'vendor_addons', ['status'], unique=False)
    op.create_index(op.f('ix_vendor_addons_vendor_id'), 'vendor_addons', ['vendor_id'], unique=False)
    # stripe_webhook_events - Webhook idempotency tracking
    # event_id is unique so a redelivered Stripe event is detected and skipped.
    op.create_table('stripe_webhook_events',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.String(length=100), nullable=False),
        sa.Column('event_type', sa.String(length=100), nullable=False),
        sa.Column('status', sa.String(length=20), nullable=False),
        sa.Column('processed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('payload_encrypted', sa.Text(), nullable=True),
        sa.Column('vendor_id', sa.Integer(), nullable=True),
        sa.Column('subscription_id', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['subscription_id'], ['vendor_subscriptions.id'], ),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_webhook_event_type_status', 'stripe_webhook_events', ['event_type', 'status'], unique=False)
    op.create_index(op.f('ix_stripe_webhook_events_event_id'), 'stripe_webhook_events', ['event_id'], unique=True)
    op.create_index(op.f('ix_stripe_webhook_events_event_type'), 'stripe_webhook_events', ['event_type'], unique=False)
    op.create_index(op.f('ix_stripe_webhook_events_id'), 'stripe_webhook_events', ['id'], unique=False)
    op.create_index(op.f('ix_stripe_webhook_events_status'), 'stripe_webhook_events', ['status'], unique=False)
    op.create_index(op.f('ix_stripe_webhook_events_subscription_id'), 'stripe_webhook_events', ['subscription_id'], unique=False)
    op.create_index(op.f('ix_stripe_webhook_events_vendor_id'), 'stripe_webhook_events', ['vendor_id'], unique=False)
    # =========================================================================
    # Add new columns to vendor_subscriptions
    # =========================================================================
    # All nullable (or server-defaulted) so the ALTERs succeed on populated tables.
    op.add_column('vendor_subscriptions', sa.Column('stripe_price_id', sa.String(length=100), nullable=True))
    op.add_column('vendor_subscriptions', sa.Column('stripe_payment_method_id', sa.String(length=100), nullable=True))
    op.add_column('vendor_subscriptions', sa.Column('proration_behavior', sa.String(length=50), nullable=True))
    op.add_column('vendor_subscriptions', sa.Column('scheduled_tier_change', sa.String(length=30), nullable=True))
    op.add_column('vendor_subscriptions', sa.Column('scheduled_change_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('vendor_subscriptions', sa.Column('payment_retry_count', sa.Integer(), server_default='0', nullable=False))
    op.add_column('vendor_subscriptions', sa.Column('last_payment_error', sa.Text(), nullable=True))
    # =========================================================================
    # Seed subscription tiers
    # =========================================================================
    # NOTE(review): datetime.utcnow() is deprecated since Python 3.12; kept
    # here because created_at/updated_at are naive sa.DateTime() columns —
    # confirm before switching to timezone-aware datetimes.
    now = datetime.utcnow()
    # Lightweight table stub for bulk_insert (avoids importing ORM models,
    # which could drift from this migration's snapshot of the schema).
    subscription_tiers = sa.table(
        'subscription_tiers',
        sa.column('code', sa.String),
        sa.column('name', sa.String),
        sa.column('description', sa.Text),
        sa.column('price_monthly_cents', sa.Integer),
        sa.column('price_annual_cents', sa.Integer),
        sa.column('orders_per_month', sa.Integer),
        sa.column('products_limit', sa.Integer),
        sa.column('team_members', sa.Integer),
        sa.column('order_history_months', sa.Integer),
        sa.column('features', sa.JSON),
        sa.column('display_order', sa.Integer),
        sa.column('is_active', sa.Boolean),
        sa.column('is_public', sa.Boolean),
        sa.column('created_at', sa.DateTime),
        sa.column('updated_at', sa.DateTime),
    )
    # Four launch tiers; None limits mean "unlimited". Annual price = 10x
    # monthly (two months free). Enterprise is not publicly listed.
    op.bulk_insert(subscription_tiers, [
        {
            'code': 'essential',
            'name': 'Essential',
            'description': 'Perfect for solo vendors getting started with Letzshop',
            'price_monthly_cents': 4900,
            'price_annual_cents': 49000,
            'orders_per_month': 100,
            'products_limit': 200,
            'team_members': 1,
            'order_history_months': 6,
            'features': ['letzshop_sync', 'inventory_basic', 'invoice_lu', 'customer_view'],
            'display_order': 1,
            'is_active': True,
            'is_public': True,
            'created_at': now,
            'updated_at': now,
        },
        {
            'code': 'professional',
            'name': 'Professional',
            'description': 'For active multi-channel vendors shipping EU-wide',
            'price_monthly_cents': 9900,
            'price_annual_cents': 99000,
            'orders_per_month': 500,
            'products_limit': None,
            'team_members': 3,
            'order_history_months': 24,
            'features': [
                'letzshop_sync', 'inventory_locations', 'inventory_purchase_orders',
                'invoice_lu', 'invoice_eu_vat', 'customer_view', 'customer_export'
            ],
            'display_order': 2,
            'is_active': True,
            'is_public': True,
            'created_at': now,
            'updated_at': now,
        },
        {
            'code': 'business',
            'name': 'Business',
            'description': 'For high-volume vendors with teams and data-driven operations',
            'price_monthly_cents': 19900,
            'price_annual_cents': 199000,
            'orders_per_month': 2000,
            'products_limit': None,
            'team_members': 10,
            'order_history_months': None,
            'features': [
                'letzshop_sync', 'inventory_locations', 'inventory_purchase_orders',
                'invoice_lu', 'invoice_eu_vat', 'invoice_bulk', 'customer_view',
                'customer_export', 'analytics_dashboard', 'accounting_export',
                'api_access', 'automation_rules', 'team_roles'
            ],
            'display_order': 3,
            'is_active': True,
            'is_public': True,
            'created_at': now,
            'updated_at': now,
        },
        {
            'code': 'enterprise',
            'name': 'Enterprise',
            'description': 'Custom solutions for large operations and agencies',
            'price_monthly_cents': 39900,
            'price_annual_cents': None,
            'orders_per_month': None,
            'products_limit': None,
            'team_members': None,
            'order_history_months': None,
            'features': [
                'letzshop_sync', 'inventory_locations', 'inventory_purchase_orders',
                'invoice_lu', 'invoice_eu_vat', 'invoice_bulk', 'customer_view',
                'customer_export', 'analytics_dashboard', 'accounting_export',
                'api_access', 'automation_rules', 'team_roles', 'white_label',
                'multi_vendor', 'custom_integrations', 'sla_guarantee', 'dedicated_support'
            ],
            'display_order': 4,
            'is_active': True,
            'is_public': False,
            'created_at': now,
            'updated_at': now,
        },
    ])
    # =========================================================================
    # Seed add-on products
    # =========================================================================
    addon_products = sa.table(
        'addon_products',
        sa.column('code', sa.String),
        sa.column('name', sa.String),
        sa.column('description', sa.Text),
        sa.column('category', sa.String),
        sa.column('price_cents', sa.Integer),
        sa.column('billing_period', sa.String),
        sa.column('quantity_unit', sa.String),
        sa.column('quantity_value', sa.Integer),
        sa.column('display_order', sa.Integer),
        sa.column('is_active', sa.Boolean),
        sa.column('created_at', sa.DateTime),
        sa.column('updated_at', sa.DateTime),
    )
    # Initial catalog: one annual domain add-on, three monthly email bundles,
    # and a monthly storage bundle.
    op.bulk_insert(addon_products, [
        {
            'code': 'domain',
            'name': 'Custom Domain',
            'description': 'Connect your own domain with SSL certificate included',
            'category': 'domain',
            'price_cents': 1500,
            'billing_period': 'annual',
            'quantity_unit': None,
            'quantity_value': None,
            'display_order': 1,
            'is_active': True,
            'created_at': now,
            'updated_at': now,
        },
        {
            'code': 'email_5',
            'name': '5 Email Addresses',
            'description': 'Professional email addresses on your domain',
            'category': 'email',
            'price_cents': 500,
            'billing_period': 'monthly',
            'quantity_unit': 'emails',
            'quantity_value': 5,
            'display_order': 2,
            'is_active': True,
            'created_at': now,
            'updated_at': now,
        },
        {
            'code': 'email_10',
            'name': '10 Email Addresses',
            'description': 'Professional email addresses on your domain',
            'category': 'email',
            'price_cents': 900,
            'billing_period': 'monthly',
            'quantity_unit': 'emails',
            'quantity_value': 10,
            'display_order': 3,
            'is_active': True,
            'created_at': now,
            'updated_at': now,
        },
        {
            'code': 'email_25',
            'name': '25 Email Addresses',
            'description': 'Professional email addresses on your domain',
            'category': 'email',
            'price_cents': 1900,
            'billing_period': 'monthly',
            'quantity_unit': 'emails',
            'quantity_value': 25,
            'display_order': 4,
            'is_active': True,
            'created_at': now,
            'updated_at': now,
        },
        {
            'code': 'storage_10gb',
            'name': 'Additional Storage (10GB)',
            'description': 'Extra storage for product images and files',
            'category': 'storage',
            'price_cents': 500,
            'billing_period': 'monthly',
            'quantity_unit': 'GB',
            'quantity_value': 10,
            'display_order': 5,
            'is_active': True,
            'created_at': now,
            'updated_at': now,
        },
    ])
def downgrade() -> None:
    """Revert the subscription/billing schema.

    Strips the columns added to ``vendor_subscriptions``, then drops each
    new table (indexes first) in reverse creation order.
    """
    # 1) Columns added to vendor_subscriptions, removed newest-first.
    for column_name in (
        'last_payment_error',
        'payment_retry_count',
        'scheduled_change_at',
        'scheduled_tier_change',
        'proration_behavior',
        'stripe_payment_method_id',
        'stripe_price_id',
    ):
        op.drop_column('vendor_subscriptions', column_name)
    # 2) Tables in reverse creation order; each table's indexes go first.
    drop_plan = (
        ('stripe_webhook_events', (
            op.f('ix_stripe_webhook_events_vendor_id'),
            op.f('ix_stripe_webhook_events_subscription_id'),
            op.f('ix_stripe_webhook_events_status'),
            op.f('ix_stripe_webhook_events_id'),
            op.f('ix_stripe_webhook_events_event_type'),
            op.f('ix_stripe_webhook_events_event_id'),
            'idx_webhook_event_type_status',
        )),
        ('vendor_addons', (
            op.f('ix_vendor_addons_vendor_id'),
            op.f('ix_vendor_addons_status'),
            op.f('ix_vendor_addons_id'),
            op.f('ix_vendor_addons_domain_name'),
            op.f('ix_vendor_addons_addon_product_id'),
            'idx_vendor_addon_status',
            'idx_vendor_addon_product',
        )),
        ('billing_history', (
            op.f('ix_billing_history_vendor_id'),
            op.f('ix_billing_history_stripe_invoice_id'),
            op.f('ix_billing_history_status'),
            op.f('ix_billing_history_id'),
            'idx_billing_vendor_date',
            'idx_billing_status',
        )),
        ('addon_products', (
            op.f('ix_addon_products_id'),
            op.f('ix_addon_products_code'),
            op.f('ix_addon_products_category'),
        )),
        ('subscription_tiers', (
            op.f('ix_subscription_tiers_id'),
            op.f('ix_subscription_tiers_code'),
        )),
    )
    for table_name, index_names in drop_plan:
        for index_name in index_names:
            op.drop_index(index_name, table_name=table_name)
        op.drop_table(table_name)

View File

@@ -0,0 +1,44 @@
"""add_letzshop_vendor_fields_and_trial_tracking
Revision ID: 404b3e2d2865
Revises: l0a1b2c3d4e5
Create Date: 2025-12-27 09:49:44.715243
Adds:
- vendors.letzshop_vendor_id - Link to Letzshop marketplace profile
- vendors.letzshop_vendor_slug - Letzshop shop URL slug
- vendor_subscriptions.card_collected_at - Track when card was collected for trial
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '404b3e2d2865'
down_revision: Union[str, None] = 'l0a1b2c3d4e5'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Link vendors to their Letzshop identity and add trial card tracking.

    All new columns are nullable, so no backfill is needed for existing rows.
    """
    # Letzshop marketplace identity: unique external id plus a shop URL slug.
    identity_column = sa.Column('letzshop_vendor_id', sa.String(length=100), nullable=True)
    slug_column = sa.Column('letzshop_vendor_slug', sa.String(length=200), nullable=True)
    op.add_column('vendors', identity_column)
    op.add_column('vendors', slug_column)
    op.create_index(op.f('ix_vendors_letzshop_vendor_id'), 'vendors', ['letzshop_vendor_id'], unique=True)
    op.create_index(op.f('ix_vendors_letzshop_vendor_slug'), 'vendors', ['letzshop_vendor_slug'], unique=False)
    # Trial tracking: timestamp of when the payment card was collected.
    card_column = sa.Column('card_collected_at', sa.DateTime(timezone=True), nullable=True)
    op.add_column('vendor_subscriptions', card_column)
def downgrade() -> None:
    """Undo :func:`upgrade`, removing objects in reverse creation order."""
    # Trial tracking column first.
    op.drop_column('vendor_subscriptions', 'card_collected_at')
    # Then Letzshop identity: indexes before their columns.
    for index_name in (
        op.f('ix_vendors_letzshop_vendor_slug'),
        op.f('ix_vendors_letzshop_vendor_id'),
    ):
        op.drop_index(index_name, table_name='vendors')
    for column_name in ('letzshop_vendor_slug', 'letzshop_vendor_id'):
        op.drop_column('vendors', column_name)

View File

@@ -0,0 +1,908 @@
"""Initial migration - all tables
Revision ID: 4951b2e50581
Revises:
Create Date: 2025-10-27 22:28:33.137564
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "4951b2e50581"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"marketplace_products",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("marketplace_product_id", sa.String(), nullable=False),
sa.Column("title", sa.String(), nullable=False),
sa.Column("description", sa.String(), nullable=True),
sa.Column("link", sa.String(), nullable=True),
sa.Column("image_link", sa.String(), nullable=True),
sa.Column("availability", sa.String(), nullable=True),
sa.Column("price", sa.String(), nullable=True),
sa.Column("brand", sa.String(), nullable=True),
sa.Column("gtin", sa.String(), nullable=True),
sa.Column("mpn", sa.String(), nullable=True),
sa.Column("condition", sa.String(), nullable=True),
sa.Column("adult", sa.String(), nullable=True),
sa.Column("multipack", sa.Integer(), nullable=True),
sa.Column("is_bundle", sa.String(), nullable=True),
sa.Column("age_group", sa.String(), nullable=True),
sa.Column("color", sa.String(), nullable=True),
sa.Column("gender", sa.String(), nullable=True),
sa.Column("material", sa.String(), nullable=True),
sa.Column("pattern", sa.String(), nullable=True),
sa.Column("size", sa.String(), nullable=True),
sa.Column("size_type", sa.String(), nullable=True),
sa.Column("size_system", sa.String(), nullable=True),
sa.Column("item_group_id", sa.String(), nullable=True),
sa.Column("google_product_category", sa.String(), nullable=True),
sa.Column("product_type", sa.String(), nullable=True),
sa.Column("custom_label_0", sa.String(), nullable=True),
sa.Column("custom_label_1", sa.String(), nullable=True),
sa.Column("custom_label_2", sa.String(), nullable=True),
sa.Column("custom_label_3", sa.String(), nullable=True),
sa.Column("custom_label_4", sa.String(), nullable=True),
sa.Column("additional_image_link", sa.String(), nullable=True),
sa.Column("sale_price", sa.String(), nullable=True),
sa.Column("unit_pricing_measure", sa.String(), nullable=True),
sa.Column("unit_pricing_base_measure", sa.String(), nullable=True),
sa.Column("identifier_exists", sa.String(), nullable=True),
sa.Column("shipping", sa.String(), nullable=True),
sa.Column("currency", sa.String(), nullable=True),
sa.Column("marketplace", sa.String(), nullable=True),
sa.Column("vendor_name", sa.String(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
"idx_marketplace_brand",
"marketplace_products",
["marketplace", "brand"],
unique=False,
)
op.create_index(
"idx_marketplace_vendor",
"marketplace_products",
["marketplace", "vendor_name"],
unique=False,
)
op.create_index(
op.f("ix_marketplace_products_availability"),
"marketplace_products",
["availability"],
unique=False,
)
op.create_index(
op.f("ix_marketplace_products_brand"),
"marketplace_products",
["brand"],
unique=False,
)
op.create_index(
op.f("ix_marketplace_products_google_product_category"),
"marketplace_products",
["google_product_category"],
unique=False,
)
op.create_index(
op.f("ix_marketplace_products_gtin"),
"marketplace_products",
["gtin"],
unique=False,
)
op.create_index(
op.f("ix_marketplace_products_id"), "marketplace_products", ["id"], unique=False
)
op.create_index(
op.f("ix_marketplace_products_marketplace"),
"marketplace_products",
["marketplace"],
unique=False,
)
op.create_index(
op.f("ix_marketplace_products_marketplace_product_id"),
"marketplace_products",
["marketplace_product_id"],
unique=True,
)
op.create_index(
op.f("ix_marketplace_products_vendor_name"),
"marketplace_products",
["vendor_name"],
unique=False,
)
op.create_table(
"users",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("email", sa.String(), nullable=False),
sa.Column("username", sa.String(), nullable=False),
sa.Column("first_name", sa.String(), nullable=True),
sa.Column("last_name", sa.String(), nullable=True),
sa.Column("hashed_password", sa.String(), nullable=False),
sa.Column("role", sa.String(), nullable=False),
sa.Column("is_active", sa.Boolean(), nullable=False),
sa.Column("last_login", sa.DateTime(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
op.create_index(op.f("ix_users_id"), "users", ["id"], unique=False)
op.create_index(op.f("ix_users_username"), "users", ["username"], unique=True)
op.create_table(
"admin_audit_logs",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("admin_user_id", sa.Integer(), nullable=False),
sa.Column("action", sa.String(length=100), nullable=False),
sa.Column("target_type", sa.String(length=50), nullable=False),
sa.Column("target_id", sa.String(length=100), nullable=False),
sa.Column("details", sa.JSON(), nullable=True),
sa.Column("ip_address", sa.String(length=45), nullable=True),
sa.Column("user_agent", sa.Text(), nullable=True),
sa.Column("request_id", sa.String(length=100), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["admin_user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_admin_audit_logs_action"), "admin_audit_logs", ["action"], unique=False
)
op.create_index(
op.f("ix_admin_audit_logs_admin_user_id"),
"admin_audit_logs",
["admin_user_id"],
unique=False,
)
op.create_index(
op.f("ix_admin_audit_logs_id"), "admin_audit_logs", ["id"], unique=False
)
op.create_index(
op.f("ix_admin_audit_logs_target_id"),
"admin_audit_logs",
["target_id"],
unique=False,
)
op.create_index(
op.f("ix_admin_audit_logs_target_type"),
"admin_audit_logs",
["target_type"],
unique=False,
)
op.create_table(
"admin_notifications",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("type", sa.String(length=50), nullable=False),
sa.Column("priority", sa.String(length=20), nullable=True),
sa.Column("title", sa.String(length=200), nullable=False),
sa.Column("message", sa.Text(), nullable=False),
sa.Column("is_read", sa.Boolean(), nullable=True),
sa.Column("read_at", sa.DateTime(), nullable=True),
sa.Column("read_by_user_id", sa.Integer(), nullable=True),
sa.Column("action_required", sa.Boolean(), nullable=True),
sa.Column("action_url", sa.String(length=500), nullable=True),
sa.Column("notification_metadata", sa.JSON(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["read_by_user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_admin_notifications_action_required"),
"admin_notifications",
["action_required"],
unique=False,
)
op.create_index(
op.f("ix_admin_notifications_id"), "admin_notifications", ["id"], unique=False
)
op.create_index(
op.f("ix_admin_notifications_is_read"),
"admin_notifications",
["is_read"],
unique=False,
)
op.create_index(
op.f("ix_admin_notifications_priority"),
"admin_notifications",
["priority"],
unique=False,
)
op.create_index(
op.f("ix_admin_notifications_type"),
"admin_notifications",
["type"],
unique=False,
)
op.create_table(
"admin_sessions",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("admin_user_id", sa.Integer(), nullable=False),
sa.Column("session_token", sa.String(length=255), nullable=False),
sa.Column("ip_address", sa.String(length=45), nullable=False),
sa.Column("user_agent", sa.Text(), nullable=True),
sa.Column("login_at", sa.DateTime(), nullable=False),
sa.Column("last_activity_at", sa.DateTime(), nullable=False),
sa.Column("logout_at", sa.DateTime(), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=True),
sa.Column("logout_reason", sa.String(length=50), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["admin_user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_admin_sessions_admin_user_id"),
"admin_sessions",
["admin_user_id"],
unique=False,
)
op.create_index(
op.f("ix_admin_sessions_id"), "admin_sessions", ["id"], unique=False
)
op.create_index(
op.f("ix_admin_sessions_is_active"),
"admin_sessions",
["is_active"],
unique=False,
)
op.create_index(
op.f("ix_admin_sessions_login_at"), "admin_sessions", ["login_at"], unique=False
)
op.create_index(
op.f("ix_admin_sessions_session_token"),
"admin_sessions",
["session_token"],
unique=True,
)
op.create_table(
"admin_settings",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("key", sa.String(length=100), nullable=False),
sa.Column("value", sa.Text(), nullable=False),
sa.Column("value_type", sa.String(length=20), nullable=True),
sa.Column("category", sa.String(length=50), nullable=True),
sa.Column("description", sa.Text(), nullable=True),
sa.Column("is_encrypted", sa.Boolean(), nullable=True),
sa.Column("is_public", sa.Boolean(), nullable=True),
sa.Column("last_modified_by_user_id", sa.Integer(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["last_modified_by_user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_admin_settings_category"), "admin_settings", ["category"], unique=False
)
op.create_index(
op.f("ix_admin_settings_id"), "admin_settings", ["id"], unique=False
)
op.create_index(
op.f("ix_admin_settings_key"), "admin_settings", ["key"], unique=True
)
op.create_table(
"platform_alerts",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("alert_type", sa.String(length=50), nullable=False),
sa.Column("severity", sa.String(length=20), nullable=False),
sa.Column("title", sa.String(length=200), nullable=False),
sa.Column("description", sa.Text(), nullable=True),
sa.Column("affected_vendors", sa.JSON(), nullable=True),
sa.Column("affected_systems", sa.JSON(), nullable=True),
sa.Column("is_resolved", sa.Boolean(), nullable=True),
sa.Column("resolved_at", sa.DateTime(), nullable=True),
sa.Column("resolved_by_user_id", sa.Integer(), nullable=True),
sa.Column("resolution_notes", sa.Text(), nullable=True),
sa.Column("auto_generated", sa.Boolean(), nullable=True),
sa.Column("occurrence_count", sa.Integer(), nullable=True),
sa.Column("first_occurred_at", sa.DateTime(), nullable=False),
sa.Column("last_occurred_at", sa.DateTime(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["resolved_by_user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_platform_alerts_alert_type"),
"platform_alerts",
["alert_type"],
unique=False,
)
op.create_index(
op.f("ix_platform_alerts_id"), "platform_alerts", ["id"], unique=False
)
op.create_index(
op.f("ix_platform_alerts_is_resolved"),
"platform_alerts",
["is_resolved"],
unique=False,
)
op.create_index(
op.f("ix_platform_alerts_severity"),
"platform_alerts",
["severity"],
unique=False,
)
op.create_table(
"vendors",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_code", sa.String(), nullable=False),
sa.Column("subdomain", sa.String(length=100), nullable=False),
sa.Column("name", sa.String(), nullable=False),
sa.Column("description", sa.Text(), nullable=True),
sa.Column("owner_user_id", sa.Integer(), nullable=False),
sa.Column("contact_email", sa.String(), nullable=True),
sa.Column("contact_phone", sa.String(), nullable=True),
sa.Column("website", sa.String(), nullable=True),
sa.Column("letzshop_csv_url_fr", sa.String(), nullable=True),
sa.Column("letzshop_csv_url_en", sa.String(), nullable=True),
sa.Column("letzshop_csv_url_de", sa.String(), nullable=True),
sa.Column("business_address", sa.Text(), nullable=True),
sa.Column("tax_number", sa.String(), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=True),
sa.Column("is_verified", sa.Boolean(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["owner_user_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_vendors_id"), "vendors", ["id"], unique=False)
op.create_index(op.f("ix_vendors_subdomain"), "vendors", ["subdomain"], unique=True)
op.create_index(
op.f("ix_vendors_vendor_code"), "vendors", ["vendor_code"], unique=True
)
op.create_table(
"customers",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("email", sa.String(length=255), nullable=False),
sa.Column("hashed_password", sa.String(length=255), nullable=False),
sa.Column("first_name", sa.String(length=100), nullable=True),
sa.Column("last_name", sa.String(length=100), nullable=True),
sa.Column("phone", sa.String(length=50), nullable=True),
sa.Column("customer_number", sa.String(length=100), nullable=False),
sa.Column("preferences", sa.JSON(), nullable=True),
sa.Column("marketing_consent", sa.Boolean(), nullable=True),
sa.Column("last_order_date", sa.DateTime(), nullable=True),
sa.Column("total_orders", sa.Integer(), nullable=True),
sa.Column("total_spent", sa.Numeric(precision=10, scale=2), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["vendor_id"],
["vendors.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_customers_customer_number"),
"customers",
["customer_number"],
unique=False,
)
op.create_index(op.f("ix_customers_email"), "customers", ["email"], unique=False)
op.create_index(op.f("ix_customers_id"), "customers", ["id"], unique=False)
op.create_table(
"marketplace_import_jobs",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.Integer(), nullable=False),
sa.Column("marketplace", sa.String(), nullable=False),
sa.Column("source_url", sa.String(), nullable=False),
sa.Column("status", sa.String(), nullable=False),
sa.Column("imported_count", sa.Integer(), nullable=True),
sa.Column("updated_count", sa.Integer(), nullable=True),
sa.Column("error_count", sa.Integer(), nullable=True),
sa.Column("total_processed", sa.Integer(), nullable=True),
sa.Column("error_message", sa.Text(), nullable=True),
sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
),
sa.ForeignKeyConstraint(
["vendor_id"],
["vendors.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
"idx_import_user_marketplace",
"marketplace_import_jobs",
["user_id", "marketplace"],
unique=False,
)
op.create_index(
"idx_import_vendor_created",
"marketplace_import_jobs",
["vendor_id", "created_at"],
unique=False,
)
op.create_index(
"idx_import_vendor_status",
"marketplace_import_jobs",
["vendor_id", "status"],
unique=False,
)
op.create_index(
op.f("ix_marketplace_import_jobs_id"),
"marketplace_import_jobs",
["id"],
unique=False,
)
op.create_index(
op.f("ix_marketplace_import_jobs_marketplace"),
"marketplace_import_jobs",
["marketplace"],
unique=False,
)
op.create_index(
op.f("ix_marketplace_import_jobs_vendor_id"),
"marketplace_import_jobs",
["vendor_id"],
unique=False,
)
op.create_table(
"products",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("marketplace_product_id", sa.Integer(), nullable=False),
sa.Column("product_id", sa.String(), nullable=True),
sa.Column("price", sa.Float(), nullable=True),
sa.Column("sale_price", sa.Float(), nullable=True),
sa.Column("currency", sa.String(), nullable=True),
sa.Column("availability", sa.String(), nullable=True),
sa.Column("condition", sa.String(), nullable=True),
sa.Column("is_featured", sa.Boolean(), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=True),
sa.Column("display_order", sa.Integer(), nullable=True),
sa.Column("min_quantity", sa.Integer(), nullable=True),
sa.Column("max_quantity", sa.Integer(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["marketplace_product_id"],
["marketplace_products.id"],
),
sa.ForeignKeyConstraint(
["vendor_id"],
["vendors.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("vendor_id", "marketplace_product_id", name="uq_product"),
)
op.create_index(
"idx_product_active", "products", ["vendor_id", "is_active"], unique=False
)
op.create_index(
"idx_product_featured", "products", ["vendor_id", "is_featured"], unique=False
)
op.create_index(op.f("ix_products_id"), "products", ["id"], unique=False)
op.create_table(
"roles",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=100), nullable=False),
sa.Column("permissions", sa.JSON(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["vendor_id"],
["vendors.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_roles_id"), "roles", ["id"], unique=False)
op.create_table(
"vendor_domains",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("domain", sa.String(length=255), nullable=False),
sa.Column("is_primary", sa.Boolean(), nullable=False),
sa.Column("is_active", sa.Boolean(), nullable=False),
sa.Column("ssl_status", sa.String(length=50), nullable=True),
sa.Column("ssl_verified_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("verification_token", sa.String(length=100), nullable=True),
sa.Column("is_verified", sa.Boolean(), nullable=False),
sa.Column("verified_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("vendor_id", "domain", name="uq_vendor_domain"),
sa.UniqueConstraint("verification_token"),
)
op.create_index(
"idx_domain_active", "vendor_domains", ["domain", "is_active"], unique=False
)
op.create_index(
"idx_vendor_primary",
"vendor_domains",
["vendor_id", "is_primary"],
unique=False,
)
op.create_index(
op.f("ix_vendor_domains_domain"), "vendor_domains", ["domain"], unique=True
)
op.create_index(
op.f("ix_vendor_domains_id"), "vendor_domains", ["id"], unique=False
)
op.create_table(
"vendor_themes",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("theme_name", sa.String(length=100), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=True),
sa.Column("colors", sa.JSON(), nullable=True),
sa.Column("font_family_heading", sa.String(length=100), nullable=True),
sa.Column("font_family_body", sa.String(length=100), nullable=True),
sa.Column("logo_url", sa.String(length=500), nullable=True),
sa.Column("logo_dark_url", sa.String(length=500), nullable=True),
sa.Column("favicon_url", sa.String(length=500), nullable=True),
sa.Column("banner_url", sa.String(length=500), nullable=True),
sa.Column("layout_style", sa.String(length=50), nullable=True),
sa.Column("header_style", sa.String(length=50), nullable=True),
sa.Column("product_card_style", sa.String(length=50), nullable=True),
sa.Column("custom_css", sa.Text(), nullable=True),
sa.Column("social_links", sa.JSON(), nullable=True),
sa.Column("meta_title_template", sa.String(length=200), nullable=True),
sa.Column("meta_description", sa.Text(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("vendor_id"),
)
op.create_index(op.f("ix_vendor_themes_id"), "vendor_themes", ["id"], unique=False)
op.create_table(
"customer_addresses",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("customer_id", sa.Integer(), nullable=False),
sa.Column("address_type", sa.String(length=50), nullable=False),
sa.Column("first_name", sa.String(length=100), nullable=False),
sa.Column("last_name", sa.String(length=100), nullable=False),
sa.Column("company", sa.String(length=200), nullable=True),
sa.Column("address_line_1", sa.String(length=255), nullable=False),
sa.Column("address_line_2", sa.String(length=255), nullable=True),
sa.Column("city", sa.String(length=100), nullable=False),
sa.Column("postal_code", sa.String(length=20), nullable=False),
sa.Column("country", sa.String(length=100), nullable=False),
sa.Column("is_default", sa.Boolean(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["customer_id"],
["customers.id"],
),
sa.ForeignKeyConstraint(
["vendor_id"],
["vendors.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_customer_addresses_id"), "customer_addresses", ["id"], unique=False
)
op.create_table(
"inventory",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("product_id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("location", sa.String(), nullable=False),
sa.Column("quantity", sa.Integer(), nullable=False),
sa.Column("reserved_quantity", sa.Integer(), nullable=True),
sa.Column("gtin", sa.String(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["product_id"],
["products.id"],
),
sa.ForeignKeyConstraint(
["vendor_id"],
["vendors.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint(
"product_id", "location", name="uq_inventory_product_location"
),
)
op.create_index(
"idx_inventory_product_location",
"inventory",
["product_id", "location"],
unique=False,
)
op.create_index(
"idx_inventory_vendor_product",
"inventory",
["vendor_id", "product_id"],
unique=False,
)
op.create_index(op.f("ix_inventory_gtin"), "inventory", ["gtin"], unique=False)
op.create_index(op.f("ix_inventory_id"), "inventory", ["id"], unique=False)
op.create_index(
op.f("ix_inventory_location"), "inventory", ["location"], unique=False
)
op.create_index(
op.f("ix_inventory_product_id"), "inventory", ["product_id"], unique=False
)
op.create_index(
op.f("ix_inventory_vendor_id"), "inventory", ["vendor_id"], unique=False
)
op.create_table(
"vendor_users",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.Integer(), nullable=False),
sa.Column("role_id", sa.Integer(), nullable=False),
sa.Column("invited_by", sa.Integer(), nullable=True),
sa.Column("is_active", sa.Boolean(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["invited_by"],
["users.id"],
),
sa.ForeignKeyConstraint(
["role_id"],
["roles.id"],
),
sa.ForeignKeyConstraint(
["user_id"],
["users.id"],
),
sa.ForeignKeyConstraint(
["vendor_id"],
["vendors.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_vendor_users_id"), "vendor_users", ["id"], unique=False)
op.create_table(
"orders",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("vendor_id", sa.Integer(), nullable=False),
sa.Column("customer_id", sa.Integer(), nullable=False),
sa.Column("order_number", sa.String(), nullable=False),
sa.Column("status", sa.String(), nullable=False),
sa.Column("subtotal", sa.Float(), nullable=False),
sa.Column("tax_amount", sa.Float(), nullable=True),
sa.Column("shipping_amount", sa.Float(), nullable=True),
sa.Column("discount_amount", sa.Float(), nullable=True),
sa.Column("total_amount", sa.Float(), nullable=False),
sa.Column("currency", sa.String(), nullable=True),
sa.Column("shipping_address_id", sa.Integer(), nullable=False),
sa.Column("billing_address_id", sa.Integer(), nullable=False),
sa.Column("shipping_method", sa.String(), nullable=True),
sa.Column("tracking_number", sa.String(), nullable=True),
sa.Column("customer_notes", sa.Text(), nullable=True),
sa.Column("internal_notes", sa.Text(), nullable=True),
sa.Column("paid_at", sa.DateTime(), nullable=True),
sa.Column("shipped_at", sa.DateTime(), nullable=True),
sa.Column("delivered_at", sa.DateTime(), nullable=True),
sa.Column("cancelled_at", sa.DateTime(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["billing_address_id"],
["customer_addresses.id"],
),
sa.ForeignKeyConstraint(
["customer_id"],
["customers.id"],
),
sa.ForeignKeyConstraint(
["shipping_address_id"],
["customer_addresses.id"],
),
sa.ForeignKeyConstraint(
["vendor_id"],
["vendors.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_orders_customer_id"), "orders", ["customer_id"], unique=False
)
op.create_index(op.f("ix_orders_id"), "orders", ["id"], unique=False)
op.create_index(
op.f("ix_orders_order_number"), "orders", ["order_number"], unique=True
)
op.create_index(op.f("ix_orders_status"), "orders", ["status"], unique=False)
op.create_index(op.f("ix_orders_vendor_id"), "orders", ["vendor_id"], unique=False)
op.create_table(
"order_items",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("order_id", sa.Integer(), nullable=False),
sa.Column("product_id", sa.Integer(), nullable=False),
sa.Column("product_name", sa.String(), nullable=False),
sa.Column("product_sku", sa.String(), nullable=True),
sa.Column("quantity", sa.Integer(), nullable=False),
sa.Column("unit_price", sa.Float(), nullable=False),
sa.Column("total_price", sa.Float(), nullable=False),
sa.Column("inventory_reserved", sa.Boolean(), nullable=True),
sa.Column("inventory_fulfilled", sa.Boolean(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("updated_at", sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(
["order_id"],
["orders.id"],
),
sa.ForeignKeyConstraint(
["product_id"],
["products.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_order_items_id"), "order_items", ["id"], unique=False)
op.create_index(
op.f("ix_order_items_order_id"), "order_items", ["order_id"], unique=False
)
# ### end Alembic commands ###
def downgrade() -> None:
    """Drop every table created by this migration's upgrade().

    Tables are removed child-first (FK holders before the tables they
    reference) so no drop ever violates a foreign-key constraint; each
    table's indexes are dropped immediately before the table itself.
    The order below mirrors the reverse of the upgrade() creation order
    and must not be rearranged.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # order_items references orders and products — dropped first.
    op.drop_index(op.f("ix_order_items_order_id"), table_name="order_items")
    op.drop_index(op.f("ix_order_items_id"), table_name="order_items")
    op.drop_table("order_items")
    op.drop_index(op.f("ix_orders_vendor_id"), table_name="orders")
    op.drop_index(op.f("ix_orders_status"), table_name="orders")
    op.drop_index(op.f("ix_orders_order_number"), table_name="orders")
    op.drop_index(op.f("ix_orders_id"), table_name="orders")
    op.drop_index(op.f("ix_orders_customer_id"), table_name="orders")
    op.drop_table("orders")
    op.drop_index(op.f("ix_vendor_users_id"), table_name="vendor_users")
    op.drop_table("vendor_users")
    op.drop_index(op.f("ix_inventory_vendor_id"), table_name="inventory")
    op.drop_index(op.f("ix_inventory_product_id"), table_name="inventory")
    op.drop_index(op.f("ix_inventory_location"), table_name="inventory")
    op.drop_index(op.f("ix_inventory_id"), table_name="inventory")
    op.drop_index(op.f("ix_inventory_gtin"), table_name="inventory")
    op.drop_index("idx_inventory_vendor_product", table_name="inventory")
    op.drop_index("idx_inventory_product_location", table_name="inventory")
    op.drop_table("inventory")
    op.drop_index(op.f("ix_customer_addresses_id"), table_name="customer_addresses")
    op.drop_table("customer_addresses")
    op.drop_index(op.f("ix_vendor_themes_id"), table_name="vendor_themes")
    op.drop_table("vendor_themes")
    op.drop_index(op.f("ix_vendor_domains_id"), table_name="vendor_domains")
    op.drop_index(op.f("ix_vendor_domains_domain"), table_name="vendor_domains")
    op.drop_index("idx_vendor_primary", table_name="vendor_domains")
    op.drop_index("idx_domain_active", table_name="vendor_domains")
    op.drop_table("vendor_domains")
    op.drop_index(op.f("ix_roles_id"), table_name="roles")
    op.drop_table("roles")
    op.drop_index(op.f("ix_products_id"), table_name="products")
    op.drop_index("idx_product_featured", table_name="products")
    op.drop_index("idx_product_active", table_name="products")
    op.drop_table("products")
    op.drop_index(
        op.f("ix_marketplace_import_jobs_vendor_id"),
        table_name="marketplace_import_jobs",
    )
    op.drop_index(
        op.f("ix_marketplace_import_jobs_marketplace"),
        table_name="marketplace_import_jobs",
    )
    op.drop_index(
        op.f("ix_marketplace_import_jobs_id"), table_name="marketplace_import_jobs"
    )
    op.drop_index("idx_import_vendor_status", table_name="marketplace_import_jobs")
    op.drop_index("idx_import_vendor_created", table_name="marketplace_import_jobs")
    op.drop_index("idx_import_user_marketplace", table_name="marketplace_import_jobs")
    op.drop_table("marketplace_import_jobs")
    op.drop_index(op.f("ix_customers_id"), table_name="customers")
    op.drop_index(op.f("ix_customers_email"), table_name="customers")
    op.drop_index(op.f("ix_customers_customer_number"), table_name="customers")
    op.drop_table("customers")
    # vendors is referenced by most tenant-scoped tables above.
    op.drop_index(op.f("ix_vendors_vendor_code"), table_name="vendors")
    op.drop_index(op.f("ix_vendors_subdomain"), table_name="vendors")
    op.drop_index(op.f("ix_vendors_id"), table_name="vendors")
    op.drop_table("vendors")
    op.drop_index(op.f("ix_platform_alerts_severity"), table_name="platform_alerts")
    op.drop_index(op.f("ix_platform_alerts_is_resolved"), table_name="platform_alerts")
    op.drop_index(op.f("ix_platform_alerts_id"), table_name="platform_alerts")
    op.drop_index(op.f("ix_platform_alerts_alert_type"), table_name="platform_alerts")
    op.drop_table("platform_alerts")
    op.drop_index(op.f("ix_admin_settings_key"), table_name="admin_settings")
    op.drop_index(op.f("ix_admin_settings_id"), table_name="admin_settings")
    op.drop_index(op.f("ix_admin_settings_category"), table_name="admin_settings")
    op.drop_table("admin_settings")
    op.drop_index(op.f("ix_admin_sessions_session_token"), table_name="admin_sessions")
    op.drop_index(op.f("ix_admin_sessions_login_at"), table_name="admin_sessions")
    op.drop_index(op.f("ix_admin_sessions_is_active"), table_name="admin_sessions")
    op.drop_index(op.f("ix_admin_sessions_id"), table_name="admin_sessions")
    op.drop_index(op.f("ix_admin_sessions_admin_user_id"), table_name="admin_sessions")
    op.drop_table("admin_sessions")
    op.drop_index(op.f("ix_admin_notifications_type"), table_name="admin_notifications")
    op.drop_index(
        op.f("ix_admin_notifications_priority"), table_name="admin_notifications"
    )
    op.drop_index(
        op.f("ix_admin_notifications_is_read"), table_name="admin_notifications"
    )
    op.drop_index(op.f("ix_admin_notifications_id"), table_name="admin_notifications")
    op.drop_index(
        op.f("ix_admin_notifications_action_required"), table_name="admin_notifications"
    )
    op.drop_table("admin_notifications")
    op.drop_index(
        op.f("ix_admin_audit_logs_target_type"), table_name="admin_audit_logs"
    )
    op.drop_index(op.f("ix_admin_audit_logs_target_id"), table_name="admin_audit_logs")
    op.drop_index(op.f("ix_admin_audit_logs_id"), table_name="admin_audit_logs")
    op.drop_index(
        op.f("ix_admin_audit_logs_admin_user_id"), table_name="admin_audit_logs"
    )
    op.drop_index(op.f("ix_admin_audit_logs_action"), table_name="admin_audit_logs")
    op.drop_table("admin_audit_logs")
    # users is referenced by the admin_* tables above via FK.
    op.drop_index(op.f("ix_users_username"), table_name="users")
    op.drop_index(op.f("ix_users_id"), table_name="users")
    op.drop_index(op.f("ix_users_email"), table_name="users")
    op.drop_table("users")
    op.drop_index(
        op.f("ix_marketplace_products_vendor_name"), table_name="marketplace_products"
    )
    op.drop_index(
        op.f("ix_marketplace_products_marketplace_product_id"),
        table_name="marketplace_products",
    )
    op.drop_index(
        op.f("ix_marketplace_products_marketplace"), table_name="marketplace_products"
    )
    op.drop_index(op.f("ix_marketplace_products_id"), table_name="marketplace_products")
    op.drop_index(
        op.f("ix_marketplace_products_gtin"), table_name="marketplace_products"
    )
    op.drop_index(
        op.f("ix_marketplace_products_google_product_category"),
        table_name="marketplace_products",
    )
    op.drop_index(
        op.f("ix_marketplace_products_brand"), table_name="marketplace_products"
    )
    op.drop_index(
        op.f("ix_marketplace_products_availability"), table_name="marketplace_products"
    )
    op.drop_index("idx_marketplace_vendor", table_name="marketplace_products")
    op.drop_index("idx_marketplace_brand", table_name="marketplace_products")
    op.drop_table("marketplace_products")
    # ### end Alembic commands ###

View File

@@ -0,0 +1,31 @@
"""add_order_tracking_fields
Revision ID: 55b92e155566
Revises: d2e3f4a5b6c7
Create Date: 2025-12-20 18:07:51.144136
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '55b92e155566'
down_revision: Union[str, None] = 'd2e3f4a5b6c7'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add shipment-tracking columns to the ``orders`` table."""
    # All columns are nullable, so existing order rows need no backfill.
    tracking_columns = (
        ("tracking_url", 500),
        ("shipment_number", 100),
        ("shipping_carrier", 50),
    )
    for column_name, max_length in tracking_columns:
        op.add_column(
            "orders",
            sa.Column(column_name, sa.String(length=max_length), nullable=True),
        )
def downgrade() -> None:
    """Remove the shipment-tracking columns (reverse of upgrade order)."""
    for column_name in ("shipping_carrier", "shipment_number", "tracking_url"):
        op.drop_column("orders", column_name)

View File

@@ -0,0 +1,48 @@
"""make_vendor_owner_user_id_nullable_for_company_ownership
Revision ID: 5818330181a5
Revises: d0325d7c0f25
Create Date: 2025-12-01 20:30:06.158027
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '5818330181a5'
down_revision: Union[str, None] = 'd0325d7c0f25'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Relax ``vendors.owner_user_id`` to nullable.

    Ownership moves from the vendor level (old: ``vendor.owner_user_id``)
    to the company level (new: ``company.owner_user_id``), letting one
    company owner manage multiple vendor brands — so a per-vendor owner
    becomes optional.
    """
    # batch_alter_table rebuilds the table under SQLite, which cannot
    # ALTER an existing column in place.
    with op.batch_alter_table("vendors", schema=None) as batch_op:
        batch_op.alter_column(
            "owner_user_id", existing_type=sa.INTEGER(), nullable=True
        )
def downgrade() -> None:
    """Restore NOT NULL on ``vendors.owner_user_id``.

    WARNING: fails if any vendor row currently has a NULL owner_user_id.
    """
    # Batch mode keeps the ALTER portable to SQLite (table rebuild).
    with op.batch_alter_table("vendors", schema=None) as batch_op:
        batch_op.alter_column(
            "owner_user_id", existing_type=sa.INTEGER(), nullable=False
        )

View File

@@ -0,0 +1,76 @@
"""Ensure content_pages table with all columns
Revision ID: 72aa309d4007
Revises: fef1d20ce8b4
Create Date: 2025-11-22 15:16:13.213613
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "72aa309d4007"
down_revision: Union[str, None] = "fef1d20ce8b4"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the ``content_pages`` CMS table with its indexes.

    NOTE(review): vendor_id is nullable — presumably a NULL marks a
    platform-level page while a value scopes the page to one vendor
    (uq_vendor_slug then enforces per-vendor slug uniqueness); confirm
    against the ContentPage model.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "content_pages",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=True),
        sa.Column("slug", sa.String(length=100), nullable=False),
        sa.Column("title", sa.String(length=200), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        sa.Column("content_format", sa.String(length=20), nullable=True),
        sa.Column("meta_description", sa.String(length=300), nullable=True),
        sa.Column("meta_keywords", sa.String(length=300), nullable=True),
        sa.Column("is_published", sa.Boolean(), nullable=False),
        sa.Column("published_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("display_order", sa.Integer(), nullable=True),
        sa.Column("show_in_footer", sa.Boolean(), nullable=True),
        sa.Column("show_in_header", sa.Boolean(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("created_by", sa.Integer(), nullable=True),
        sa.Column("updated_by", sa.Integer(), nullable=True),
        # Author/editor links survive user deletion; vendor deletion
        # cascades to its pages.
        sa.ForeignKeyConstraint(["created_by"], ["users.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["updated_by"], ["users.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("vendor_id", "slug", name="uq_vendor_slug"),
    )
    # Composite indexes for the common published-page lookups.
    op.create_index(
        "idx_slug_published", "content_pages", ["slug", "is_published"], unique=False
    )
    op.create_index(
        "idx_vendor_published",
        "content_pages",
        ["vendor_id", "is_published"],
        unique=False,
    )
    op.create_index(op.f("ix_content_pages_id"), "content_pages", ["id"], unique=False)
    op.create_index(
        op.f("ix_content_pages_slug"), "content_pages", ["slug"], unique=False
    )
    op.create_index(
        op.f("ix_content_pages_vendor_id"), "content_pages", ["vendor_id"], unique=False
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the ``content_pages`` table together with all of its indexes."""
    # Auto-generated (op.f-named) indexes first, then the hand-named
    # composite ones, finally the table itself.
    for index_name in (
        "ix_content_pages_vendor_id",
        "ix_content_pages_slug",
        "ix_content_pages_id",
    ):
        op.drop_index(op.f(index_name), table_name="content_pages")
    for index_name in ("idx_vendor_published", "idx_slug_published"):
        op.drop_index(index_name, table_name="content_pages")
    op.drop_table("content_pages")

View File

@@ -0,0 +1,291 @@
"""add_architecture_quality_tracking_tables
Revision ID: 7a7ce92593d5
Revises: a2064e1dfcd4
Create Date: 2025-11-28 09:21:16.545203
"""
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "7a7ce92593d5"
down_revision: Union[str, None] = "a2064e1dfcd4"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the architecture-quality tracking tables.

    Five tables: ``architecture_scans`` (one row per analyzer run),
    ``architecture_rules`` (the rule catalogue), ``architecture_violations``
    (individual findings per scan), and ``violation_assignments`` /
    ``violation_comments`` (per-violation workflow metadata).
    """
    # Create architecture_scans table
    op.create_table(
        "architecture_scans",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column(
            "timestamp",
            sa.DateTime(timezone=True),
            server_default=sa.text("CURRENT_TIMESTAMP"),
            nullable=False,
        ),
        sa.Column("total_files", sa.Integer(), nullable=True),
        sa.Column("total_violations", sa.Integer(), nullable=True),
        sa.Column("errors", sa.Integer(), nullable=True),
        sa.Column("warnings", sa.Integer(), nullable=True),
        sa.Column("duration_seconds", sa.Float(), nullable=True),
        sa.Column("triggered_by", sa.String(length=100), nullable=True),
        sa.Column("git_commit_hash", sa.String(length=40), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_architecture_scans_id"), "architecture_scans", ["id"], unique=False
    )
    op.create_index(
        op.f("ix_architecture_scans_timestamp"),
        "architecture_scans",
        ["timestamp"],
        unique=False,
    )
    # Create architecture_rules table
    op.create_table(
        "architecture_rules",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("rule_id", sa.String(length=20), nullable=False),
        sa.Column("category", sa.String(length=50), nullable=False),
        sa.Column("name", sa.String(length=200), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("severity", sa.String(length=10), nullable=False),
        # server_default="1" — rules are enabled by default at the DB level.
        sa.Column("enabled", sa.Boolean(), nullable=False, server_default="1"),
        sa.Column("custom_config", sa.JSON(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("CURRENT_TIMESTAMP"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("CURRENT_TIMESTAMP"),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("rule_id"),
    )
    op.create_index(
        op.f("ix_architecture_rules_id"), "architecture_rules", ["id"], unique=False
    )
    op.create_index(
        op.f("ix_architecture_rules_rule_id"),
        "architecture_rules",
        ["rule_id"],
        unique=True,
    )
    # Create architecture_violations table
    op.create_table(
        "architecture_violations",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("scan_id", sa.Integer(), nullable=False),
        # rule_id here is the string rule code, not an FK to architecture_rules.
        sa.Column("rule_id", sa.String(length=20), nullable=False),
        sa.Column("rule_name", sa.String(length=200), nullable=False),
        sa.Column("severity", sa.String(length=10), nullable=False),
        sa.Column("file_path", sa.String(length=500), nullable=False),
        sa.Column("line_number", sa.Integer(), nullable=False),
        sa.Column("message", sa.Text(), nullable=False),
        sa.Column("context", sa.Text(), nullable=True),
        sa.Column("suggestion", sa.Text(), nullable=True),
        sa.Column("status", sa.String(length=20), server_default="open", nullable=True),
        sa.Column("assigned_to", sa.Integer(), nullable=True),
        sa.Column("resolved_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("resolved_by", sa.Integer(), nullable=True),
        sa.Column("resolution_note", sa.Text(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("CURRENT_TIMESTAMP"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["assigned_to"],
            ["users.id"],
        ),
        sa.ForeignKeyConstraint(
            ["resolved_by"],
            ["users.id"],
        ),
        sa.ForeignKeyConstraint(
            ["scan_id"],
            ["architecture_scans.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_architecture_violations_file_path"),
        "architecture_violations",
        ["file_path"],
        unique=False,
    )
    op.create_index(
        op.f("ix_architecture_violations_id"),
        "architecture_violations",
        ["id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_architecture_violations_rule_id"),
        "architecture_violations",
        ["rule_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_architecture_violations_scan_id"),
        "architecture_violations",
        ["scan_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_architecture_violations_severity"),
        "architecture_violations",
        ["severity"],
        unique=False,
    )
    op.create_index(
        op.f("ix_architecture_violations_status"),
        "architecture_violations",
        ["status"],
        unique=False,
    )
    # Create violation_assignments table
    op.create_table(
        "violation_assignments",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("violation_id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column(
            "assigned_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("CURRENT_TIMESTAMP"),
            nullable=False,
        ),
        sa.Column("assigned_by", sa.Integer(), nullable=True),
        sa.Column("due_date", sa.DateTime(timezone=True), nullable=True),
        sa.Column(
            "priority", sa.String(length=10), server_default="medium", nullable=True
        ),
        sa.ForeignKeyConstraint(
            ["assigned_by"],
            ["users.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.ForeignKeyConstraint(
            ["violation_id"],
            ["architecture_violations.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_violation_assignments_id"),
        "violation_assignments",
        ["id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_violation_assignments_violation_id"),
        "violation_assignments",
        ["violation_id"],
        unique=False,
    )
    # Create violation_comments table
    op.create_table(
        "violation_comments",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("violation_id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("comment", sa.Text(), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("CURRENT_TIMESTAMP"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.ForeignKeyConstraint(
            ["violation_id"],
            ["architecture_violations.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_violation_comments_id"), "violation_comments", ["id"], unique=False
    )
    op.create_index(
        op.f("ix_violation_comments_violation_id"),
        "violation_comments",
        ["violation_id"],
        unique=False,
    )
def downgrade() -> None:
    """Tear down the architecture-scanning schema created by upgrade().

    Children are dropped before parents so foreign key constraints are
    never violated: comments/assignments first, then violations, then
    rules and scans.
    """
    # violation_comments (child of architecture_violations)
    for index_name in ("ix_violation_comments_violation_id", "ix_violation_comments_id"):
        op.drop_index(op.f(index_name), table_name="violation_comments")
    op.drop_table("violation_comments")

    # violation_assignments (child of architecture_violations)
    for index_name in ("ix_violation_assignments_violation_id", "ix_violation_assignments_id"):
        op.drop_index(op.f(index_name), table_name="violation_assignments")
    op.drop_table("violation_assignments")

    # architecture_violations (child of rules and scans)
    for index_name in (
        "ix_architecture_violations_status",
        "ix_architecture_violations_severity",
        "ix_architecture_violations_scan_id",
        "ix_architecture_violations_rule_id",
        "ix_architecture_violations_id",
        "ix_architecture_violations_file_path",
    ):
        op.drop_index(op.f(index_name), table_name="architecture_violations")
    op.drop_table("architecture_violations")

    # architecture_rules
    for index_name in ("ix_architecture_rules_rule_id", "ix_architecture_rules_id"):
        op.drop_index(op.f(index_name), table_name="architecture_rules")
    op.drop_table("architecture_rules")

    # architecture_scans
    for index_name in ("ix_architecture_scans_timestamp", "ix_architecture_scans_id"):
        op.drop_index(op.f(index_name), table_name="architecture_scans")
    op.drop_table("architecture_scans")

View File

@@ -0,0 +1,103 @@
"""add_test_run_tables
Revision ID: 82ea1b4a3ccb
Revises: b4c5d6e7f8a9
Create Date: 2025-12-12 22:48:09.501172
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '82ea1b4a3ccb'
down_revision: Union[str, None] = 'b4c5d6e7f8a9'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the test-run tracking tables.

    Three tables are added:
    - test_collections: snapshot of the collected test suite (counts + file list)
    - test_runs: one row per pytest invocation with aggregate results
    - test_results: per-test outcomes, linked to test_runs via run_id
    """
    # Create test_collections table
    op.create_table('test_collections',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('total_tests', sa.Integer(), nullable=True),
        sa.Column('total_files', sa.Integer(), nullable=True),
        sa.Column('total_classes', sa.Integer(), nullable=True),
        # Per-category breakdown of the collected tests
        sa.Column('unit_tests', sa.Integer(), nullable=True),
        sa.Column('integration_tests', sa.Integer(), nullable=True),
        sa.Column('performance_tests', sa.Integer(), nullable=True),
        sa.Column('system_tests', sa.Integer(), nullable=True),
        sa.Column('test_files', sa.JSON(), nullable=True),
        sa.Column('collected_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_test_collections_id'), 'test_collections', ['id'], unique=False)
    # Create test_runs table
    op.create_table('test_runs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        # Aggregate outcome counters for the run
        sa.Column('total_tests', sa.Integer(), nullable=True),
        sa.Column('passed', sa.Integer(), nullable=True),
        sa.Column('failed', sa.Integer(), nullable=True),
        sa.Column('errors', sa.Integer(), nullable=True),
        sa.Column('skipped', sa.Integer(), nullable=True),
        sa.Column('xfailed', sa.Integer(), nullable=True),
        sa.Column('xpassed', sa.Integer(), nullable=True),
        sa.Column('coverage_percent', sa.Float(), nullable=True),
        sa.Column('duration_seconds', sa.Float(), nullable=True),
        # Provenance: who/what triggered the run and at which commit/branch
        sa.Column('triggered_by', sa.String(length=100), nullable=True),
        sa.Column('git_commit_hash', sa.String(length=40), nullable=True),
        sa.Column('git_branch', sa.String(length=100), nullable=True),
        sa.Column('test_path', sa.String(length=500), nullable=True),
        sa.Column('pytest_args', sa.String(length=500), nullable=True),
        sa.Column('status', sa.String(length=20), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_test_runs_id'), 'test_runs', ['id'], unique=False)
    op.create_index(op.f('ix_test_runs_status'), 'test_runs', ['status'], unique=False)
    op.create_index(op.f('ix_test_runs_timestamp'), 'test_runs', ['timestamp'], unique=False)
    # Create test_results table (one row per executed test, child of test_runs)
    op.create_table('test_results',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('run_id', sa.Integer(), nullable=False),
        # pytest node id uniquely identifies a test within the suite
        sa.Column('node_id', sa.String(length=500), nullable=False),
        sa.Column('test_name', sa.String(length=200), nullable=False),
        sa.Column('test_file', sa.String(length=300), nullable=False),
        sa.Column('test_class', sa.String(length=200), nullable=True),
        sa.Column('outcome', sa.String(length=20), nullable=False),
        sa.Column('duration_seconds', sa.Float(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('traceback', sa.Text(), nullable=True),
        sa.Column('markers', sa.JSON(), nullable=True),
        sa.Column('parameters', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.ForeignKeyConstraint(['run_id'], ['test_runs.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_test_results_id'), 'test_results', ['id'], unique=False)
    op.create_index(op.f('ix_test_results_node_id'), 'test_results', ['node_id'], unique=False)
    op.create_index(op.f('ix_test_results_outcome'), 'test_results', ['outcome'], unique=False)
    op.create_index(op.f('ix_test_results_run_id'), 'test_results', ['run_id'], unique=False)
def downgrade() -> None:
    """Remove the test-run tracking tables in dependency order."""
    # test_results references test_runs, so it must go first.
    for idx in ('ix_test_results_run_id', 'ix_test_results_outcome',
                'ix_test_results_node_id', 'ix_test_results_id'):
        op.drop_index(op.f(idx), table_name='test_results')
    op.drop_table('test_results')

    for idx in ('ix_test_runs_timestamp', 'ix_test_runs_status', 'ix_test_runs_id'):
        op.drop_index(op.f(idx), table_name='test_runs')
    op.drop_table('test_runs')

    # test_collections is standalone.
    op.drop_index(op.f('ix_test_collections_id'), table_name='test_collections')
    op.drop_table('test_collections')

View File

@@ -0,0 +1,44 @@
"""add marketplace import errors table
Revision ID: 91d02647efae
Revises: 987b4ecfa503
Create Date: 2025-12-13 13:13:46.969503
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '91d02647efae'
down_revision: Union[str, None] = '987b4ecfa503'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the marketplace_import_errors table.

    Each row captures one failed row from an import job, including the raw
    row data, so failures can be inspected (and potentially retried) later.
    """
    op.create_table(
        'marketplace_import_errors',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('import_job_id', sa.Integer(), nullable=False),
        sa.Column('row_number', sa.Integer(), nullable=False),
        sa.Column('identifier', sa.String(), nullable=True),
        sa.Column('error_type', sa.String(length=50), nullable=False),
        sa.Column('error_message', sa.Text(), nullable=False),
        sa.Column('row_data', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        # Deleting an import job removes its error rows as well.
        sa.ForeignKeyConstraint(
            ['import_job_id'], ['marketplace_import_jobs.id'], ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id'),
    )
    # Lookup indexes: filter errors by job, by error category, and by PK.
    op.create_index(
        'idx_import_error_job_id',
        'marketplace_import_errors',
        ['import_job_id'],
        unique=False,
    )
    op.create_index(
        'idx_import_error_type',
        'marketplace_import_errors',
        ['error_type'],
        unique=False,
    )
    op.create_index(
        op.f('ix_marketplace_import_errors_id'),
        'marketplace_import_errors',
        ['id'],
        unique=False,
    )
def downgrade() -> None:
    """Drop the marketplace_import_errors table and its indexes."""
    for index_name in (
        op.f('ix_marketplace_import_errors_id'),
        'idx_import_error_type',
        'idx_import_error_job_id',
    ):
        op.drop_index(index_name, table_name='marketplace_import_errors')
    op.drop_table('marketplace_import_errors')

View File

@@ -0,0 +1,179 @@
"""add_letzshop_integration_tables
Revision ID: 987b4ecfa503
Revises: 82ea1b4a3ccb
Create Date: 2025-12-13
This migration adds:
- vendor_letzshop_credentials: Per-vendor encrypted API key storage
- letzshop_orders: Track imported orders with external IDs
- letzshop_fulfillment_queue: Queue outbound operations with retry
- letzshop_sync_logs: Audit trail for sync operations
- Adds channel fields to orders table for multi-marketplace support
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '987b4ecfa503'
down_revision: Union[str, None] = '82ea1b4a3ccb'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the Letzshop marketplace integration schema.

    Adds multi-channel fields to the orders table and four new tables:
    - vendor_letzshop_credentials: per-vendor encrypted API key + sync config
    - letzshop_orders: imported orders tracked by their external IDs
    - letzshop_fulfillment_queue: outbound operations with retry bookkeeping
    - letzshop_sync_logs: audit trail of sync runs
    """
    # Add channel fields to orders table ('direct' = placed on our own shop)
    op.add_column('orders', sa.Column('channel', sa.String(length=50), nullable=True, server_default='direct'))
    op.add_column('orders', sa.Column('external_order_id', sa.String(length=100), nullable=True))
    op.add_column('orders', sa.Column('external_channel_data', sa.JSON(), nullable=True))
    op.create_index(op.f('ix_orders_channel'), 'orders', ['channel'], unique=False)
    op.create_index(op.f('ix_orders_external_order_id'), 'orders', ['external_order_id'], unique=False)
    # Create vendor_letzshop_credentials table (at most one row per vendor,
    # enforced by the UniqueConstraint below)
    op.create_table('vendor_letzshop_credentials',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        # API key is stored encrypted, never in plaintext
        sa.Column('api_key_encrypted', sa.Text(), nullable=False),
        sa.Column('api_endpoint', sa.String(length=255), server_default='https://letzshop.lu/graphql', nullable=True),
        sa.Column('auto_sync_enabled', sa.Boolean(), server_default='0', nullable=True),
        sa.Column('sync_interval_minutes', sa.Integer(), server_default='15', nullable=True),
        # Last-sync status fields for surfacing health in the UI
        sa.Column('last_sync_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_sync_status', sa.String(length=50), nullable=True),
        sa.Column('last_sync_error', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('vendor_id')
    )
    op.create_index(op.f('ix_vendor_letzshop_credentials_id'), 'vendor_letzshop_credentials', ['id'], unique=False)
    op.create_index(op.f('ix_vendor_letzshop_credentials_vendor_id'), 'vendor_letzshop_credentials', ['vendor_id'], unique=True)
    # Create letzshop_orders table: mirrors imported orders and links them
    # (optionally) to a local order via local_order_id
    op.create_table('letzshop_orders',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        sa.Column('letzshop_order_id', sa.String(length=100), nullable=False),
        sa.Column('letzshop_shipment_id', sa.String(length=100), nullable=True),
        sa.Column('letzshop_order_number', sa.String(length=100), nullable=True),
        sa.Column('local_order_id', sa.Integer(), nullable=True),
        sa.Column('letzshop_state', sa.String(length=50), nullable=True),
        sa.Column('customer_email', sa.String(length=255), nullable=True),
        sa.Column('customer_name', sa.String(length=255), nullable=True),
        # Amount kept as a string as delivered by the remote API
        sa.Column('total_amount', sa.String(length=50), nullable=True),
        sa.Column('currency', sa.String(length=10), server_default='EUR', nullable=True),
        # Full API payloads retained for debugging/auditing
        sa.Column('raw_order_data', sa.JSON(), nullable=True),
        sa.Column('inventory_units', sa.JSON(), nullable=True),
        sa.Column('sync_status', sa.String(length=50), server_default='pending', nullable=True),
        sa.Column('last_synced_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('sync_error', sa.Text(), nullable=True),
        # Lifecycle timestamps for the fulfillment flow
        sa.Column('confirmed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('rejected_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('tracking_set_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('tracking_number', sa.String(length=100), nullable=True),
        sa.Column('tracking_carrier', sa.String(length=100), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.ForeignKeyConstraint(['local_order_id'], ['orders.id'], ),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_letzshop_orders_id'), 'letzshop_orders', ['id'], unique=False)
    op.create_index(op.f('ix_letzshop_orders_letzshop_order_id'), 'letzshop_orders', ['letzshop_order_id'], unique=False)
    op.create_index(op.f('ix_letzshop_orders_letzshop_shipment_id'), 'letzshop_orders', ['letzshop_shipment_id'], unique=False)
    op.create_index(op.f('ix_letzshop_orders_vendor_id'), 'letzshop_orders', ['vendor_id'], unique=False)
    # Composite indexes for the common per-vendor lookup patterns
    op.create_index('idx_letzshop_order_vendor', 'letzshop_orders', ['vendor_id', 'letzshop_order_id'], unique=False)
    op.create_index('idx_letzshop_order_state', 'letzshop_orders', ['vendor_id', 'letzshop_state'], unique=False)
    op.create_index('idx_letzshop_order_sync', 'letzshop_orders', ['vendor_id', 'sync_status'], unique=False)
    # Create letzshop_fulfillment_queue table: queued outbound operations
    # (confirm/reject/tracking) with attempt counting and retry scheduling
    op.create_table('letzshop_fulfillment_queue',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        sa.Column('letzshop_order_id', sa.Integer(), nullable=False),
        sa.Column('operation', sa.String(length=50), nullable=False),
        sa.Column('payload', sa.JSON(), nullable=False),
        sa.Column('status', sa.String(length=50), server_default='pending', nullable=True),
        sa.Column('attempts', sa.Integer(), server_default='0', nullable=True),
        sa.Column('max_attempts', sa.Integer(), server_default='3', nullable=True),
        sa.Column('last_attempt_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('next_retry_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('response_data', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.ForeignKeyConstraint(['letzshop_order_id'], ['letzshop_orders.id'], ),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_letzshop_fulfillment_queue_id'), 'letzshop_fulfillment_queue', ['id'], unique=False)
    op.create_index(op.f('ix_letzshop_fulfillment_queue_vendor_id'), 'letzshop_fulfillment_queue', ['vendor_id'], unique=False)
    # Worker queries: pending work per vendor, and due retries
    op.create_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue', ['status', 'vendor_id'], unique=False)
    op.create_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue', ['status', 'next_retry_at'], unique=False)
    # Create letzshop_sync_logs table: one row per sync run (either direction)
    op.create_table('letzshop_sync_logs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        sa.Column('operation_type', sa.String(length=50), nullable=False),
        sa.Column('direction', sa.String(length=10), nullable=False),
        sa.Column('status', sa.String(length=50), nullable=False),
        sa.Column('records_processed', sa.Integer(), server_default='0', nullable=True),
        sa.Column('records_succeeded', sa.Integer(), server_default='0', nullable=True),
        sa.Column('records_failed', sa.Integer(), server_default='0', nullable=True),
        sa.Column('error_details', sa.JSON(), nullable=True),
        sa.Column('started_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('duration_seconds', sa.Integer(), nullable=True),
        sa.Column('triggered_by', sa.String(length=100), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_letzshop_sync_logs_id'), 'letzshop_sync_logs', ['id'], unique=False)
    op.create_index(op.f('ix_letzshop_sync_logs_vendor_id'), 'letzshop_sync_logs', ['vendor_id'], unique=False)
    op.create_index('idx_sync_log_vendor_type', 'letzshop_sync_logs', ['vendor_id', 'operation_type'], unique=False)
    op.create_index('idx_sync_log_vendor_date', 'letzshop_sync_logs', ['vendor_id', 'started_at'], unique=False)
def downgrade() -> None:
    """Remove all Letzshop integration tables and the orders channel fields.

    Tables are dropped child-first so foreign key constraints are never
    violated.
    """
    for index_name in ('idx_sync_log_vendor_date',
                       'idx_sync_log_vendor_type',
                       op.f('ix_letzshop_sync_logs_vendor_id'),
                       op.f('ix_letzshop_sync_logs_id')):
        op.drop_index(index_name, table_name='letzshop_sync_logs')
    op.drop_table('letzshop_sync_logs')

    for index_name in ('idx_fulfillment_queue_retry',
                       'idx_fulfillment_queue_status',
                       op.f('ix_letzshop_fulfillment_queue_vendor_id'),
                       op.f('ix_letzshop_fulfillment_queue_id')):
        op.drop_index(index_name, table_name='letzshop_fulfillment_queue')
    op.drop_table('letzshop_fulfillment_queue')

    for index_name in ('idx_letzshop_order_sync',
                       'idx_letzshop_order_state',
                       'idx_letzshop_order_vendor',
                       op.f('ix_letzshop_orders_vendor_id'),
                       op.f('ix_letzshop_orders_letzshop_shipment_id'),
                       op.f('ix_letzshop_orders_letzshop_order_id'),
                       op.f('ix_letzshop_orders_id')):
        op.drop_index(index_name, table_name='letzshop_orders')
    op.drop_table('letzshop_orders')

    for index_name in (op.f('ix_vendor_letzshop_credentials_vendor_id'),
                       op.f('ix_vendor_letzshop_credentials_id')):
        op.drop_index(index_name, table_name='vendor_letzshop_credentials')
    op.drop_table('vendor_letzshop_credentials')

    # Finally remove the multi-channel fields that upgrade() added to orders.
    op.drop_index(op.f('ix_orders_external_order_id'), table_name='orders')
    op.drop_index(op.f('ix_orders_channel'), table_name='orders')
    for column_name in ('external_channel_data', 'external_order_id', 'channel'):
        op.drop_column('orders', column_name)

View File

@@ -0,0 +1,60 @@
"""remove_vendor_owner_user_id_column
Revision ID: 9f3a25ea4991
Revises: 5818330181a5
Create Date: 2025-12-02 17:58:45.663338
This migration removes the owner_user_id column from the vendors table.
Architecture Change:
- OLD: Each vendor had its own owner (vendor.owner_user_id)
- NEW: Vendors belong to a company, company has one owner (company.owner_user_id)
The vendor ownership is now determined via the company relationship:
- vendor.company.owner_user_id contains the owner
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '9f3a25ea4991'
down_revision: Union[str, None] = '5818330181a5'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Drop vendors.owner_user_id; ownership now lives on the company.

    Batch mode is used for SQLite compatibility: the table is recreated
    without the column, which also removes any constraints that referenced
    it, so no explicit constraint drops are needed.
    """
    with op.batch_alter_table('vendors', schema=None) as vendors_table:
        vendors_table.drop_column('owner_user_id')
def downgrade() -> None:
    """Re-create vendors.owner_user_id (schema only — data is NOT restored).

    WARNING: after downgrading, owner_user_id must be repopulated manually
    from company.owner_user_id if the old ownership model is needed.
    """
    with op.batch_alter_table('vendors', schema=None) as vendors_table:
        vendors_table.add_column(
            sa.Column('owner_user_id', sa.Integer(), nullable=True)
        )
        vendors_table.create_foreign_key(
            'vendors_owner_user_id_fkey', 'users', ['owner_user_id'], ['id']
        )

View File

@@ -0,0 +1,67 @@
"""add cart_items table
Revision ID: a2064e1dfcd4
Revises: f68d8da5315a
Create Date: 2025-11-23 19:52:40.509538
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "a2064e1dfcd4"
down_revision: Union[str, None] = "f68d8da5315a"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the cart_items table for session-scoped shopping carts.

    Rows are unique per (vendor_id, session_id, product_id), so the same
    browser session can hold separate carts at several vendors.
    """
    # Create cart_items table
    op.create_table(
        "cart_items",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("product_id", sa.Integer(), nullable=False),
        # Session identifier — carts do not require a user account
        sa.Column("session_id", sa.String(length=255), nullable=False),
        sa.Column("quantity", sa.Integer(), nullable=False),
        # Price snapshot taken when the item was added to the cart
        sa.Column("price_at_add", sa.Float(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ["product_id"],
            ["products.id"],
        ),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        # One row per product per session per vendor
        sa.UniqueConstraint(
            "vendor_id", "session_id", "product_id", name="uq_cart_item"
        ),
    )
    # Create indexes
    op.create_index(
        "idx_cart_session", "cart_items", ["vendor_id", "session_id"], unique=False
    )
    # presumably supports purging stale carts by age — TODO confirm with caller
    op.create_index("idx_cart_created", "cart_items", ["created_at"], unique=False)
    op.create_index(op.f("ix_cart_items_id"), "cart_items", ["id"], unique=False)
    op.create_index(
        op.f("ix_cart_items_session_id"), "cart_items", ["session_id"], unique=False
    )
def downgrade() -> None:
    """Drop the cart_items table together with all of its indexes."""
    for index_name in (
        op.f("ix_cart_items_session_id"),
        op.f("ix_cart_items_id"),
        "idx_cart_created",
        "idx_cart_session",
    ):
        op.drop_index(index_name, table_name="cart_items")
    op.drop_table("cart_items")

View File

@@ -0,0 +1,128 @@
"""Add override fields to products table
Revision ID: a3b4c5d6e7f8
Revises: f2b3c4d5e6f7
Create Date: 2025-12-11
This migration:
- Renames 'product_id' to 'vendor_sku' for clarity
- Adds new override fields (brand, images, digital delivery)
- Adds vendor-specific digital fulfillment fields
- Changes relationship from one-to-one to one-to-many (same marketplace product
can be in multiple vendor catalogs)
The override pattern: NULL value means "inherit from marketplace_product".
Setting a value creates a vendor-specific override.
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "a3b4c5d6e7f8"
down_revision: Union[str, None] = "f2b3c4d5e6f7"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Apply the vendor-override product schema changes.

    - Renames products.product_id to vendor_sku for clarity.
    - Adds nullable override columns (NULL = inherit from the linked
      marketplace product).
    - Adds supplier/margin tracking columns and two lookup indexes.
    """
    # SQLite cannot rename a column in place, so run the rename in batch mode.
    with op.batch_alter_table("products", schema=None) as batch_op:
        batch_op.alter_column(
            "product_id",
            new_column_name="vendor_sku",
        )

    # All override/tracking columns are nullable: NULL means "no override".
    for column in (
        sa.Column("brand", sa.String(), nullable=True),
        sa.Column("primary_image_url", sa.String(), nullable=True),
        sa.Column("additional_images", sa.JSON(), nullable=True),
        # Digital product overrides
        sa.Column("download_url", sa.String(), nullable=True),
        sa.Column("license_type", sa.String(50), nullable=True),
        # Vendor-specific digital fulfillment settings
        sa.Column("fulfillment_email_template", sa.String(), nullable=True),
        # Supplier tracking (for products sourced from CodesWholesale, etc.)
        sa.Column("supplier", sa.String(50), nullable=True),
        sa.Column("supplier_product_id", sa.String(), nullable=True),
        sa.Column("supplier_cost", sa.Float(), nullable=True),
        # Margin/markup tracking
        sa.Column("margin_percent", sa.Float(), nullable=True),
    ):
        op.add_column("products", column)

    # Composite lookup indexes for vendor SKU and supplier queries.
    op.create_index(
        "idx_product_vendor_sku",
        "products",
        ["vendor_id", "vendor_sku"],
    )
    op.create_index(
        "idx_product_supplier",
        "products",
        ["supplier", "supplier_product_id"],
    )
def downgrade() -> None:
    """Revert the override-field changes: drop indexes and columns, then
    rename vendor_sku back to product_id."""
    op.drop_index("idx_product_supplier", table_name="products")
    op.drop_index("idx_product_vendor_sku", table_name="products")

    # Remove the override/tracking columns in reverse creation order.
    for column_name in (
        "margin_percent",
        "supplier_cost",
        "supplier_product_id",
        "supplier",
        "fulfillment_email_template",
        "license_type",
        "download_url",
        "additional_images",
        "primary_image_url",
        "brand",
    ):
        op.drop_column("products", column_name)

    # Batch mode again for the SQLite-safe rename back to product_id.
    with op.batch_alter_table("products", schema=None) as batch_op:
        batch_op.alter_column(
            "vendor_sku",
            new_column_name="product_id",
        )

View File

@@ -0,0 +1,31 @@
"""add_letzshop_order_locale_and_country_fields
Revision ID: a9a86cef6cca
Revises: fcfdc02d5138
Create Date: 2025-12-17 20:55:41.477848
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'a9a86cef6cca'
down_revision: Union[str, None] = 'fcfdc02d5138'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add customer locale and ISO country columns to letzshop_orders."""
    for column in (
        sa.Column('customer_locale', sa.String(length=10), nullable=True),
        sa.Column('shipping_country_iso', sa.String(length=5), nullable=True),
        sa.Column('billing_country_iso', sa.String(length=5), nullable=True),
    ):
        op.add_column('letzshop_orders', column)
def downgrade() -> None:
    """Remove the locale/country columns from letzshop_orders."""
    for column_name in ('billing_country_iso', 'shipping_country_iso', 'customer_locale'):
        op.drop_column('letzshop_orders', column_name)

View File

@@ -0,0 +1,30 @@
"""add language column to marketplace_import_jobs
Revision ID: b412e0b49c2e
Revises: 91d02647efae
Create Date: 2025-12-13 13:35:46.524893
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'b412e0b49c2e'
down_revision: Union[str, None] = '91d02647efae'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add a NOT NULL language column to marketplace_import_jobs.

    server_default='en' backfills existing rows so the NOT NULL constraint
    can be applied in a single step.
    """
    language_column = sa.Column(
        'language', sa.String(length=5), nullable=False, server_default='en'
    )
    op.add_column('marketplace_import_jobs', language_column)
def downgrade() -> None:
    """Drop the language column from marketplace_import_jobs again."""
    op.drop_column('marketplace_import_jobs', 'language')

View File

@@ -0,0 +1,132 @@
"""Migrate existing product data to translation tables
Revision ID: b4c5d6e7f8a9
Revises: a3b4c5d6e7f8
Create Date: 2025-12-11
This migration:
1. Copies existing title/description from marketplace_products to
marketplace_product_translations (default language: 'en')
2. Parses existing price strings to numeric values
3. Removes the old title/description columns from marketplace_products
Since we're not live yet, we can safely remove the old columns
after migrating the data to the new structure.
"""
import re
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy import text
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "b4c5d6e7f8a9"
down_revision: Union[str, None] = "a3b4c5d6e7f8"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def parse_price(price_str: str) -> float | None:
"""Parse price string like '19.99 EUR' to float."""
if not price_str:
return None
# Extract numeric value
numbers = re.findall(r"[\d.,]+", str(price_str))
if numbers:
num_str = numbers[0].replace(",", ".")
try:
return float(num_str)
except ValueError:
pass
return None
def upgrade() -> None:
    """Migrate legacy product text/price data into the new structure.

    1. Copy title/description into marketplace_product_translations under
       language 'en' (existing data is assumed to be English).
    2. Parse the legacy string prices into price_numeric/sale_price_numeric.
    3. Drop the now-redundant title/description columns.
    """
    conn = op.get_bind()
    # Step 1: Migrate existing title/description to translations table
    # Default language is 'en' for existing data
    conn.execute(
        text("""
        INSERT INTO marketplace_product_translations
        (marketplace_product_id, language, title, description, created_at, updated_at)
        SELECT
            id,
            'en',
            title,
            description,
            created_at,
            updated_at
        FROM marketplace_products
        WHERE title IS NOT NULL
    """)
    )
    # Step 2: Parse prices to numeric values.
    # Fetch all rows up front: issuing UPDATEs on the same connection while
    # a result cursor is still open can invalidate the cursor on some
    # drivers, so the SELECT must be fully consumed before we write.
    rows = conn.execute(
        text("SELECT id, price, sale_price FROM marketplace_products")
    ).fetchall()
    for row in rows:
        price_numeric = parse_price(row.price) if row.price else None
        sale_price_numeric = parse_price(row.sale_price) if row.sale_price else None
        if price_numeric is not None or sale_price_numeric is not None:
            conn.execute(
                text("""
                UPDATE marketplace_products
                SET price_numeric = :price_numeric,
                    sale_price_numeric = :sale_price_numeric
                WHERE id = :id
            """),
                {
                    "id": row.id,
                    "price_numeric": price_numeric,
                    "sale_price_numeric": sale_price_numeric,
                },
            )
    # Step 3: Since we're not live, remove the old title/description columns
    # from marketplace_products (data is now in translations table)
    op.drop_column("marketplace_products", "title")
    op.drop_column("marketplace_products", "description")
def downgrade() -> None:
    """Restore the legacy title/description columns on marketplace_products.

    Copies the 'en' translations back into the restored columns, then
    deletes those translation rows. Parsed numeric prices are left as-is.
    """
    # Re-add title and description columns
    op.add_column(
        "marketplace_products",
        sa.Column("title", sa.String(), nullable=True),
    )
    op.add_column(
        "marketplace_products",
        sa.Column("description", sa.String(), nullable=True),
    )
    # Copy data back from translations (only 'en' translations)
    conn = op.get_bind()
    conn.execute(
        text("""
        UPDATE marketplace_products
        SET title = (
            SELECT title FROM marketplace_product_translations
            WHERE marketplace_product_translations.marketplace_product_id = marketplace_products.id
            AND marketplace_product_translations.language = 'en'
        ),
        description = (
            SELECT description FROM marketplace_product_translations
            WHERE marketplace_product_translations.marketplace_product_id = marketplace_products.id
            AND marketplace_product_translations.language = 'en'
        )
    """)
    )
    # Delete the migrated translations
    # NOTE(review): this removes ALL 'en' rows, including any created after
    # the upgrade ran — acceptable pre-launch, but lossy on a live database.
    conn.execute(
        text("DELETE FROM marketplace_product_translations WHERE language = 'en'")
    )

View File

@@ -0,0 +1,41 @@
"""add show_in_legal to content_pages
Revision ID: ba2c0ce78396
Revises: m1b2c3d4e5f6
Create Date: 2025-12-28 20:00:24.263518
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'ba2c0ce78396'
down_revision: Union[str, None] = 'm1b2c3d4e5f6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add show_in_legal column to content_pages table.

    This column controls whether a page appears in the bottom bar
    alongside the copyright notice (e.g., Privacy Policy, Terms of Service).
    """
    # NOTE(review): `default=False` below is a client-side (ORM insert)
    # default and has no effect on the DDL a migration emits; the backfill
    # UPDATE underneath is what actually sets values for existing rows.
    # Use server_default if a DB-level default is intended.
    op.add_column(
        'content_pages',
        sa.Column('show_in_legal', sa.Boolean(), nullable=True, default=False)
    )
    # Set default value for existing rows (PostgreSQL uses true/false for boolean)
    op.execute("UPDATE content_pages SET show_in_legal = false WHERE show_in_legal IS NULL")
    # Set privacy and terms pages to show in legal by default
    op.execute("UPDATE content_pages SET show_in_legal = true WHERE slug IN ('privacy', 'terms')")
def downgrade() -> None:
    """Drop the show_in_legal flag added by this revision from content_pages."""
    op.drop_column('content_pages', 'show_in_legal')

View File

@@ -0,0 +1,35 @@
"""add_letzshop_credentials_carrier_fields
Revision ID: c00d2985701f
Revises: 55b92e155566
Create Date: 2025-12-20 18:49:53.432904
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'c00d2985701f'
down_revision: Union[str, None] = '55b92e155566'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add test-mode and per-carrier label-URL settings to vendor_letzshop_credentials."""
    new_columns = (
        sa.Column('test_mode_enabled', sa.Boolean(), nullable=True, server_default='0'),
        sa.Column('default_carrier', sa.String(length=50), nullable=True),
        sa.Column('carrier_greco_label_url', sa.String(length=500), nullable=True, server_default='https://dispatchweb.fr/Tracky/Home/'),
        sa.Column('carrier_colissimo_label_url', sa.String(length=500), nullable=True),
        sa.Column('carrier_xpresslogistics_label_url', sa.String(length=500), nullable=True),
    )
    for column in new_columns:
        op.add_column('vendor_letzshop_credentials', column)
def downgrade() -> None:
    """Remove the carrier settings and test-mode columns added by this revision."""
    # Drop in reverse order of creation.
    for column_name in (
        'carrier_xpresslogistics_label_url',
        'carrier_colissimo_label_url',
        'carrier_greco_label_url',
        'default_carrier',
        'test_mode_enabled',
    ):
        op.drop_column('vendor_letzshop_credentials', column_name)

View File

@@ -0,0 +1,452 @@
"""unified_order_schema
Revision ID: c1d2e3f4a5b6
Revises: 2362c2723a93
Create Date: 2025-12-19
This migration implements the unified order schema:
- Removes the separate letzshop_orders table
- Enhances the orders table with:
- Customer/address snapshots (preserved at order time)
- External marketplace references
- Tracking provider field
- Enhances order_items with:
- GTIN fields
- External item references
- Item state for marketplace confirmation flow
- Updates letzshop_fulfillment_queue to reference orders table directly
Design principles:
- Single orders table for all channels (direct, letzshop, etc.)
- Customer/address data snapshotted at order time
- Products must exist in catalog (enforced by FK)
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision: str = 'c1d2e3f4a5b6'
down_revision: Union[str, None] = '2362c2723a93'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def table_exists(table_name: str) -> bool:
    """Return True when *table_name* is present in the connected database."""
    return table_name in inspect(op.get_bind()).get_table_names()
def index_exists(index_name: str, table_name: str) -> bool:
    """Return True when *table_name* has an index named *index_name*."""
    inspector = inspect(op.get_bind())
    try:
        return index_name in {ix['name'] for ix in inspector.get_indexes(table_name)}
    except Exception:
        # Table may not exist (or backend cannot introspect it) -- treat as absent.
        return False
def safe_drop_index(index_name: str, table_name: str) -> None:
    """Drop *index_name* from *table_name*; do nothing when it is absent."""
    if not index_exists(index_name, table_name):
        return
    op.drop_index(index_name, table_name=table_name)
def safe_drop_table(table_name: str) -> None:
    """Drop *table_name*; do nothing when it does not exist."""
    if not table_exists(table_name):
        return
    op.drop_table(table_name)
def upgrade() -> None:
    """Rebuild the order schema around a single unified ``orders`` table.

    Drops the legacy ``letzshop_fulfillment_queue``, ``letzshop_orders``,
    ``order_items`` and ``orders`` tables (children first, so no FK ever
    dangles) and recreates ``orders``, ``order_items`` and
    ``letzshop_fulfillment_queue`` with customer/address snapshot columns
    and external-marketplace references.

    NOTE(review): Step 1 is destructive -- no data is copied out of the old
    tables before they are dropped. Presumably acceptable for the deployment
    stage this targets; confirm before running against a database that holds
    live orders.
    """
    # =========================================================================
    # Step 1: Drop old tables that will be replaced (if they exist)
    # =========================================================================
    # Drop letzshop_fulfillment_queue (references letzshop_orders)
    if table_exists('letzshop_fulfillment_queue'):
        safe_drop_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue')
        safe_drop_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue')
        safe_drop_index('ix_letzshop_fulfillment_queue_vendor_id', 'letzshop_fulfillment_queue')
        safe_drop_index('ix_letzshop_fulfillment_queue_id', 'letzshop_fulfillment_queue')
        op.drop_table('letzshop_fulfillment_queue')
    # Drop letzshop_orders table (replaced by unified orders)
    if table_exists('letzshop_orders'):
        safe_drop_index('idx_letzshop_order_sync', 'letzshop_orders')
        safe_drop_index('idx_letzshop_order_state', 'letzshop_orders')
        safe_drop_index('idx_letzshop_order_vendor', 'letzshop_orders')
        safe_drop_index('ix_letzshop_orders_vendor_id', 'letzshop_orders')
        safe_drop_index('ix_letzshop_orders_letzshop_shipment_id', 'letzshop_orders')
        safe_drop_index('ix_letzshop_orders_letzshop_order_id', 'letzshop_orders')
        safe_drop_index('ix_letzshop_orders_id', 'letzshop_orders')
        op.drop_table('letzshop_orders')
    # Drop order_items (references orders)
    if table_exists('order_items'):
        safe_drop_index('ix_order_items_id', 'order_items')
        safe_drop_index('ix_order_items_order_id', 'order_items')
        op.drop_table('order_items')
    # Drop old orders table
    if table_exists('orders'):
        safe_drop_index('ix_orders_external_order_id', 'orders')
        safe_drop_index('ix_orders_channel', 'orders')
        safe_drop_index('ix_orders_vendor_id', 'orders')
        safe_drop_index('ix_orders_status', 'orders')
        safe_drop_index('ix_orders_order_number', 'orders')
        safe_drop_index('ix_orders_id', 'orders')
        safe_drop_index('ix_orders_customer_id', 'orders')
        op.drop_table('orders')
    # =========================================================================
    # Step 2: Create new unified orders table
    # =========================================================================
    op.create_table('orders',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        sa.Column('customer_id', sa.Integer(), nullable=False),
        sa.Column('order_number', sa.String(length=100), nullable=False),
        # Channel/Source
        sa.Column('channel', sa.String(length=50), nullable=False, server_default='direct'),
        # External references (for marketplace orders)
        sa.Column('external_order_id', sa.String(length=100), nullable=True),
        sa.Column('external_shipment_id', sa.String(length=100), nullable=True),
        sa.Column('external_order_number', sa.String(length=100), nullable=True),
        sa.Column('external_data', sa.JSON(), nullable=True),
        # Status
        sa.Column('status', sa.String(length=50), nullable=False, server_default='pending'),
        # Financials
        sa.Column('subtotal', sa.Float(), nullable=True),
        sa.Column('tax_amount', sa.Float(), nullable=True),
        sa.Column('shipping_amount', sa.Float(), nullable=True),
        sa.Column('discount_amount', sa.Float(), nullable=True),
        sa.Column('total_amount', sa.Float(), nullable=False),
        sa.Column('currency', sa.String(length=10), server_default='EUR', nullable=True),
        # Customer snapshot (preserved as entered at order time)
        sa.Column('customer_first_name', sa.String(length=100), nullable=False),
        sa.Column('customer_last_name', sa.String(length=100), nullable=False),
        sa.Column('customer_email', sa.String(length=255), nullable=False),
        sa.Column('customer_phone', sa.String(length=50), nullable=True),
        sa.Column('customer_locale', sa.String(length=10), nullable=True),
        # Shipping address snapshot
        sa.Column('ship_first_name', sa.String(length=100), nullable=False),
        sa.Column('ship_last_name', sa.String(length=100), nullable=False),
        sa.Column('ship_company', sa.String(length=200), nullable=True),
        sa.Column('ship_address_line_1', sa.String(length=255), nullable=False),
        sa.Column('ship_address_line_2', sa.String(length=255), nullable=True),
        sa.Column('ship_city', sa.String(length=100), nullable=False),
        sa.Column('ship_postal_code', sa.String(length=20), nullable=False),
        sa.Column('ship_country_iso', sa.String(length=5), nullable=False),
        # Billing address snapshot
        sa.Column('bill_first_name', sa.String(length=100), nullable=False),
        sa.Column('bill_last_name', sa.String(length=100), nullable=False),
        sa.Column('bill_company', sa.String(length=200), nullable=True),
        sa.Column('bill_address_line_1', sa.String(length=255), nullable=False),
        sa.Column('bill_address_line_2', sa.String(length=255), nullable=True),
        sa.Column('bill_city', sa.String(length=100), nullable=False),
        sa.Column('bill_postal_code', sa.String(length=20), nullable=False),
        sa.Column('bill_country_iso', sa.String(length=5), nullable=False),
        # Tracking
        sa.Column('shipping_method', sa.String(length=100), nullable=True),
        sa.Column('tracking_number', sa.String(length=100), nullable=True),
        sa.Column('tracking_provider', sa.String(length=100), nullable=True),
        # Notes
        sa.Column('customer_notes', sa.Text(), nullable=True),
        sa.Column('internal_notes', sa.Text(), nullable=True),
        # Timestamps
        sa.Column('order_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('confirmed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('shipped_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('delivered_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('cancelled_at', sa.DateTime(timezone=True), nullable=True),
        # NOTE(review): the parenthesised '(CURRENT_TIMESTAMP)' default looks
        # SQLite-flavoured -- confirm it renders correctly on the production
        # backend as well.
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        # Foreign keys
        sa.ForeignKeyConstraint(['customer_id'], ['customers.id']),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']),
        sa.PrimaryKeyConstraint('id')
    )
    # Indexes for orders (order_number is the only unique one)
    op.create_index(op.f('ix_orders_id'), 'orders', ['id'], unique=False)
    op.create_index(op.f('ix_orders_vendor_id'), 'orders', ['vendor_id'], unique=False)
    op.create_index(op.f('ix_orders_customer_id'), 'orders', ['customer_id'], unique=False)
    op.create_index(op.f('ix_orders_order_number'), 'orders', ['order_number'], unique=True)
    op.create_index(op.f('ix_orders_channel'), 'orders', ['channel'], unique=False)
    op.create_index(op.f('ix_orders_status'), 'orders', ['status'], unique=False)
    op.create_index(op.f('ix_orders_external_order_id'), 'orders', ['external_order_id'], unique=False)
    op.create_index(op.f('ix_orders_external_shipment_id'), 'orders', ['external_shipment_id'], unique=False)
    # Composite indexes for the common vendor-scoped listing queries
    op.create_index('idx_order_vendor_status', 'orders', ['vendor_id', 'status'], unique=False)
    op.create_index('idx_order_vendor_channel', 'orders', ['vendor_id', 'channel'], unique=False)
    op.create_index('idx_order_vendor_date', 'orders', ['vendor_id', 'order_date'], unique=False)
    # =========================================================================
    # Step 3: Create new order_items table
    # =========================================================================
    op.create_table('order_items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('order_id', sa.Integer(), nullable=False),
        sa.Column('product_id', sa.Integer(), nullable=False),
        # Product snapshot (name/SKU/GTIN at order time)
        sa.Column('product_name', sa.String(length=255), nullable=False),
        sa.Column('product_sku', sa.String(length=100), nullable=True),
        sa.Column('gtin', sa.String(length=50), nullable=True),
        sa.Column('gtin_type', sa.String(length=20), nullable=True),
        # Pricing
        sa.Column('quantity', sa.Integer(), nullable=False),
        sa.Column('unit_price', sa.Float(), nullable=False),
        sa.Column('total_price', sa.Float(), nullable=False),
        # External references (for marketplace items)
        sa.Column('external_item_id', sa.String(length=100), nullable=True),
        sa.Column('external_variant_id', sa.String(length=100), nullable=True),
        # Item state (for marketplace confirmation flow)
        sa.Column('item_state', sa.String(length=50), nullable=True),
        # Inventory tracking
        sa.Column('inventory_reserved', sa.Boolean(), server_default='0', nullable=True),
        sa.Column('inventory_fulfilled', sa.Boolean(), server_default='0', nullable=True),
        # Timestamps
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        # Foreign keys -- product_id FK enforces "products must exist in catalog"
        sa.ForeignKeyConstraint(['order_id'], ['orders.id']),
        sa.ForeignKeyConstraint(['product_id'], ['products.id']),
        sa.PrimaryKeyConstraint('id')
    )
    # Indexes for order_items (gtin index supports EAN-based matching)
    op.create_index(op.f('ix_order_items_id'), 'order_items', ['id'], unique=False)
    op.create_index(op.f('ix_order_items_order_id'), 'order_items', ['order_id'], unique=False)
    op.create_index(op.f('ix_order_items_product_id'), 'order_items', ['product_id'], unique=False)
    op.create_index(op.f('ix_order_items_gtin'), 'order_items', ['gtin'], unique=False)
    # =========================================================================
    # Step 4: Create updated letzshop_fulfillment_queue (references orders)
    # =========================================================================
    op.create_table('letzshop_fulfillment_queue',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        # Now points at the unified orders table instead of letzshop_orders
        sa.Column('order_id', sa.Integer(), nullable=False),
        # Operation type
        sa.Column('operation', sa.String(length=50), nullable=False),
        # Operation payload
        sa.Column('payload', sa.JSON(), nullable=False),
        # Status and retry bookkeeping
        sa.Column('status', sa.String(length=50), server_default='pending', nullable=True),
        sa.Column('attempts', sa.Integer(), server_default='0', nullable=True),
        sa.Column('max_attempts', sa.Integer(), server_default='3', nullable=True),
        sa.Column('last_attempt_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('next_retry_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        # Response from Letzshop
        sa.Column('response_data', sa.JSON(), nullable=True),
        # Timestamps
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        # Foreign keys
        sa.ForeignKeyConstraint(['order_id'], ['orders.id']),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']),
        sa.PrimaryKeyConstraint('id')
    )
    # Indexes for letzshop_fulfillment_queue
    op.create_index(op.f('ix_letzshop_fulfillment_queue_id'), 'letzshop_fulfillment_queue', ['id'], unique=False)
    op.create_index(op.f('ix_letzshop_fulfillment_queue_vendor_id'), 'letzshop_fulfillment_queue', ['vendor_id'], unique=False)
    op.create_index(op.f('ix_letzshop_fulfillment_queue_order_id'), 'letzshop_fulfillment_queue', ['order_id'], unique=False)
    op.create_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue', ['status', 'vendor_id'], unique=False)
    op.create_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue', ['status', 'next_retry_at'], unique=False)
    op.create_index('idx_fulfillment_queue_order', 'letzshop_fulfillment_queue', ['order_id'], unique=False)
def downgrade() -> None:
    """Revert to the pre-unified order schema.

    Drops the unified tables (children first) and recreates the legacy
    ``orders``, ``order_items``, ``letzshop_orders`` and
    ``letzshop_fulfillment_queue`` tables with their original column layouts.

    NOTE(review): the legacy tables are recreated empty -- order data written
    while on the unified schema is lost on downgrade.
    """
    # Drop new letzshop_fulfillment_queue
    safe_drop_index('idx_fulfillment_queue_order', 'letzshop_fulfillment_queue')
    safe_drop_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue')
    safe_drop_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue')
    safe_drop_index('ix_letzshop_fulfillment_queue_order_id', 'letzshop_fulfillment_queue')
    safe_drop_index('ix_letzshop_fulfillment_queue_vendor_id', 'letzshop_fulfillment_queue')
    safe_drop_index('ix_letzshop_fulfillment_queue_id', 'letzshop_fulfillment_queue')
    safe_drop_table('letzshop_fulfillment_queue')
    # Drop new order_items
    safe_drop_index('ix_order_items_gtin', 'order_items')
    safe_drop_index('ix_order_items_product_id', 'order_items')
    safe_drop_index('ix_order_items_order_id', 'order_items')
    safe_drop_index('ix_order_items_id', 'order_items')
    safe_drop_table('order_items')
    # Drop new orders
    safe_drop_index('idx_order_vendor_date', 'orders')
    safe_drop_index('idx_order_vendor_channel', 'orders')
    safe_drop_index('idx_order_vendor_status', 'orders')
    safe_drop_index('ix_orders_external_shipment_id', 'orders')
    safe_drop_index('ix_orders_external_order_id', 'orders')
    safe_drop_index('ix_orders_status', 'orders')
    safe_drop_index('ix_orders_channel', 'orders')
    safe_drop_index('ix_orders_order_number', 'orders')
    safe_drop_index('ix_orders_customer_id', 'orders')
    safe_drop_index('ix_orders_vendor_id', 'orders')
    safe_drop_index('ix_orders_id', 'orders')
    safe_drop_table('orders')
    # Recreate old orders table (addresses referenced by FK, not snapshotted)
    op.create_table('orders',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        sa.Column('customer_id', sa.Integer(), nullable=False),
        sa.Column('order_number', sa.String(), nullable=False),
        sa.Column('channel', sa.String(length=50), nullable=True, server_default='direct'),
        sa.Column('external_order_id', sa.String(length=100), nullable=True),
        sa.Column('external_channel_data', sa.JSON(), nullable=True),
        sa.Column('status', sa.String(), nullable=False),
        sa.Column('subtotal', sa.Float(), nullable=False),
        sa.Column('tax_amount', sa.Float(), nullable=True),
        sa.Column('shipping_amount', sa.Float(), nullable=True),
        sa.Column('discount_amount', sa.Float(), nullable=True),
        sa.Column('total_amount', sa.Float(), nullable=False),
        sa.Column('currency', sa.String(), nullable=True),
        sa.Column('shipping_address_id', sa.Integer(), nullable=False),
        sa.Column('billing_address_id', sa.Integer(), nullable=False),
        sa.Column('shipping_method', sa.String(), nullable=True),
        sa.Column('tracking_number', sa.String(), nullable=True),
        sa.Column('customer_notes', sa.Text(), nullable=True),
        sa.Column('internal_notes', sa.Text(), nullable=True),
        sa.Column('paid_at', sa.DateTime(), nullable=True),
        sa.Column('shipped_at', sa.DateTime(), nullable=True),
        sa.Column('delivered_at', sa.DateTime(), nullable=True),
        sa.Column('cancelled_at', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['billing_address_id'], ['customer_addresses.id']),
        sa.ForeignKeyConstraint(['customer_id'], ['customers.id']),
        sa.ForeignKeyConstraint(['shipping_address_id'], ['customer_addresses.id']),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_orders_customer_id'), 'orders', ['customer_id'], unique=False)
    op.create_index(op.f('ix_orders_id'), 'orders', ['id'], unique=False)
    op.create_index(op.f('ix_orders_order_number'), 'orders', ['order_number'], unique=True)
    op.create_index(op.f('ix_orders_status'), 'orders', ['status'], unique=False)
    op.create_index(op.f('ix_orders_vendor_id'), 'orders', ['vendor_id'], unique=False)
    op.create_index(op.f('ix_orders_channel'), 'orders', ['channel'], unique=False)
    op.create_index(op.f('ix_orders_external_order_id'), 'orders', ['external_order_id'], unique=False)
    # Recreate old order_items table (no GTIN / external-reference columns)
    op.create_table('order_items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('order_id', sa.Integer(), nullable=False),
        sa.Column('product_id', sa.Integer(), nullable=False),
        sa.Column('product_name', sa.String(), nullable=False),
        sa.Column('product_sku', sa.String(), nullable=True),
        sa.Column('quantity', sa.Integer(), nullable=False),
        sa.Column('unit_price', sa.Float(), nullable=False),
        sa.Column('total_price', sa.Float(), nullable=False),
        sa.Column('inventory_reserved', sa.Boolean(), nullable=True),
        sa.Column('inventory_fulfilled', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['order_id'], ['orders.id']),
        sa.ForeignKeyConstraint(['product_id'], ['products.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_order_items_id'), 'order_items', ['id'], unique=False)
    op.create_index(op.f('ix_order_items_order_id'), 'order_items', ['order_id'], unique=False)
    # Recreate old letzshop_orders table (separate marketplace-order mirror)
    op.create_table('letzshop_orders',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        sa.Column('letzshop_order_id', sa.String(length=100), nullable=False),
        sa.Column('letzshop_shipment_id', sa.String(length=100), nullable=True),
        sa.Column('letzshop_order_number', sa.String(length=100), nullable=True),
        sa.Column('local_order_id', sa.Integer(), nullable=True),
        sa.Column('letzshop_state', sa.String(length=50), nullable=True),
        sa.Column('customer_email', sa.String(length=255), nullable=True),
        sa.Column('customer_name', sa.String(length=255), nullable=True),
        sa.Column('total_amount', sa.String(length=50), nullable=True),
        sa.Column('currency', sa.String(length=10), server_default='EUR', nullable=True),
        sa.Column('customer_locale', sa.String(length=10), nullable=True),
        sa.Column('shipping_country_iso', sa.String(length=5), nullable=True),
        sa.Column('billing_country_iso', sa.String(length=5), nullable=True),
        sa.Column('order_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('raw_order_data', sa.JSON(), nullable=True),
        sa.Column('inventory_units', sa.JSON(), nullable=True),
        sa.Column('sync_status', sa.String(length=50), server_default='pending', nullable=True),
        sa.Column('last_synced_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('sync_error', sa.Text(), nullable=True),
        sa.Column('confirmed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('rejected_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('tracking_set_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('tracking_number', sa.String(length=100), nullable=True),
        sa.Column('tracking_carrier', sa.String(length=100), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.ForeignKeyConstraint(['local_order_id'], ['orders.id']),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_letzshop_orders_id'), 'letzshop_orders', ['id'], unique=False)
    op.create_index(op.f('ix_letzshop_orders_letzshop_order_id'), 'letzshop_orders', ['letzshop_order_id'], unique=False)
    op.create_index(op.f('ix_letzshop_orders_letzshop_shipment_id'), 'letzshop_orders', ['letzshop_shipment_id'], unique=False)
    op.create_index(op.f('ix_letzshop_orders_vendor_id'), 'letzshop_orders', ['vendor_id'], unique=False)
    op.create_index('idx_letzshop_order_vendor', 'letzshop_orders', ['vendor_id', 'letzshop_order_id'], unique=False)
    op.create_index('idx_letzshop_order_state', 'letzshop_orders', ['vendor_id', 'letzshop_state'], unique=False)
    op.create_index('idx_letzshop_order_sync', 'letzshop_orders', ['vendor_id', 'sync_status'], unique=False)
    # Recreate old letzshop_fulfillment_queue table (FK back to letzshop_orders)
    op.create_table('letzshop_fulfillment_queue',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        sa.Column('letzshop_order_id', sa.Integer(), nullable=False),
        sa.Column('operation', sa.String(length=50), nullable=False),
        sa.Column('payload', sa.JSON(), nullable=False),
        sa.Column('status', sa.String(length=50), server_default='pending', nullable=True),
        sa.Column('attempts', sa.Integer(), server_default='0', nullable=True),
        sa.Column('max_attempts', sa.Integer(), server_default='3', nullable=True),
        sa.Column('last_attempt_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('next_retry_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('response_data', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.ForeignKeyConstraint(['letzshop_order_id'], ['letzshop_orders.id']),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_letzshop_fulfillment_queue_id'), 'letzshop_fulfillment_queue', ['id'], unique=False)
    op.create_index(op.f('ix_letzshop_fulfillment_queue_vendor_id'), 'letzshop_fulfillment_queue', ['vendor_id'], unique=False)
    op.create_index('idx_fulfillment_queue_status', 'letzshop_fulfillment_queue', ['status', 'vendor_id'], unique=False)
    op.create_index('idx_fulfillment_queue_retry', 'letzshop_fulfillment_queue', ['status', 'next_retry_at'], unique=False)

View File

@@ -0,0 +1,64 @@
"""add_tax_rate_cost_and_letzshop_settings
Revision ID: c9e22eadf533
Revises: e1f2a3b4c5d6
Create Date: 2025-12-20 21:13:30.709696
Adds:
- tax_rate_percent to products and marketplace_products (NOT NULL, default 17)
- cost_cents to products (for profit calculation)
- Letzshop feed settings to vendors (tax_rate, boost_sort, delivery_method, preorder_days)
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'c9e22eadf533'
down_revision: Union[str, None] = 'e1f2a3b4c5d6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add tax-rate/cost columns and vendor Letzshop feed settings.

    - marketplace_products / products: ``tax_rate_percent`` (NOT NULL,
      default 17 -- the Luxembourg standard VAT rate per the file docstring)
    - products: ``cost_cents`` for profit calculation; drops the legacy
      ``supplier_cost_cents`` column when present
    - vendors: Letzshop feed defaults (tax rate, boost sort, delivery
      method, preorder days)
    """
    # === MARKETPLACE PRODUCTS: Add tax_rate_percent ===
    with op.batch_alter_table('marketplace_products', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tax_rate_percent', sa.Integer(), nullable=False, server_default='17'))

    # === PRODUCTS: Add tax_rate_percent and cost_cents, drop supplier_cost_cents ===
    # Batch operations only execute when the context manager exits, so a
    # try/except around batch_op.drop_column cannot catch a missing-column
    # failure.  Check for the column up front via the inspector instead.
    inspector = sa.inspect(op.get_bind())
    product_columns = {col['name'] for col in inspector.get_columns('products')}
    with op.batch_alter_table('products', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tax_rate_percent', sa.Integer(), nullable=False, server_default='17'))
        batch_op.add_column(sa.Column('cost_cents', sa.Integer(), nullable=True))
        # Drop old supplier_cost_cents column (data migrated to cost_cents if needed)
        if 'supplier_cost_cents' in product_columns:
            batch_op.drop_column('supplier_cost_cents')

    # === VENDORS: Add Letzshop feed settings ===
    with op.batch_alter_table('vendors', schema=None) as batch_op:
        batch_op.add_column(sa.Column('letzshop_default_tax_rate', sa.Integer(), nullable=False, server_default='17'))
        batch_op.add_column(sa.Column('letzshop_boost_sort', sa.String(length=10), nullable=True, server_default='5.0'))
        batch_op.add_column(sa.Column('letzshop_delivery_method', sa.String(length=100), nullable=True, server_default='package_delivery'))
        batch_op.add_column(sa.Column('letzshop_preorder_days', sa.Integer(), nullable=True, server_default='1'))
def downgrade() -> None:
    """Revert the tax-rate/cost columns and the vendor Letzshop feed settings."""
    # vendors: remove the Letzshop feed defaults (reverse order of creation).
    with op.batch_alter_table('vendors', schema=None) as batch_op:
        for column_name in (
            'letzshop_preorder_days',
            'letzshop_delivery_method',
            'letzshop_boost_sort',
            'letzshop_default_tax_rate',
        ):
            batch_op.drop_column(column_name)
    # products: drop the new columns and restore the legacy supplier_cost_cents.
    with op.batch_alter_table('products', schema=None) as batch_op:
        batch_op.drop_column('cost_cents')
        batch_op.drop_column('tax_rate_percent')
        batch_op.add_column(sa.Column('supplier_cost_cents', sa.Integer(), nullable=True))
    # marketplace_products: drop the VAT-rate column.
    with op.batch_alter_table('marketplace_products', schema=None) as batch_op:
        batch_op.drop_column('tax_rate_percent')

View File

@@ -0,0 +1,37 @@
"""add_gtin_columns_to_product_table
Revision ID: cb88bc9b5f86
Revises: a9a86cef6cca
Create Date: 2025-12-18 20:54:55.185857
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'cb88bc9b5f86'
down_revision: Union[str, None] = 'a9a86cef6cca'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add GTIN barcode columns and lookup indexes to the products table."""
    # gtin holds the raw barcode (e.g. "0889698273022"); gtin_type records the
    # format reported by Letzshop (e.g. "gtin13", "gtin14", "isbn13").
    for new_column in (
        sa.Column('gtin', sa.String(length=50), nullable=True),
        sa.Column('gtin_type', sa.String(length=20), nullable=True),
    ):
        op.add_column('products', new_column)
    # Indexes used for EAN lookups during order matching.
    op.create_index('idx_product_gtin', 'products', ['gtin'], unique=False)
    op.create_index('idx_product_vendor_gtin', 'products', ['vendor_id', 'gtin'], unique=False)
def downgrade() -> None:
    """Remove the GTIN columns and their indexes from the products table."""
    for index_name in ('idx_product_vendor_gtin', 'idx_product_gtin'):
        op.drop_index(index_name, table_name='products')
    for column_name in ('gtin_type', 'gtin'):
        op.drop_column('products', column_name)

View File

@@ -0,0 +1,77 @@
"""add_companies_table_and_restructure_vendors
Revision ID: d0325d7c0f25
Revises: 0bd9ffaaced1
Create Date: 2025-11-30 14:58:17.165142
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'd0325d7c0f25'
down_revision: Union[str, None] = '0bd9ffaaced1'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the companies table and re-parent vendors under it.

    Adds a nullable ``company_id`` FK to vendors and drops the per-vendor
    contact columns, which move to the company level.

    NOTE(review): the dropped vendor contact values are not copied into
    ``companies`` by this migration -- presumably handled elsewhere; confirm
    before running against populated data.
    """
    # Create companies table
    op.create_table(
        'companies',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('owner_user_id', sa.Integer(), nullable=False),
        sa.Column('contact_email', sa.String(), nullable=False),
        sa.Column('contact_phone', sa.String(), nullable=True),
        sa.Column('website', sa.String(), nullable=True),
        sa.Column('business_address', sa.Text(), nullable=True),
        sa.Column('tax_number', sa.String(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'),
        sa.Column('is_verified', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.func.now()),
        # NOTE(review): ``onupdate`` here looks like an ORM-side hook with no
        # DDL effect in a migration-created table -- confirm intent.
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.func.now(), onupdate=sa.func.now()),
        sa.ForeignKeyConstraint(['owner_user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_companies_id'), 'companies', ['id'], unique=False)
    op.create_index(op.f('ix_companies_name'), 'companies', ['name'], unique=False)
    # Use batch mode for SQLite to modify vendors table
    with op.batch_alter_table('vendors', schema=None) as batch_op:
        # Add company_id column (nullable so existing vendors stay valid)
        batch_op.add_column(sa.Column('company_id', sa.Integer(), nullable=True))
        batch_op.create_index(batch_op.f('ix_vendors_company_id'), ['company_id'], unique=False)
        batch_op.create_foreign_key('fk_vendors_company_id', 'companies', ['company_id'], ['id'])
        # Remove old contact fields (now owned by the company)
        batch_op.drop_column('contact_email')
        batch_op.drop_column('contact_phone')
        batch_op.drop_column('website')
        batch_op.drop_column('business_address')
        batch_op.drop_column('tax_number')
def downgrade() -> None:
    """Drop the companies table and restore per-vendor contact columns.

    NOTE(review): the re-added vendor contact columns come back empty -- any
    values moved to ``companies`` are lost on downgrade.
    """
    # Use batch mode for SQLite to modify vendors table
    with op.batch_alter_table('vendors', schema=None) as batch_op:
        # Re-add contact fields to vendors (reverse order of the upgrade drops)
        batch_op.add_column(sa.Column('tax_number', sa.String(), nullable=True))
        batch_op.add_column(sa.Column('business_address', sa.Text(), nullable=True))
        batch_op.add_column(sa.Column('website', sa.String(), nullable=True))
        batch_op.add_column(sa.Column('contact_phone', sa.String(), nullable=True))
        batch_op.add_column(sa.Column('contact_email', sa.String(), nullable=True))
        # Remove company_id from vendors (constraint, then index, then column)
        batch_op.drop_constraint('fk_vendors_company_id', type_='foreignkey')
        batch_op.drop_index(batch_op.f('ix_vendors_company_id'))
        batch_op.drop_column('company_id')
    # Drop companies table
    op.drop_index(op.f('ix_companies_name'), table_name='companies')
    op.drop_index(op.f('ix_companies_id'), table_name='companies')
    op.drop_table('companies')

View File

@@ -0,0 +1,179 @@
"""add_order_item_exceptions
Revision ID: d2e3f4a5b6c7
Revises: c1d2e3f4a5b6
Create Date: 2025-12-20
This migration adds the Order Item Exception system:
- Adds needs_product_match column to order_items table
- Creates order_item_exceptions table for tracking unmatched products
The exception system allows marketplace orders to be imported even when
products are not found by GTIN. Items are linked to a placeholder product
and exceptions are tracked for QC resolution.
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision: str = 'd2e3f4a5b6c7'  # id of this migration
down_revision: Union[str, None] = 'c1d2e3f4a5b6'  # parent revision in the chain
branch_labels: Union[str, Sequence[str], None] = None  # no named branch
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependency
def column_exists(table_name: str, column_name: str) -> bool:
    """Return True when *table_name* already contains *column_name*."""
    inspector = inspect(op.get_bind())
    return any(
        col['name'] == column_name
        for col in inspector.get_columns(table_name)
    )
def table_exists(table_name: str) -> bool:
    """Return True when a table named *table_name* exists in the database."""
    return table_name in inspect(op.get_bind()).get_table_names()
def index_exists(index_name: str, table_name: str) -> bool:
    """Return True when *table_name* carries an index named *index_name*.

    Any inspection failure (e.g. the table does not exist yet) is treated
    as "index absent" and yields False.
    """
    inspector = inspect(op.get_bind())
    try:
        return any(
            entry['name'] == index_name
            for entry in inspector.get_indexes(table_name)
        )
    except Exception:
        return False
def upgrade() -> None:
    """Install the Order Item Exception system (idempotent).

    Adds ``order_items.needs_product_match`` and creates the
    ``order_item_exceptions`` table with its indexes. Every step is guarded
    by an existence check so the migration can be re-run safely against a
    partially migrated database.
    """
    # =========================================================================
    # Step 1: Add needs_product_match column to order_items
    # =========================================================================
    if not column_exists('order_items', 'needs_product_match'):
        op.add_column(
            'order_items',
            sa.Column(
                'needs_product_match',
                sa.Boolean(),
                server_default='0',
                nullable=False
            )
        )
    if not index_exists('ix_order_items_needs_product_match', 'order_items'):
        op.create_index(
            'ix_order_items_needs_product_match',
            'order_items',
            ['needs_product_match']
        )
    # =========================================================================
    # Step 2: Create order_item_exceptions table
    # =========================================================================
    if not table_exists('order_item_exceptions'):
        op.create_table(
            'order_item_exceptions',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('order_item_id', sa.Integer(), nullable=False),
            sa.Column('vendor_id', sa.Integer(), nullable=False),
            # Snapshot of the original marketplace item data, kept for QC review
            sa.Column('original_gtin', sa.String(length=50), nullable=True),
            sa.Column('original_product_name', sa.String(length=500), nullable=True),
            sa.Column('original_sku', sa.String(length=100), nullable=True),
            sa.Column(
                'exception_type',
                sa.String(length=50),
                nullable=False,
                server_default='product_not_found'
            ),
            sa.Column(
                'status',
                sa.String(length=50),
                nullable=False,
                server_default='pending'
            ),
            # Resolution bookkeeping (filled in when QC matches the product)
            sa.Column('resolved_product_id', sa.Integer(), nullable=True),
            sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
            sa.Column('resolved_by', sa.Integer(), nullable=True),
            sa.Column('resolution_notes', sa.Text(), nullable=True),
            sa.Column(
                'created_at',
                sa.DateTime(timezone=True),
                server_default=sa.text('(CURRENT_TIMESTAMP)'),
                nullable=False
            ),
            sa.Column(
                'updated_at',
                sa.DateTime(timezone=True),
                server_default=sa.text('(CURRENT_TIMESTAMP)'),
                nullable=False
            ),
            # Deleting an order item removes its exception as well
            sa.ForeignKeyConstraint(
                ['order_item_id'],
                ['order_items.id'],
                ondelete='CASCADE'
            ),
            sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']),
            sa.ForeignKeyConstraint(['resolved_product_id'], ['products.id']),
            sa.ForeignKeyConstraint(['resolved_by'], ['users.id']),
            sa.PrimaryKeyConstraint('id')
        )
        # Create indexes
        op.create_index(
            'ix_order_item_exceptions_id',
            'order_item_exceptions',
            ['id']
        )
        op.create_index(
            'ix_order_item_exceptions_vendor_id',
            'order_item_exceptions',
            ['vendor_id']
        )
        op.create_index(
            'ix_order_item_exceptions_status',
            'order_item_exceptions',
            ['status']
        )
        op.create_index(
            'idx_exception_vendor_status',
            'order_item_exceptions',
            ['vendor_id', 'status']
        )
        op.create_index(
            'idx_exception_gtin',
            'order_item_exceptions',
            ['vendor_id', 'original_gtin']
        )
        # Unique constraint on order_item_id (one exception per item)
        op.create_index(
            'uq_order_item_exception',
            'order_item_exceptions',
            ['order_item_id'],
            unique=True
        )
def downgrade() -> None:
    """Remove the Order Item Exception system (idempotent).

    Drops the ``order_item_exceptions`` table and the
    ``order_items.needs_product_match`` column when they exist.
    """
    # Drop order_item_exceptions table
    if table_exists('order_item_exceptions'):
        op.drop_index('uq_order_item_exception', table_name='order_item_exceptions')
        op.drop_index('idx_exception_gtin', table_name='order_item_exceptions')
        op.drop_index('idx_exception_vendor_status', table_name='order_item_exceptions')
        op.drop_index('ix_order_item_exceptions_status', table_name='order_item_exceptions')
        op.drop_index('ix_order_item_exceptions_vendor_id', table_name='order_item_exceptions')
        op.drop_index('ix_order_item_exceptions_id', table_name='order_item_exceptions')
        op.drop_table('order_item_exceptions')
    # Remove needs_product_match column from order_items
    if column_exists('order_items', 'needs_product_match'):
        if index_exists('ix_order_items_needs_product_match', 'order_items'):
            op.drop_index('ix_order_items_needs_product_match', table_name='order_items')
        op.drop_column('order_items', 'needs_product_match')

View File

@@ -0,0 +1,332 @@
"""add email templates and logs tables
Revision ID: d7a4a3f06394
Revises: 404b3e2d2865
Create Date: 2025-12-27 20:48:00.661523
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision: str = 'd7a4a3f06394'  # id of this migration
down_revision: Union[str, None] = '404b3e2d2865'  # parent revision in the chain
branch_labels: Union[str, Sequence[str], None] = None  # no named branch
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependency
def upgrade() -> None:
    """Create email template/log tables and reconcile legacy schema drift.

    Besides the two new email tables this migration tightens nullability on
    several timestamp columns and rebuilds a number of indexes and
    constraints. Raw SQL with ``IF EXISTS`` / ``IF NOT EXISTS`` and
    PostgreSQL ``DO`` blocks is used so the statements are idempotent.

    NOTE(review): the DO-block and pg_constraint guards are
    PostgreSQL-specific; this migration will not run on SQLite.
    """
    # Create email_templates table
    op.create_table('email_templates',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(length=100), nullable=False),
        sa.Column('language', sa.String(length=5), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('category', sa.String(length=50), nullable=False),
        sa.Column('subject', sa.String(length=500), nullable=False),
        sa.Column('body_html', sa.Text(), nullable=False),
        sa.Column('body_text', sa.Text(), nullable=True),
        sa.Column('variables', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_email_templates_category'), 'email_templates', ['category'], unique=False)
    op.create_index(op.f('ix_email_templates_code'), 'email_templates', ['code'], unique=False)
    op.create_index(op.f('ix_email_templates_id'), 'email_templates', ['id'], unique=False)
    # Create email_logs table
    op.create_table('email_logs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('template_code', sa.String(length=100), nullable=True),
        sa.Column('template_id', sa.Integer(), nullable=True),
        sa.Column('recipient_email', sa.String(length=255), nullable=False),
        sa.Column('recipient_name', sa.String(length=255), nullable=True),
        sa.Column('subject', sa.String(length=500), nullable=False),
        sa.Column('body_html', sa.Text(), nullable=True),
        sa.Column('body_text', sa.Text(), nullable=True),
        sa.Column('from_email', sa.String(length=255), nullable=False),
        sa.Column('from_name', sa.String(length=255), nullable=True),
        sa.Column('reply_to', sa.String(length=255), nullable=True),
        sa.Column('status', sa.String(length=20), nullable=False),
        # Delivery lifecycle timestamps
        sa.Column('sent_at', sa.DateTime(), nullable=True),
        sa.Column('delivered_at', sa.DateTime(), nullable=True),
        sa.Column('opened_at', sa.DateTime(), nullable=True),
        sa.Column('clicked_at', sa.DateTime(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('retry_count', sa.Integer(), nullable=False),
        sa.Column('provider', sa.String(length=50), nullable=True),
        sa.Column('provider_message_id', sa.String(length=255), nullable=True),
        sa.Column('vendor_id', sa.Integer(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        # Polymorphic link to the entity that triggered the email
        sa.Column('related_type', sa.String(length=50), nullable=True),
        sa.Column('related_id', sa.Integer(), nullable=True),
        sa.Column('extra_data', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['template_id'], ['email_templates.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.id']),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_email_logs_id'), 'email_logs', ['id'], unique=False)
    op.create_index(op.f('ix_email_logs_provider_message_id'), 'email_logs', ['provider_message_id'], unique=False)
    op.create_index(op.f('ix_email_logs_recipient_email'), 'email_logs', ['recipient_email'], unique=False)
    op.create_index(op.f('ix_email_logs_status'), 'email_logs', ['status'], unique=False)
    op.create_index(op.f('ix_email_logs_template_code'), 'email_logs', ['template_code'], unique=False)
    op.create_index(op.f('ix_email_logs_user_id'), 'email_logs', ['user_id'], unique=False)
    op.create_index(op.f('ix_email_logs_vendor_id'), 'email_logs', ['vendor_id'], unique=False)
    # application_logs - alter columns
    op.alter_column('application_logs', 'created_at', existing_type=sa.DATETIME(), nullable=False)
    op.alter_column('application_logs', 'updated_at', existing_type=sa.DATETIME(), nullable=False)
    # capacity_snapshots indexes (PostgreSQL IF EXISTS/IF NOT EXISTS)
    op.execute(text("DROP INDEX IF EXISTS ix_capacity_snapshots_date"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_capacity_snapshots_date ON capacity_snapshots (snapshot_date)"))
    op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_capacity_snapshots_snapshot_date ON capacity_snapshots (snapshot_date)"))
    # cart_items - alter columns
    op.alter_column('cart_items', 'created_at', existing_type=sa.DATETIME(), nullable=False)
    op.alter_column('cart_items', 'updated_at', existing_type=sa.DATETIME(), nullable=False)
    # customer_addresses index rename
    op.execute(text("DROP INDEX IF EXISTS ix_customers_addresses_id"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_customer_addresses_id ON customer_addresses (id)"))
    # inventory - alter columns and constraints
    op.alter_column('inventory', 'warehouse', existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('inventory', 'bin_location', existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('inventory', 'location', existing_type=sa.VARCHAR(), nullable=True)
    op.execute(text("DROP INDEX IF EXISTS idx_inventory_product_location"))
    op.execute(text("ALTER TABLE inventory DROP CONSTRAINT IF EXISTS uq_inventory_product_location"))
    op.execute(text("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_inventory_product_warehouse_bin') THEN
                ALTER TABLE inventory ADD CONSTRAINT uq_inventory_product_warehouse_bin UNIQUE (product_id, warehouse, bin_location);
            END IF;
        END $$;
    """))
    # marketplace_import_errors and translations indexes
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_import_errors_import_job_id ON marketplace_import_errors (import_job_id)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_product_translations_id ON marketplace_product_translations (id)"))
    # marketplace_products - alter columns
    op.alter_column('marketplace_products', 'is_digital', existing_type=sa.BOOLEAN(), nullable=True)
    op.alter_column('marketplace_products', 'is_active', existing_type=sa.BOOLEAN(), nullable=True)
    # marketplace_products indexes
    op.execute(text("DROP INDEX IF EXISTS idx_mp_is_active"))
    op.execute(text("DROP INDEX IF EXISTS idx_mp_platform"))
    op.execute(text("DROP INDEX IF EXISTS idx_mp_sku"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_is_active ON marketplace_products (is_active)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_is_digital ON marketplace_products (is_digital)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_mpn ON marketplace_products (mpn)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_platform ON marketplace_products (platform)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_sku ON marketplace_products (sku)"))
    # order_item_exceptions - constraints and indexes
    # (replace the unique index from d2e3f4a5b6c7 with a real UNIQUE constraint)
    op.execute(text("DROP INDEX IF EXISTS uq_order_item_exception"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_order_item_exceptions_original_gtin ON order_item_exceptions (original_gtin)"))
    op.execute(text("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_order_item_exceptions_order_item_id') THEN
                ALTER TABLE order_item_exceptions ADD CONSTRAINT uq_order_item_exceptions_order_item_id UNIQUE (order_item_id);
            END IF;
        END $$;
    """))
    # order_items - alter column
    op.alter_column('order_items', 'needs_product_match', existing_type=sa.BOOLEAN(), nullable=True)
    # order_items indexes
    op.execute(text("DROP INDEX IF EXISTS ix_order_items_gtin"))
    op.execute(text("DROP INDEX IF EXISTS ix_order_items_product_id"))
    # product_translations index
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_product_translations_id ON product_translations (id)"))
    # products indexes
    op.execute(text("DROP INDEX IF EXISTS idx_product_active"))
    op.execute(text("DROP INDEX IF EXISTS idx_product_featured"))
    op.execute(text("DROP INDEX IF EXISTS idx_product_gtin"))
    op.execute(text("DROP INDEX IF EXISTS idx_product_vendor_gtin"))
    # products constraint
    op.execute(text("ALTER TABLE products DROP CONSTRAINT IF EXISTS uq_product"))
    op.execute(text("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_vendor_marketplace_product') THEN
                ALTER TABLE products ADD CONSTRAINT uq_vendor_marketplace_product UNIQUE (vendor_id, marketplace_product_id);
            END IF;
        END $$;
    """))
    # products new indexes
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_vendor_active ON products (vendor_id, is_active)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_vendor_featured ON products (vendor_id, is_featured)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_products_gtin ON products (gtin)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_products_vendor_sku ON products (vendor_sku)"))
    # vendor_domains indexes
    op.execute(text("DROP INDEX IF EXISTS ix_vendors_domains_domain"))
    op.execute(text("DROP INDEX IF EXISTS ix_vendors_domains_id"))
    op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_vendor_domains_domain ON vendor_domains (domain)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_domains_id ON vendor_domains (id)"))
    # vendor_subscriptions - alter column and FK
    op.alter_column('vendor_subscriptions', 'payment_retry_count', existing_type=sa.INTEGER(), nullable=False)
    op.execute(text("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_vendor_subscriptions_tier_id') THEN
                ALTER TABLE vendor_subscriptions ADD CONSTRAINT fk_vendor_subscriptions_tier_id
                    FOREIGN KEY (tier_id) REFERENCES subscription_tiers(id);
            END IF;
        END $$;
    """))
    # vendor_themes indexes
    op.execute(text("DROP INDEX IF EXISTS ix_vendors_themes_id"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_themes_id ON vendor_themes (id)"))
    # vendor_users indexes
    op.execute(text("DROP INDEX IF EXISTS ix_vendors_users_id"))
    op.execute(text("DROP INDEX IF EXISTS ix_vendors_users_invitation_token"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_users_id ON vendor_users (id)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_users_invitation_token ON vendor_users (invitation_token)"))
    # vendors - alter column
    op.alter_column('vendors', 'company_id', existing_type=sa.INTEGER(), nullable=False)
def downgrade() -> None:
    """Reverse the email tables and the schema reconciliation.

    Mirrors ``upgrade`` in reverse order: relaxes nullability, restores the
    old index/constraint names, and finally drops the ``email_logs`` and
    ``email_templates`` tables. Uses the same PostgreSQL-specific idempotent
    guards as ``upgrade``.
    """
    # vendors
    op.alter_column('vendors', 'company_id', existing_type=sa.INTEGER(), nullable=True)
    # vendor_users indexes
    op.execute(text("DROP INDEX IF EXISTS ix_vendor_users_invitation_token"))
    op.execute(text("DROP INDEX IF EXISTS ix_vendor_users_id"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendors_users_invitation_token ON vendor_users (invitation_token)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendors_users_id ON vendor_users (id)"))
    # vendor_themes indexes
    op.execute(text("DROP INDEX IF EXISTS ix_vendor_themes_id"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendors_themes_id ON vendor_themes (id)"))
    # vendor_subscriptions
    op.execute(text("ALTER TABLE vendor_subscriptions DROP CONSTRAINT IF EXISTS fk_vendor_subscriptions_tier_id"))
    op.alter_column('vendor_subscriptions', 'payment_retry_count', existing_type=sa.INTEGER(), nullable=True)
    # vendor_domains indexes
    op.execute(text("DROP INDEX IF EXISTS ix_vendor_domains_id"))
    op.execute(text("DROP INDEX IF EXISTS ix_vendor_domains_domain"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendors_domains_id ON vendor_domains (id)"))
    op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_vendors_domains_domain ON vendor_domains (domain)"))
    # products constraint and indexes
    op.execute(text("ALTER TABLE products DROP CONSTRAINT IF EXISTS uq_vendor_marketplace_product"))
    op.execute(text("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_product') THEN
                ALTER TABLE products ADD CONSTRAINT uq_product UNIQUE (vendor_id, marketplace_product_id);
            END IF;
        END $$;
    """))
    op.execute(text("DROP INDEX IF EXISTS ix_products_vendor_sku"))
    op.execute(text("DROP INDEX IF EXISTS ix_products_gtin"))
    op.execute(text("DROP INDEX IF EXISTS idx_product_vendor_featured"))
    op.execute(text("DROP INDEX IF EXISTS idx_product_vendor_active"))
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_vendor_gtin ON products (vendor_id, gtin)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_gtin ON products (gtin)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_featured ON products (vendor_id, is_featured)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_active ON products (vendor_id, is_active)"))
    # product_translations
    op.execute(text("DROP INDEX IF EXISTS ix_product_translations_id"))
    # order_items
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_order_items_product_id ON order_items (product_id)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_order_items_gtin ON order_items (gtin)"))
    op.alter_column('order_items', 'needs_product_match', existing_type=sa.BOOLEAN(), nullable=False)
    # order_item_exceptions
    op.execute(text("ALTER TABLE order_item_exceptions DROP CONSTRAINT IF EXISTS uq_order_item_exceptions_order_item_id"))
    op.execute(text("DROP INDEX IF EXISTS ix_order_item_exceptions_original_gtin"))
    op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS uq_order_item_exception ON order_item_exceptions (order_item_id)"))
    # marketplace_products indexes
    op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_sku"))
    op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_platform"))
    op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_mpn"))
    op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_is_digital"))
    op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_is_active"))
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_mp_sku ON marketplace_products (sku)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_mp_platform ON marketplace_products (platform)"))
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_mp_is_active ON marketplace_products (is_active)"))
    # marketplace_products columns
    op.alter_column('marketplace_products', 'is_active', existing_type=sa.BOOLEAN(), nullable=False)
    op.alter_column('marketplace_products', 'is_digital', existing_type=sa.BOOLEAN(), nullable=False)
    # marketplace imports
    op.execute(text("DROP INDEX IF EXISTS ix_marketplace_product_translations_id"))
    op.execute(text("DROP INDEX IF EXISTS ix_marketplace_import_errors_import_job_id"))
    # inventory
    op.execute(text("ALTER TABLE inventory DROP CONSTRAINT IF EXISTS uq_inventory_product_warehouse_bin"))
    op.execute(text("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_inventory_product_location') THEN
                ALTER TABLE inventory ADD CONSTRAINT uq_inventory_product_location UNIQUE (product_id, location);
            END IF;
        END $$;
    """))
    op.execute(text("CREATE INDEX IF NOT EXISTS idx_inventory_product_location ON inventory (product_id, location)"))
    op.alter_column('inventory', 'location', existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('inventory', 'bin_location', existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('inventory', 'warehouse', existing_type=sa.VARCHAR(), nullable=True)
    # customer_addresses
    op.execute(text("DROP INDEX IF EXISTS ix_customer_addresses_id"))
    op.execute(text("CREATE INDEX IF NOT EXISTS ix_customers_addresses_id ON customer_addresses (id)"))
    # cart_items
    op.alter_column('cart_items', 'updated_at', existing_type=sa.DATETIME(), nullable=True)
    op.alter_column('cart_items', 'created_at', existing_type=sa.DATETIME(), nullable=True)
    # capacity_snapshots
    op.execute(text("DROP INDEX IF EXISTS ix_capacity_snapshots_snapshot_date"))
    op.execute(text("DROP INDEX IF EXISTS ix_capacity_snapshots_date"))
    op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_capacity_snapshots_date ON capacity_snapshots (snapshot_date)"))
    # application_logs
    op.alter_column('application_logs', 'updated_at', existing_type=sa.DATETIME(), nullable=True)
    op.alter_column('application_logs', 'created_at', existing_type=sa.DATETIME(), nullable=True)
    # Drop email tables (logs first because of the FK to templates)
    op.drop_index(op.f('ix_email_logs_vendor_id'), table_name='email_logs')
    op.drop_index(op.f('ix_email_logs_user_id'), table_name='email_logs')
    op.drop_index(op.f('ix_email_logs_template_code'), table_name='email_logs')
    op.drop_index(op.f('ix_email_logs_status'), table_name='email_logs')
    op.drop_index(op.f('ix_email_logs_recipient_email'), table_name='email_logs')
    op.drop_index(op.f('ix_email_logs_provider_message_id'), table_name='email_logs')
    op.drop_index(op.f('ix_email_logs_id'), table_name='email_logs')
    op.drop_table('email_logs')
    op.drop_index(op.f('ix_email_templates_id'), table_name='email_templates')
    op.drop_index(op.f('ix_email_templates_code'), table_name='email_templates')
    op.drop_index(op.f('ix_email_templates_category'), table_name='email_templates')
    op.drop_table('email_templates')

View File

@@ -0,0 +1,204 @@
"""Add product type and digital fields to marketplace_products
Revision ID: e1a2b3c4d5e6
Revises: 28d44d503cac
Create Date: 2025-12-11
This migration adds support for:
- Product type classification (physical, digital, service, subscription)
- Digital product fields (delivery method, platform, region restrictions)
- Numeric price fields for filtering/sorting
- Additional images as JSON array
- Source URL tracking
- Flexible attributes JSON column
- Active status flag
It also renames 'product_type' to 'product_type_raw' to preserve the original
Google Shopping feed value while using 'product_type' for the new enum.
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "e1a2b3c4d5e6"
down_revision: Union[str, None] = "28d44d503cac"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Extend marketplace_products with type classification and digital fields.

    Renames the legacy ``product_type`` column to ``product_type_raw``, adds
    the new classification/digital/pricing/metadata columns, and creates the
    supporting indexes. Not idempotent — expects a schema where these
    columns do not exist yet.
    """
    # Rename existing product_type column to product_type_raw
    # to preserve the original Google Shopping feed value
    op.alter_column(
        "marketplace_products",
        "product_type",
        new_column_name="product_type_raw",
    )
    # Add new product classification columns
    op.add_column(
        "marketplace_products",
        sa.Column(
            "product_type_enum",
            sa.String(20),
            nullable=False,
            server_default="physical",
        ),
    )
    op.add_column(
        "marketplace_products",
        sa.Column(
            "is_digital",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("false"),
        ),
    )
    # Add digital product specific fields
    op.add_column(
        "marketplace_products",
        sa.Column("digital_delivery_method", sa.String(20), nullable=True),
    )
    op.add_column(
        "marketplace_products",
        sa.Column("platform", sa.String(50), nullable=True),
    )
    op.add_column(
        "marketplace_products",
        sa.Column("region_restrictions", sa.JSON(), nullable=True),
    )
    op.add_column(
        "marketplace_products",
        sa.Column("license_type", sa.String(50), nullable=True),
    )
    # Add source tracking
    op.add_column(
        "marketplace_products",
        sa.Column("source_url", sa.String(), nullable=True),
    )
    # Add numeric price fields for filtering/sorting
    op.add_column(
        "marketplace_products",
        sa.Column("price_numeric", sa.Float(), nullable=True),
    )
    op.add_column(
        "marketplace_products",
        sa.Column("sale_price_numeric", sa.Float(), nullable=True),
    )
    # Add flexible attributes JSON column
    op.add_column(
        "marketplace_products",
        sa.Column("attributes", sa.JSON(), nullable=True),
    )
    # Add additional images as JSON array (complements existing additional_image_link)
    op.add_column(
        "marketplace_products",
        sa.Column("additional_images", sa.JSON(), nullable=True),
    )
    # Add active status flag
    op.add_column(
        "marketplace_products",
        sa.Column(
            "is_active",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("true"),
        ),
    )
    # Add SKU field for internal reference
    op.add_column(
        "marketplace_products",
        sa.Column("sku", sa.String(), nullable=True),
    )
    # Add weight fields for physical products
    op.add_column(
        "marketplace_products",
        sa.Column("weight", sa.Float(), nullable=True),
    )
    op.add_column(
        "marketplace_products",
        sa.Column("weight_unit", sa.String(10), nullable=True, server_default="kg"),
    )
    op.add_column(
        "marketplace_products",
        sa.Column("dimensions", sa.JSON(), nullable=True),
    )
    # Add category_path for normalized hierarchy
    op.add_column(
        "marketplace_products",
        sa.Column("category_path", sa.String(), nullable=True),
    )
    # Create indexes for new columns
    op.create_index(
        "idx_mp_product_type",
        "marketplace_products",
        ["product_type_enum", "is_digital"],
    )
    op.create_index(
        "idx_mp_is_active",
        "marketplace_products",
        ["is_active"],
    )
    op.create_index(
        "idx_mp_platform",
        "marketplace_products",
        ["platform"],
    )
    op.create_index(
        "idx_mp_sku",
        "marketplace_products",
        ["sku"],
    )
    # NOTE(review): this index references a 'marketplace' column that is not
    # added by this migration — confirm it exists on the table beforehand.
    op.create_index(
        "idx_mp_gtin_marketplace",
        "marketplace_products",
        ["gtin", "marketplace"],
    )
def downgrade() -> None:
    """Drop the classification/digital columns and restore ``product_type``.

    Exact reverse of ``upgrade``: indexes first, then the added columns,
    finally the rename back from ``product_type_raw``. Data in the dropped
    columns is discarded.
    """
    # Drop indexes
    op.drop_index("idx_mp_gtin_marketplace", table_name="marketplace_products")
    op.drop_index("idx_mp_sku", table_name="marketplace_products")
    op.drop_index("idx_mp_platform", table_name="marketplace_products")
    op.drop_index("idx_mp_is_active", table_name="marketplace_products")
    op.drop_index("idx_mp_product_type", table_name="marketplace_products")
    # Drop new columns
    op.drop_column("marketplace_products", "category_path")
    op.drop_column("marketplace_products", "dimensions")
    op.drop_column("marketplace_products", "weight_unit")
    op.drop_column("marketplace_products", "weight")
    op.drop_column("marketplace_products", "sku")
    op.drop_column("marketplace_products", "is_active")
    op.drop_column("marketplace_products", "additional_images")
    op.drop_column("marketplace_products", "attributes")
    op.drop_column("marketplace_products", "sale_price_numeric")
    op.drop_column("marketplace_products", "price_numeric")
    op.drop_column("marketplace_products", "source_url")
    op.drop_column("marketplace_products", "license_type")
    op.drop_column("marketplace_products", "region_restrictions")
    op.drop_column("marketplace_products", "platform")
    op.drop_column("marketplace_products", "digital_delivery_method")
    op.drop_column("marketplace_products", "is_digital")
    op.drop_column("marketplace_products", "product_type_enum")
    # Rename product_type_raw back to product_type
    op.alter_column(
        "marketplace_products",
        "product_type_raw",
        new_column_name="product_type",
    )

View File

@@ -0,0 +1,90 @@
"""add_warehouse_and_bin_location_to_inventory
Revision ID: e1bfb453fbe9
Revises: j8e9f0a1b2c3
Create Date: 2025-12-25 12:21:24.006548
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision: str = 'e1bfb453fbe9'  # id of this migration
down_revision: Union[str, None] = 'j8e9f0a1b2c3'  # parent revision in the chain
branch_labels: Union[str, Sequence[str], None] = None  # no named branch
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependency
def get_column_names(conn, table_name: str) -> set:
    """Return the set of column names on *table_name* (PostgreSQL catalogs)."""
    rows = conn.execute(text(
        "SELECT column_name FROM information_schema.columns "
        "WHERE table_name = :table AND table_schema = 'public'"
    ), {"table": table_name}).fetchall()
    return {name for (name,) in rows}
def get_index_names(conn, table_name: str) -> set:
    """Return the set of index names on *table_name* (PostgreSQL catalogs)."""
    rows = conn.execute(text(
        "SELECT indexname FROM pg_indexes "
        "WHERE tablename = :table AND schemaname = 'public'"
    ), {"table": table_name}).fetchall()
    return {name for (name,) in rows}
def upgrade() -> None:
    """Add warehouse/bin_location columns to inventory and backfill them.

    Idempotent with respect to columns and indexes: each is created only if
    absent (PostgreSQL-specific catalog lookups). Existing rows get
    ``warehouse='strassen'`` and their old ``location`` copied into
    ``bin_location``.
    """
    conn = op.get_bind()
    # Check if columns already exist (idempotent)
    columns = get_column_names(conn, "inventory")
    if 'warehouse' not in columns:
        op.add_column('inventory', sa.Column('warehouse', sa.String(), nullable=False, server_default='strassen'))
    if 'bin_location' not in columns:
        op.add_column('inventory', sa.Column('bin_location', sa.String(), nullable=False, server_default=''))
    # Migrate existing data: copy location to bin_location, set default warehouse
    conn.execute(text("""
        UPDATE inventory
        SET bin_location = COALESCE(location, 'UNKNOWN'),
            warehouse = 'strassen'
        WHERE bin_location IS NULL OR bin_location = ''
    """))
    # Create indexes if they don't exist
    existing_indexes = get_index_names(conn, "inventory")
    if 'idx_inventory_warehouse_bin' not in existing_indexes:
        op.create_index('idx_inventory_warehouse_bin', 'inventory', ['warehouse', 'bin_location'], unique=False)
    if 'ix_inventory_bin_location' not in existing_indexes:
        op.create_index(op.f('ix_inventory_bin_location'), 'inventory', ['bin_location'], unique=False)
    if 'ix_inventory_warehouse' not in existing_indexes:
        op.create_index(op.f('ix_inventory_warehouse'), 'inventory', ['warehouse'], unique=False)
def downgrade() -> None:
    """Drop the warehouse/bin_location columns and their indexes (idempotent).

    NOTE(review): data held in ``bin_location`` is discarded; the original
    ``location`` column is left untouched.
    """
    conn = op.get_bind()
    # Check which indexes exist before dropping
    existing_indexes = get_index_names(conn, "inventory")
    if 'ix_inventory_warehouse' in existing_indexes:
        op.drop_index(op.f('ix_inventory_warehouse'), table_name='inventory')
    if 'ix_inventory_bin_location' in existing_indexes:
        op.drop_index(op.f('ix_inventory_bin_location'), table_name='inventory')
    if 'idx_inventory_warehouse_bin' in existing_indexes:
        op.drop_index('idx_inventory_warehouse_bin', table_name='inventory')
    # Check if columns exist before dropping
    columns = get_column_names(conn, "inventory")
    if 'bin_location' in columns:
        op.drop_column('inventory', 'bin_location')
    if 'warehouse' in columns:
        op.drop_column('inventory', 'warehouse')

View File

@@ -0,0 +1,223 @@
"""convert_prices_to_integer_cents
Revision ID: e1f2a3b4c5d6
Revises: c00d2985701f
Create Date: 2025-12-20 21:30:00.000000
Converts all price/amount columns from Float to Integer cents.
This follows e-commerce best practices (Stripe, PayPal, Shopify) for
precise monetary calculations.
Example: €105.91 is stored as 10591 (integer cents)
Affected tables:
- products: price, sale_price, supplier_cost, margin_percent
- orders: subtotal, tax_amount, shipping_amount, discount_amount, total_amount
- order_items: unit_price, total_price
- cart_items: price_at_add
- marketplace_products: price_numeric, sale_price_numeric
See docs/architecture/money-handling.md for full documentation.
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'e1f2a3b4c5d6'  # id of this migration
down_revision: Union[str, None] = 'c00d2985701f'  # parent revision in the chain
branch_labels: Union[str, Sequence[str], None] = None  # no named branch
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependency
def upgrade() -> None:
    """Convert all monetary Float columns to Integer cents.

    Pattern applied per table:
      1. add nullable ``*_cents`` columns,
      2. copy data over with ``ROUND(value * 100)`` (NULL-preserving where
         the source column is optional),
      3. drop the old Float columns (and tighten NOT NULL where required).

    SQLite cannot alter columns in place, so every schema change goes
    through ``batch_alter_table`` (full table recreation).
    """
    # SQLite requires batch mode for column alterations
    # Strategy: Add new _cents columns, migrate data, drop old columns
    # === PRODUCTS TABLE ===
    with op.batch_alter_table('products', schema=None) as batch_op:
        # Add new cents columns
        batch_op.add_column(sa.Column('price_cents', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('sale_price_cents', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('supplier_cost_cents', sa.Integer(), nullable=True))
        # margin_percent_x100: percentage scaled by 100 (e.g. 12.5% -> 1250).
        batch_op.add_column(sa.Column('margin_percent_x100', sa.Integer(), nullable=True))
    # Migrate data for products
    # price is coalesced to 0; the optional columns keep NULL via the WHERE guard.
    op.execute('UPDATE products SET price_cents = ROUND(COALESCE(price, 0) * 100)')
    op.execute('UPDATE products SET sale_price_cents = ROUND(sale_price * 100) WHERE sale_price IS NOT NULL')
    op.execute('UPDATE products SET supplier_cost_cents = ROUND(supplier_cost * 100) WHERE supplier_cost IS NOT NULL')
    op.execute('UPDATE products SET margin_percent_x100 = ROUND(margin_percent * 100) WHERE margin_percent IS NOT NULL')
    # Drop old columns
    with op.batch_alter_table('products', schema=None) as batch_op:
        batch_op.drop_column('price')
        batch_op.drop_column('sale_price')
        batch_op.drop_column('supplier_cost')
        batch_op.drop_column('margin_percent')
    # === ORDERS TABLE ===
    with op.batch_alter_table('orders', schema=None) as batch_op:
        batch_op.add_column(sa.Column('subtotal_cents', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('tax_amount_cents', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('shipping_amount_cents', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('discount_amount_cents', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('total_amount_cents', sa.Integer(), nullable=True))
    # Migrate data for orders
    op.execute('UPDATE orders SET subtotal_cents = ROUND(COALESCE(subtotal, 0) * 100)')
    op.execute('UPDATE orders SET tax_amount_cents = ROUND(COALESCE(tax_amount, 0) * 100)')
    op.execute('UPDATE orders SET shipping_amount_cents = ROUND(COALESCE(shipping_amount, 0) * 100)')
    op.execute('UPDATE orders SET discount_amount_cents = ROUND(COALESCE(discount_amount, 0) * 100)')
    op.execute('UPDATE orders SET total_amount_cents = ROUND(COALESCE(total_amount, 0) * 100)')
    # Make total_amount_cents NOT NULL after migration
    with op.batch_alter_table('orders', schema=None) as batch_op:
        batch_op.drop_column('subtotal')
        batch_op.drop_column('tax_amount')
        batch_op.drop_column('shipping_amount')
        batch_op.drop_column('discount_amount')
        batch_op.drop_column('total_amount')
        # Alter total_amount_cents to be NOT NULL
        batch_op.alter_column('total_amount_cents',
            existing_type=sa.Integer(),
            nullable=False)
    # === ORDER_ITEMS TABLE ===
    with op.batch_alter_table('order_items', schema=None) as batch_op:
        batch_op.add_column(sa.Column('unit_price_cents', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('total_price_cents', sa.Integer(), nullable=True))
    # Migrate data for order_items
    op.execute('UPDATE order_items SET unit_price_cents = ROUND(COALESCE(unit_price, 0) * 100)')
    op.execute('UPDATE order_items SET total_price_cents = ROUND(COALESCE(total_price, 0) * 100)')
    with op.batch_alter_table('order_items', schema=None) as batch_op:
        batch_op.drop_column('unit_price')
        batch_op.drop_column('total_price')
        batch_op.alter_column('unit_price_cents',
            existing_type=sa.Integer(),
            nullable=False)
        batch_op.alter_column('total_price_cents',
            existing_type=sa.Integer(),
            nullable=False)
    # === CART_ITEMS TABLE ===
    with op.batch_alter_table('cart_items', schema=None) as batch_op:
        batch_op.add_column(sa.Column('price_at_add_cents', sa.Integer(), nullable=True))
    # Migrate data for cart_items
    op.execute('UPDATE cart_items SET price_at_add_cents = ROUND(COALESCE(price_at_add, 0) * 100)')
    with op.batch_alter_table('cart_items', schema=None) as batch_op:
        batch_op.drop_column('price_at_add')
        batch_op.alter_column('price_at_add_cents',
            existing_type=sa.Integer(),
            nullable=False)
    # === MARKETPLACE_PRODUCTS TABLE ===
    with op.batch_alter_table('marketplace_products', schema=None) as batch_op:
        batch_op.add_column(sa.Column('price_cents', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('sale_price_cents', sa.Integer(), nullable=True))
        # weight moves from fractional kilograms to integer grams.
        batch_op.add_column(sa.Column('weight_grams', sa.Integer(), nullable=True))
    # Migrate data for marketplace_products
    op.execute('UPDATE marketplace_products SET price_cents = ROUND(price_numeric * 100) WHERE price_numeric IS NOT NULL')
    op.execute('UPDATE marketplace_products SET sale_price_cents = ROUND(sale_price_numeric * 100) WHERE sale_price_numeric IS NOT NULL')
    op.execute('UPDATE marketplace_products SET weight_grams = ROUND(weight * 1000) WHERE weight IS NOT NULL')
    with op.batch_alter_table('marketplace_products', schema=None) as batch_op:
        batch_op.drop_column('price_numeric')
        batch_op.drop_column('sale_price_numeric')
        batch_op.drop_column('weight')
def downgrade() -> None:
    """Revert integer-cents columns back to Float currency values.

    Mirror image of ``upgrade`` in reverse table order: add the old Float
    columns, copy data back via ``cents / 100.0``, then drop the ``*_cents``
    columns.  Sub-cent precision lost during the upgrade's ROUND cannot be
    recovered.
    """
    # === MARKETPLACE_PRODUCTS TABLE ===
    with op.batch_alter_table('marketplace_products', schema=None) as batch_op:
        batch_op.add_column(sa.Column('price_numeric', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('sale_price_numeric', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('weight', sa.Float(), nullable=True))
    # NULL cents stay NULL via the WHERE guards.
    op.execute('UPDATE marketplace_products SET price_numeric = price_cents / 100.0 WHERE price_cents IS NOT NULL')
    op.execute('UPDATE marketplace_products SET sale_price_numeric = sale_price_cents / 100.0 WHERE sale_price_cents IS NOT NULL')
    op.execute('UPDATE marketplace_products SET weight = weight_grams / 1000.0 WHERE weight_grams IS NOT NULL')
    with op.batch_alter_table('marketplace_products', schema=None) as batch_op:
        batch_op.drop_column('price_cents')
        batch_op.drop_column('sale_price_cents')
        batch_op.drop_column('weight_grams')
    # === CART_ITEMS TABLE ===
    with op.batch_alter_table('cart_items', schema=None) as batch_op:
        batch_op.add_column(sa.Column('price_at_add', sa.Float(), nullable=True))
    op.execute('UPDATE cart_items SET price_at_add = price_at_add_cents / 100.0')
    with op.batch_alter_table('cart_items', schema=None) as batch_op:
        batch_op.drop_column('price_at_add_cents')
        # Restore the original NOT NULL constraint after backfill.
        batch_op.alter_column('price_at_add',
            existing_type=sa.Float(),
            nullable=False)
    # === ORDER_ITEMS TABLE ===
    with op.batch_alter_table('order_items', schema=None) as batch_op:
        batch_op.add_column(sa.Column('unit_price', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('total_price', sa.Float(), nullable=True))
    op.execute('UPDATE order_items SET unit_price = unit_price_cents / 100.0')
    op.execute('UPDATE order_items SET total_price = total_price_cents / 100.0')
    with op.batch_alter_table('order_items', schema=None) as batch_op:
        batch_op.drop_column('unit_price_cents')
        batch_op.drop_column('total_price_cents')
        batch_op.alter_column('unit_price',
            existing_type=sa.Float(),
            nullable=False)
        batch_op.alter_column('total_price',
            existing_type=sa.Float(),
            nullable=False)
    # === ORDERS TABLE ===
    with op.batch_alter_table('orders', schema=None) as batch_op:
        batch_op.add_column(sa.Column('subtotal', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('tax_amount', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('shipping_amount', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('discount_amount', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('total_amount', sa.Float(), nullable=True))
    op.execute('UPDATE orders SET subtotal = subtotal_cents / 100.0')
    op.execute('UPDATE orders SET tax_amount = tax_amount_cents / 100.0')
    op.execute('UPDATE orders SET shipping_amount = shipping_amount_cents / 100.0')
    op.execute('UPDATE orders SET discount_amount = discount_amount_cents / 100.0')
    op.execute('UPDATE orders SET total_amount = total_amount_cents / 100.0')
    with op.batch_alter_table('orders', schema=None) as batch_op:
        batch_op.drop_column('subtotal_cents')
        batch_op.drop_column('tax_amount_cents')
        batch_op.drop_column('shipping_amount_cents')
        batch_op.drop_column('discount_amount_cents')
        batch_op.drop_column('total_amount_cents')
        batch_op.alter_column('total_amount',
            existing_type=sa.Float(),
            nullable=False)
    # === PRODUCTS TABLE ===
    with op.batch_alter_table('products', schema=None) as batch_op:
        batch_op.add_column(sa.Column('price', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('sale_price', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('supplier_cost', sa.Float(), nullable=True))
        batch_op.add_column(sa.Column('margin_percent', sa.Float(), nullable=True))
    op.execute('UPDATE products SET price = price_cents / 100.0 WHERE price_cents IS NOT NULL')
    op.execute('UPDATE products SET sale_price = sale_price_cents / 100.0 WHERE sale_price_cents IS NOT NULL')
    op.execute('UPDATE products SET supplier_cost = supplier_cost_cents / 100.0 WHERE supplier_cost_cents IS NOT NULL')
    op.execute('UPDATE products SET margin_percent = margin_percent_x100 / 100.0 WHERE margin_percent_x100 IS NOT NULL')
    with op.batch_alter_table('products', schema=None) as batch_op:
        batch_op.drop_column('price_cents')
        batch_op.drop_column('sale_price_cents')
        batch_op.drop_column('supplier_cost_cents')
        batch_op.drop_column('margin_percent_x100')

View File

@@ -0,0 +1,339 @@
"""add_messaging_tables
Revision ID: e3f4a5b6c7d8
Revises: c9e22eadf533
Create Date: 2025-12-21
This migration adds the messaging system tables:
- conversations: Threaded conversation threads
- conversation_participants: Links users/customers to conversations
- messages: Individual messages within conversations
- message_attachments: File attachments for messages
Supports three communication channels:
- Admin <-> Vendor
- Vendor <-> Customer
- Admin <-> Customer
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision: str = "e3f4a5b6c7d8"
down_revision: Union[str, None] = "c9e22eadf533"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def table_exists(table_name: str) -> bool:
    """Return True when *table_name* is already present in the database."""
    inspector = inspect(op.get_bind())
    return table_name in inspector.get_table_names()
def index_exists(index_name: str, table_name: str) -> bool:
    """Return True when *index_name* exists on *table_name*.

    Any inspection failure (e.g. the table itself is missing) is treated
    as "index not present" rather than an error.
    """
    inspector = inspect(op.get_bind())
    try:
        for index_info in inspector.get_indexes(table_name):
            if index_info["name"] == index_name:
                return True
    except Exception:
        pass
    return False
def upgrade() -> None:
    """Create the four messaging tables with their indexes.

    Creation order respects foreign keys: conversations first, then the
    tables that reference it.  Each table is guarded by ``table_exists``
    so re-running the migration against a partially-migrated database is
    safe; note the per-table indexes are only created together with the
    table, so a pre-existing table is assumed to already carry them.
    """
    # =========================================================================
    # Step 1: Create conversations table
    # =========================================================================
    if not table_exists("conversations"):
        op.create_table(
            "conversations",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column(
                "conversation_type",
                sa.Enum(
                    "admin_vendor",
                    "vendor_customer",
                    "admin_customer",
                    name="conversationtype",
                ),
                nullable=False,
            ),
            sa.Column("subject", sa.String(length=500), nullable=False),
            # NULL vendor_id covers admin<->customer conversations.
            sa.Column("vendor_id", sa.Integer(), nullable=True),
            sa.Column("is_closed", sa.Boolean(), nullable=False, server_default="0"),
            sa.Column("closed_at", sa.DateTime(), nullable=True),
            # closed_by_* is a polymorphic reference (no FK): the id is
            # interpreted according to closed_by_type.
            sa.Column(
                "closed_by_type",
                sa.Enum("admin", "vendor", "customer", name="participanttype"),
                nullable=True,
            ),
            sa.Column("closed_by_id", sa.Integer(), nullable=True),
            # Denormalized activity counters for cheap inbox sorting.
            sa.Column("last_message_at", sa.DateTime(), nullable=True),
            sa.Column("message_count", sa.Integer(), nullable=False, server_default="0"),
            sa.Column(
                "created_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.Column(
                "updated_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.ForeignKeyConstraint(
                ["vendor_id"],
                ["vendors.id"],
            ),
            sa.PrimaryKeyConstraint("id"),
        )
        op.create_index(
            op.f("ix_conversations_id"), "conversations", ["id"], unique=False
        )
        op.create_index(
            op.f("ix_conversations_conversation_type"),
            "conversations",
            ["conversation_type"],
            unique=False,
        )
        op.create_index(
            op.f("ix_conversations_vendor_id"),
            "conversations",
            ["vendor_id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_conversations_last_message_at"),
            "conversations",
            ["last_message_at"],
            unique=False,
        )
        # Composite index for "all conversations of this type for vendor X".
        op.create_index(
            "ix_conversations_type_vendor",
            "conversations",
            ["conversation_type", "vendor_id"],
            unique=False,
        )
    # =========================================================================
    # Step 2: Create conversation_participants table
    # =========================================================================
    if not table_exists("conversation_participants"):
        op.create_table(
            "conversation_participants",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("conversation_id", sa.Integer(), nullable=False),
            # Polymorphic participant: participant_id points into the table
            # selected by participant_type.
            sa.Column(
                "participant_type",
                sa.Enum("admin", "vendor", "customer", name="participanttype"),
                nullable=False,
            ),
            sa.Column("participant_id", sa.Integer(), nullable=False),
            sa.Column("vendor_id", sa.Integer(), nullable=True),
            sa.Column("unread_count", sa.Integer(), nullable=False, server_default="0"),
            sa.Column("last_read_at", sa.DateTime(), nullable=True),
            # Notification preferences default to e-mail on, not muted.
            sa.Column(
                "email_notifications", sa.Boolean(), nullable=False, server_default="1"
            ),
            sa.Column("muted", sa.Boolean(), nullable=False, server_default="0"),
            sa.Column(
                "created_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.Column(
                "updated_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            # Deleting a conversation removes its participant rows.
            sa.ForeignKeyConstraint(
                ["conversation_id"],
                ["conversations.id"],
                ondelete="CASCADE",
            ),
            sa.ForeignKeyConstraint(
                ["vendor_id"],
                ["vendors.id"],
            ),
            sa.PrimaryKeyConstraint("id"),
            # A given person may appear at most once per conversation.
            sa.UniqueConstraint(
                "conversation_id",
                "participant_type",
                "participant_id",
                name="uq_conversation_participant",
            ),
        )
        op.create_index(
            op.f("ix_conversation_participants_id"),
            "conversation_participants",
            ["id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_conversation_participants_conversation_id"),
            "conversation_participants",
            ["conversation_id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_conversation_participants_participant_id"),
            "conversation_participants",
            ["participant_id"],
            unique=False,
        )
        # Composite index for "all conversations this person belongs to".
        op.create_index(
            "ix_participant_lookup",
            "conversation_participants",
            ["participant_type", "participant_id"],
            unique=False,
        )
    # =========================================================================
    # Step 3: Create messages table
    # =========================================================================
    if not table_exists("messages"):
        op.create_table(
            "messages",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("conversation_id", sa.Integer(), nullable=False),
            # Polymorphic sender, same convention as participants.
            sa.Column(
                "sender_type",
                sa.Enum("admin", "vendor", "customer", name="participanttype"),
                nullable=False,
            ),
            sa.Column("sender_id", sa.Integer(), nullable=False),
            sa.Column("content", sa.Text(), nullable=False),
            sa.Column(
                "is_system_message", sa.Boolean(), nullable=False, server_default="0"
            ),
            # Soft delete: row is kept, flagged, and attributed.
            sa.Column("is_deleted", sa.Boolean(), nullable=False, server_default="0"),
            sa.Column("deleted_at", sa.DateTime(), nullable=True),
            sa.Column(
                "deleted_by_type",
                sa.Enum("admin", "vendor", "customer", name="participanttype"),
                nullable=True,
            ),
            sa.Column("deleted_by_id", sa.Integer(), nullable=True),
            sa.Column(
                "created_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.Column(
                "updated_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.ForeignKeyConstraint(
                ["conversation_id"],
                ["conversations.id"],
                ondelete="CASCADE",
            ),
            sa.PrimaryKeyConstraint("id"),
        )
        op.create_index(op.f("ix_messages_id"), "messages", ["id"], unique=False)
        op.create_index(
            op.f("ix_messages_conversation_id"),
            "messages",
            ["conversation_id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_messages_sender_id"), "messages", ["sender_id"], unique=False
        )
        # Composite index for chronological paging within a conversation.
        op.create_index(
            "ix_messages_conversation_created",
            "messages",
            ["conversation_id", "created_at"],
            unique=False,
        )
    # =========================================================================
    # Step 4: Create message_attachments table
    # =========================================================================
    if not table_exists("message_attachments"):
        op.create_table(
            "message_attachments",
            sa.Column("id", sa.Integer(), nullable=False),
            sa.Column("message_id", sa.Integer(), nullable=False),
            # Stored name vs. the name the uploader supplied.
            sa.Column("filename", sa.String(length=255), nullable=False),
            sa.Column("original_filename", sa.String(length=255), nullable=False),
            sa.Column("file_path", sa.String(length=1000), nullable=False),
            sa.Column("file_size", sa.Integer(), nullable=False),
            sa.Column("mime_type", sa.String(length=100), nullable=False),
            # Image metadata is populated only when is_image is true.
            sa.Column("is_image", sa.Boolean(), nullable=False, server_default="0"),
            sa.Column("image_width", sa.Integer(), nullable=True),
            sa.Column("image_height", sa.Integer(), nullable=True),
            sa.Column("thumbnail_path", sa.String(length=1000), nullable=True),
            sa.Column(
                "created_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.Column(
                "updated_at",
                sa.DateTime(),
                nullable=False,
                server_default=sa.text("CURRENT_TIMESTAMP"),
            ),
            sa.ForeignKeyConstraint(
                ["message_id"],
                ["messages.id"],
                ondelete="CASCADE",
            ),
            sa.PrimaryKeyConstraint("id"),
        )
        op.create_index(
            op.f("ix_message_attachments_id"),
            "message_attachments",
            ["id"],
            unique=False,
        )
        op.create_index(
            op.f("ix_message_attachments_message_id"),
            "message_attachments",
            ["message_id"],
            unique=False,
        )
    # =========================================================================
    # Step 5: Add platform setting for attachment size limit
    # =========================================================================
    # Note: This will be added via seed script or manually
    # Key: message_attachment_max_size_mb
    # Value: 10
    # Category: messaging
def downgrade() -> None:
    """Drop all messaging tables, children before parents so FKs never dangle."""
    for table in (
        "message_attachments",
        "messages",
        "conversation_participants",
        "conversations",
    ):
        if table_exists(table):
            op.drop_table(table)
    # The enum types survive this downgrade; on backends with real enum
    # types they can be removed by hand:
    #   op.execute("DROP TYPE IF EXISTS conversationtype")
    #   op.execute("DROP TYPE IF EXISTS participanttype")

View File

@@ -0,0 +1,147 @@
"""Create translation tables for multi-language support
Revision ID: f2b3c4d5e6f7
Revises: e1a2b3c4d5e6
Create Date: 2025-12-11
This migration creates:
- marketplace_product_translations: Localized content from marketplace sources
- product_translations: Vendor-specific localized overrides
The translation tables support multi-language product information with
language fallback capabilities. Fields in product_translations can be
NULL to inherit from marketplace_product_translations.
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "f2b3c4d5e6f7"
down_revision: Union[str, None] = "e1a2b3c4d5e6"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the two product-translation tables plus their lookup indexes.

    ``marketplace_product_translations`` holds source-of-truth localized
    content; ``product_translations`` holds vendor overrides whose NULL
    fields fall back to the marketplace row for the same language.
    """
    # Create marketplace_product_translations table
    # Note: Unique constraint is included in create_table for SQLite compatibility
    op.create_table(
        "marketplace_product_translations",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column(
            "marketplace_product_id",
            sa.Integer(),
            sa.ForeignKey("marketplace_products.id", ondelete="CASCADE"),
            nullable=False,
        ),
        # BCP-47-style code, e.g. "fr" or "fr-LU" — hence length 5.
        sa.Column("language", sa.String(5), nullable=False),
        # Localized content
        sa.Column("title", sa.String(), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("short_description", sa.String(500), nullable=True),
        # SEO fields (lengths follow common search-snippet limits)
        sa.Column("meta_title", sa.String(70), nullable=True),
        sa.Column("meta_description", sa.String(160), nullable=True),
        sa.Column("url_slug", sa.String(255), nullable=True),
        # Source tracking
        sa.Column("source_import_id", sa.Integer(), nullable=True),
        sa.Column("source_file", sa.String(), nullable=True),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        # Unique constraint included in table creation for SQLite
        sa.UniqueConstraint(
            "marketplace_product_id",
            "language",
            name="uq_marketplace_product_translation",
        ),
    )
    # Create indexes for marketplace_product_translations
    op.create_index(
        "idx_mpt_marketplace_product_id",
        "marketplace_product_translations",
        ["marketplace_product_id"],
    )
    op.create_index(
        "idx_mpt_language",
        "marketplace_product_translations",
        ["language"],
    )
    # Create product_translations table
    # Note: Unique constraint is included in create_table for SQLite compatibility
    op.create_table(
        "product_translations",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column(
            "product_id",
            sa.Integer(),
            sa.ForeignKey("products.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("language", sa.String(5), nullable=False),
        # Overridable localized content (NULL = inherit from marketplace)
        sa.Column("title", sa.String(), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("short_description", sa.String(500), nullable=True),
        # SEO overrides
        sa.Column("meta_title", sa.String(70), nullable=True),
        sa.Column("meta_description", sa.String(160), nullable=True),
        sa.Column("url_slug", sa.String(255), nullable=True),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        # Unique constraint included in table creation for SQLite
        sa.UniqueConstraint("product_id", "language", name="uq_product_translation"),
    )
    # Create indexes for product_translations
    op.create_index(
        "idx_pt_product_id",
        "product_translations",
        ["product_id"],
    )
    # Composite index covers the common (product, language) lookup.
    op.create_index(
        "idx_pt_product_language",
        "product_translations",
        ["product_id", "language"],
    )
def downgrade() -> None:
    """Drop both translation tables together with their indexes."""
    # Vendor-level overrides go first.
    for index_name in ("idx_pt_product_language", "idx_pt_product_id"):
        op.drop_index(index_name, table_name="product_translations")
    op.drop_table("product_translations")

    # Then the marketplace-level source translations.
    for index_name in ("idx_mpt_language", "idx_mpt_marketplace_product_id"):
        op.drop_index(index_name, table_name="marketplace_product_translations")
    op.drop_table("marketplace_product_translations")

View File

@@ -0,0 +1,95 @@
"""add_validator_type_to_code_quality
Revision ID: f4a5b6c7d8e9
Revises: e3f4a5b6c7d8
Create Date: 2025-12-21
This migration adds validator_type column to architecture scans and violations
to support multiple validator types (architecture, security, performance).
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "f4a5b6c7d8e9"
down_revision: Union[str, None] = "e3f4a5b6c7d8"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add an indexed ``validator_type`` column to every code-quality table.

    Existing rows are backfilled with "architecture" via the server
    default so current data keeps its original meaning.
    """
    for table in (
        "architecture_scans",
        "architecture_violations",
        "architecture_rules",
    ):
        op.add_column(
            table,
            sa.Column(
                "validator_type",
                sa.String(length=20),
                nullable=False,
                server_default="architecture",
            ),
        )
        op.create_index(
            op.f(f"ix_{table}_validator_type"),
            table,
            ["validator_type"],
            unique=False,
        )
def downgrade() -> None:
    """Remove the ``validator_type`` column and its index from every table."""
    tables = ("architecture_rules", "architecture_violations", "architecture_scans")
    # Indexes must disappear before the columns they cover.
    for table in tables:
        op.drop_index(op.f(f"ix_{table}_validator_type"), table_name=table)
    for table in tables:
        op.drop_column(table, "validator_type")

View File

@@ -0,0 +1,34 @@
"""add template field to content pages for landing page designs
Revision ID: f68d8da5315a
Revises: 72aa309d4007
Create Date: 2025-11-22 23:51:40.694983
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "f68d8da5315a"
down_revision: Union[str, None] = "72aa309d4007"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add a ``template`` column so content pages can pick a landing design.

    Existing pages are backfilled with "default" via the server default,
    keeping their current rendering unchanged.
    """
    template_column = sa.Column(
        "template", sa.String(length=50), nullable=False, server_default="default"
    )
    op.add_column("content_pages", template_column)
def downgrade() -> None:
    """Drop the ``template`` column from content_pages."""
    op.drop_column("content_pages", "template")

View File

@@ -0,0 +1,148 @@
"""add_rbac_enhancements
Revision ID: fa7d4d10e358
Revises: 4951b2e50581
Create Date: 2025-11-13 16:51:25.010057
SQLite-compatible version
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "fa7d4d10e358"
down_revision: Union[str, None] = "4951b2e50581"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade():
    """Upgrade database schema for RBAC enhancements.

    SQLite-compatible version using batch operations for table modifications.

    Adds: users.is_email_verified, vendor_users invitation fields and
    user_type, and a (vendor_id, name) index on roles; then backfills
    owner/active flags from the vendors table.
    """
    # ========================================================================
    # User table changes
    # ========================================================================
    with op.batch_alter_table("users", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "is_email_verified",
                sa.Boolean(),
                nullable=False,
                server_default="false",
            )
        )
    # Set existing active users as verified
    op.execute("UPDATE users SET is_email_verified = TRUE WHERE is_active = TRUE")
    # ========================================================================
    # VendorUser table changes (requires table recreation for SQLite)
    # ========================================================================
    with op.batch_alter_table("vendor_users", schema=None) as batch_op:
        # Add new columns
        batch_op.add_column(
            sa.Column(
                "user_type",
                sa.String(length=20),
                nullable=False,
                server_default="member",
            )
        )
        batch_op.add_column(
            sa.Column("invitation_token", sa.String(length=100), nullable=True)
        )
        batch_op.add_column(
            sa.Column("invitation_sent_at", sa.DateTime(), nullable=True)
        )
        batch_op.add_column(
            sa.Column("invitation_accepted_at", sa.DateTime(), nullable=True)
        )
        # Create index on invitation_token
        batch_op.create_index("idx_vendor_users_invitation_token", ["invitation_token"])
        # Modify role_id to be nullable (this recreates the table in SQLite)
        batch_op.alter_column("role_id", existing_type=sa.Integer(), nullable=True)
        # Change is_active default (this recreates the table in SQLite)
        batch_op.alter_column(
            "is_active", existing_type=sa.Boolean(), server_default="false"
        )
    # Set owners correctly (after table modifications)
    # SQLite-compatible UPDATE with subquery
    # NOTE(review): row-value IN needs SQLite >= 3.15 — presumably satisfied
    # in all deployment targets; verify if older SQLite must be supported.
    op.execute(
        """
        UPDATE vendor_users
        SET user_type = 'owner'
        WHERE (vendor_id, user_id) IN (
            SELECT id, owner_user_id
            FROM vendors
        )
        """
    )
    # Set existing owners as active
    op.execute(
        """
        UPDATE vendor_users
        SET is_active = TRUE
        WHERE user_type = 'owner'
        """
    )
    # ========================================================================
    # Role table changes
    # ========================================================================
    with op.batch_alter_table("roles", schema=None) as batch_op:
        # Create index on vendor_id and name
        batch_op.create_index("idx_roles_vendor_name", ["vendor_id", "name"])
    # Note: JSONB conversion only for PostgreSQL
    # SQLite stores JSON as TEXT by default, no conversion needed
def downgrade():
    """Revert the RBAC enhancements, undoing the upgrade in reverse order.

    SQLite-compatible version using batch operations.  Restoring
    ``role_id`` to NOT NULL will fail if any row still holds a NULL role.
    """
    # roles: remove the (vendor_id, name) lookup index.
    with op.batch_alter_table("roles", schema=None) as batch_op:
        batch_op.drop_index("idx_roles_vendor_name")

    # vendor_users: restore defaults/constraints, then drop invitation fields.
    with op.batch_alter_table("vendor_users", schema=None) as batch_op:
        batch_op.alter_column(
            "is_active", existing_type=sa.Boolean(), server_default="true"
        )
        batch_op.alter_column("role_id", existing_type=sa.Integer(), nullable=False)
        batch_op.drop_index("idx_vendor_users_invitation_token")
        for column in (
            "invitation_accepted_at",
            "invitation_sent_at",
            "invitation_token",
            "user_type",
        ):
            batch_op.drop_column(column)

    # users: drop the e-mail verification flag.
    with op.batch_alter_table("users", schema=None) as batch_op:
        batch_op.drop_column("is_email_verified")

View File

@@ -0,0 +1,84 @@
"""add_language_settings_to_vendor_user_customer
Revision ID: fcfdc02d5138
Revises: b412e0b49c2e
Create Date: 2025-12-13 20:08:27.120863
This migration adds language preference fields to support multi-language UI:
- Vendor: default_language, dashboard_language, storefront_language
- User: preferred_language
- Customer: preferred_language
Supported languages: en (English), fr (French), de (German), lb (Luxembourgish)
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'fcfdc02d5138'
down_revision: Union[str, None] = 'b412e0b49c2e'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add multi-language preference columns to vendors, users and customers.

    Vendors gain three single-language settings (content default,
    dashboard UI, storefront default) plus a JSON list of storefront
    languages they may enable/disable.  Users and customers each gain an
    optional preferred language; NULL means "use the context default".
    """
    # Vendor-level language settings, all defaulting to French.
    for column_name in (
        'default_language',      # default for vendor content (products, etc.)
        'dashboard_language',    # vendor team dashboard UI
        'storefront_language',   # customer-facing shop default
    ):
        op.add_column(
            'vendors',
            sa.Column(column_name, sa.String(5), nullable=False, server_default='fr'),
        )
    # JSON array of languages the storefront exposes.
    op.add_column(
        'vendors',
        sa.Column(
            'storefront_languages',
            sa.JSON,
            nullable=False,
            server_default='["fr", "de", "en"]'
        )
    )
    # Per-person preferences; NULL falls back to the contextual default.
    for table in ('users', 'customers'):
        op.add_column(
            table,
            sa.Column('preferred_language', sa.String(5), nullable=True),
        )
def downgrade() -> None:
    """Remove the language-preference columns in reverse order of upgrade()."""
    op.drop_column('customers', 'preferred_language')
    op.drop_column('users', 'preferred_language')
    for column_name in ('storefront_languages', 'storefront_language',
                        'dashboard_language', 'default_language'):
        op.drop_column('vendors', column_name)

View File

@@ -0,0 +1,47 @@
"""Add content_pages table for CMS
Revision ID: fef1d20ce8b4
Revises: fa7d4d10e358
Create Date: 2025-11-22 13:41:18.069674
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# Revision identifiers consumed by Alembic's migration graph.
revision: str = "fef1d20ce8b4"
down_revision: Union[str, None] = "fa7d4d10e358"  # parent revision
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Swap legacy hand-named indexes for an Alembic-conventional one.

    NOTE(review): the module docstring mentions a content_pages table, but
    this autogenerated revision only touches indexes — confirm intent.
    """
    op.drop_index("idx_roles_vendor_name", table_name="roles")
    op.drop_index("idx_vendor_users_invitation_token", table_name="vendor_users")
    op.create_index(op.f("ix_vendor_users_invitation_token"),
                    "vendor_users", ["invitation_token"], unique=False)
def downgrade() -> None:
    """Restore the original hand-named indexes dropped by upgrade()."""
    op.drop_index(op.f("ix_vendor_users_invitation_token"), table_name="vendor_users")
    op.create_index("idx_vendor_users_invitation_token", "vendor_users",
                    ["invitation_token"], unique=False)
    op.create_index("idx_roles_vendor_name", "roles", ["vendor_id", "name"],
                    unique=False)

View File

@@ -0,0 +1,82 @@
"""add_scan_status_fields
Add background task status fields to architecture_scans table
for harmonized background task architecture.
Revision ID: g5b6c7d8e9f0
Revises: f4a5b6c7d8e9
Create Date: 2024-12-21
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# Revision identifiers consumed by Alembic's migration graph.
revision: str = "g5b6c7d8e9f0"
down_revision: str | None = "f4a5b6c7d8e9"  # parent revision
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Add background-task lifecycle fields to architecture_scans.

    Adds status, started_at, completed_at, error_message, and
    progress_message columns, then backfills timestamps for pre-existing
    rows so they read as already-completed scans.
    """
    # Add status field with default 'completed' for existing records
    # New records will use 'pending' as default
    op.add_column(
        "architecture_scans",
        sa.Column(
            "status",
            sa.String(length=30),
            nullable=False,
            server_default="completed",  # Existing scans are already completed
        ),
    )
    op.create_index(
        op.f("ix_architecture_scans_status"), "architecture_scans", ["status"]
    )
    # Add started_at - for existing records, use timestamp as started_at
    op.add_column(
        "architecture_scans",
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
    )
    # Add completed_at - for existing records, use timestamp + duration as completed_at
    op.add_column(
        "architecture_scans",
        sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
    )
    # Add error_message for failed scans
    op.add_column(
        "architecture_scans",
        sa.Column("error_message", sa.Text(), nullable=True),
    )
    # Add progress_message for showing current step
    op.add_column(
        "architecture_scans",
        sa.Column("progress_message", sa.String(length=255), nullable=True),
    )
    # Update existing records to have proper started_at and completed_at
    # This is done via raw SQL for efficiency (PostgreSQL syntax)
    # NOTE(review): the ::interval cast is PostgreSQL-only; other migrations in
    # this tree use SQLite batch mode — confirm the target database.
    op.execute(
        """
        UPDATE architecture_scans
        SET started_at = timestamp,
            completed_at = timestamp + (COALESCE(duration_seconds, 0) || ' seconds')::interval
        WHERE started_at IS NULL
        """
    )
def downgrade() -> None:
    """Drop the background-task status fields added by upgrade()."""
    op.drop_index(op.f("ix_architecture_scans_status"), table_name="architecture_scans")
    for column_name in ("progress_message", "error_message",
                        "completed_at", "started_at", "status"):
        op.drop_column("architecture_scans", column_name)

View File

@@ -0,0 +1,198 @@
"""Add invoice tables
Revision ID: h6c7d8e9f0a1
Revises: g5b6c7d8e9f0
Create Date: 2025-12-24
This migration adds:
- vendor_invoice_settings: Per-vendor invoice configuration
- invoices: Invoice records with seller/buyer snapshots
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# Revision identifiers consumed by Alembic's migration graph.
revision: str = "h6c7d8e9f0a1"
down_revision: Union[str, None] = "g5b6c7d8e9f0"  # parent revision
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the invoicing tables.

    Creates vendor_invoice_settings (one row per vendor, enforced by a
    unique constraint on vendor_id) and invoices (seller/buyer/line-item
    snapshots stored as JSON, amounts in integer cents, invoice numbers
    unique per vendor).

    NOTE(review): boolean defaults use string "1"/"0" and timestamps use
    (CURRENT_TIMESTAMP) — looks targeted at SQLite; confirm behavior on
    other backends.
    """
    # Create vendor_invoice_settings table
    op.create_table(
        "vendor_invoice_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        # Company details
        sa.Column("company_name", sa.String(length=255), nullable=False),
        sa.Column("company_address", sa.String(length=255), nullable=True),
        sa.Column("company_city", sa.String(length=100), nullable=True),
        sa.Column("company_postal_code", sa.String(length=20), nullable=True),
        sa.Column(
            "company_country", sa.String(length=2), server_default="LU", nullable=False
        ),
        # VAT information
        sa.Column("vat_number", sa.String(length=50), nullable=True),
        sa.Column("is_vat_registered", sa.Boolean(), server_default="1", nullable=False),
        # OSS
        sa.Column("is_oss_registered", sa.Boolean(), server_default="0", nullable=False),
        sa.Column("oss_registration_country", sa.String(length=2), nullable=True),
        # Invoice numbering
        sa.Column(
            "invoice_prefix", sa.String(length=20), server_default="INV", nullable=False
        ),
        sa.Column("invoice_next_number", sa.Integer(), server_default="1", nullable=False),
        sa.Column(
            "invoice_number_padding", sa.Integer(), server_default="5", nullable=False
        ),
        # Payment information
        sa.Column("payment_terms", sa.Text(), nullable=True),
        sa.Column("bank_name", sa.String(length=255), nullable=True),
        sa.Column("bank_iban", sa.String(length=50), nullable=True),
        sa.Column("bank_bic", sa.String(length=20), nullable=True),
        # Footer
        sa.Column("footer_text", sa.Text(), nullable=True),
        # Default VAT rate
        sa.Column(
            "default_vat_rate", sa.Numeric(precision=5, scale=2), server_default="17.00", nullable=False
        ),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("vendor_id"),
    )
    op.create_index(
        op.f("ix_vendor_invoice_settings_id"),
        "vendor_invoice_settings",
        ["id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_vendor_invoice_settings_vendor_id"),
        "vendor_invoice_settings",
        ["vendor_id"],
        unique=True,
    )
    # Create invoices table
    op.create_table(
        "invoices",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("order_id", sa.Integer(), nullable=True),
        # Invoice identification
        sa.Column("invoice_number", sa.String(length=50), nullable=False),
        sa.Column("invoice_date", sa.DateTime(timezone=True), nullable=False),
        # Status
        sa.Column(
            "status", sa.String(length=20), server_default="draft", nullable=False
        ),
        # Snapshots (JSON)
        sa.Column("seller_details", sa.JSON(), nullable=False),
        sa.Column("buyer_details", sa.JSON(), nullable=False),
        sa.Column("line_items", sa.JSON(), nullable=False),
        # VAT information
        sa.Column(
            "vat_regime", sa.String(length=20), server_default="domestic", nullable=False
        ),
        sa.Column("destination_country", sa.String(length=2), nullable=True),
        sa.Column("vat_rate", sa.Numeric(precision=5, scale=2), nullable=False),
        sa.Column("vat_rate_label", sa.String(length=50), nullable=True),
        # Amounts (in cents)
        sa.Column("currency", sa.String(length=3), server_default="EUR", nullable=False),
        sa.Column("subtotal_cents", sa.Integer(), nullable=False),
        sa.Column("vat_amount_cents", sa.Integer(), nullable=False),
        sa.Column("total_cents", sa.Integer(), nullable=False),
        # Payment info
        sa.Column("payment_terms", sa.Text(), nullable=True),
        sa.Column("bank_details", sa.JSON(), nullable=True),
        sa.Column("footer_text", sa.Text(), nullable=True),
        # PDF
        sa.Column("pdf_generated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("pdf_path", sa.String(length=500), nullable=True),
        # Notes
        sa.Column("notes", sa.Text(), nullable=True),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
        ),
        sa.ForeignKeyConstraint(
            ["order_id"],
            ["orders.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_invoices_id"), "invoices", ["id"], unique=False)
    op.create_index(op.f("ix_invoices_vendor_id"), "invoices", ["vendor_id"], unique=False)
    op.create_index(op.f("ix_invoices_order_id"), "invoices", ["order_id"], unique=False)
    # Per-vendor uniqueness of invoice numbers (legal requirement for sequential numbering).
    op.create_index(
        "idx_invoice_vendor_number",
        "invoices",
        ["vendor_id", "invoice_number"],
        unique=True,
    )
    op.create_index(
        "idx_invoice_vendor_date",
        "invoices",
        ["vendor_id", "invoice_date"],
        unique=False,
    )
    op.create_index(
        "idx_invoice_status",
        "invoices",
        ["vendor_id", "status"],
        unique=False,
    )
def downgrade() -> None:
    """Drop the invoices table first, then vendor_invoice_settings."""
    # Hand-named composite indexes on invoices.
    for index_name in ("idx_invoice_status", "idx_invoice_vendor_date",
                       "idx_invoice_vendor_number"):
        op.drop_index(index_name, table_name="invoices")
    # Autogenerated single-column indexes on invoices.
    for index_name in ("ix_invoices_order_id", "ix_invoices_vendor_id",
                       "ix_invoices_id"):
        op.drop_index(op.f(index_name), table_name="invoices")
    op.drop_table("invoices")

    # Then the settings table and its indexes.
    for index_name in ("ix_vendor_invoice_settings_vendor_id",
                       "ix_vendor_invoice_settings_id"):
        op.drop_index(op.f(index_name), table_name="vendor_invoice_settings")
    op.drop_table("vendor_invoice_settings")

View File

@@ -0,0 +1,148 @@
"""Add vendor subscriptions table
Revision ID: i7d8e9f0a1b2
Revises: h6c7d8e9f0a1
Create Date: 2025-12-24
This migration adds:
- vendor_subscriptions: Per-vendor subscription tracking with tier limits
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# Revision identifiers consumed by Alembic's migration graph.
revision: str = "i7d8e9f0a1b2"
down_revision: Union[str, None] = "h6c7d8e9f0a1"  # parent revision
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create vendor_subscriptions: one subscription row per vendor.

    Tracks tier, status, billing period, trial window, per-period usage
    counters, optional per-vendor limit overrides, and Stripe IDs reserved
    for a future payment integration. vendor_id is unique (one subscription
    per vendor).
    """
    # Create vendor_subscriptions table
    op.create_table(
        "vendor_subscriptions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        # Tier and status
        sa.Column(
            "tier", sa.String(length=20), server_default="essential", nullable=False
        ),
        sa.Column(
            "status", sa.String(length=20), server_default="trial", nullable=False
        ),
        # Billing period
        sa.Column("period_start", sa.DateTime(timezone=True), nullable=False),
        sa.Column("period_end", sa.DateTime(timezone=True), nullable=False),
        sa.Column("is_annual", sa.Boolean(), server_default="0", nullable=False),
        # Trial
        sa.Column("trial_ends_at", sa.DateTime(timezone=True), nullable=True),
        # Usage counters
        sa.Column("orders_this_period", sa.Integer(), server_default="0", nullable=False),
        sa.Column("orders_limit_reached_at", sa.DateTime(timezone=True), nullable=True),
        # Custom overrides (NULL = use the tier's standard limit)
        sa.Column("custom_orders_limit", sa.Integer(), nullable=True),
        sa.Column("custom_products_limit", sa.Integer(), nullable=True),
        sa.Column("custom_team_limit", sa.Integer(), nullable=True),
        # Payment (future Stripe integration)
        sa.Column("stripe_customer_id", sa.String(length=100), nullable=True),
        sa.Column("stripe_subscription_id", sa.String(length=100), nullable=True),
        # Cancellation
        sa.Column("cancelled_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("cancellation_reason", sa.Text(), nullable=True),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("vendor_id"),
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_id"),
        "vendor_subscriptions",
        ["id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_vendor_id"),
        "vendor_subscriptions",
        ["vendor_id"],
        unique=True,
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_tier"),
        "vendor_subscriptions",
        ["tier"],
        unique=False,
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_status"),
        "vendor_subscriptions",
        ["status"],
        unique=False,
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_stripe_customer_id"),
        "vendor_subscriptions",
        ["stripe_customer_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_vendor_subscriptions_stripe_subscription_id"),
        "vendor_subscriptions",
        ["stripe_subscription_id"],
        unique=False,
    )
    # Composite indexes for the common "subscriptions of a vendor by status"
    # and "subscriptions overlapping a period" queries.
    op.create_index(
        "idx_subscription_vendor_status",
        "vendor_subscriptions",
        ["vendor_id", "status"],
        unique=False,
    )
    op.create_index(
        "idx_subscription_period",
        "vendor_subscriptions",
        ["period_start", "period_end"],
        unique=False,
    )
def downgrade() -> None:
    """Drop vendor_subscriptions together with every index created by upgrade()."""
    # Hand-named composite indexes first.
    op.drop_index("idx_subscription_period", table_name="vendor_subscriptions")
    op.drop_index("idx_subscription_vendor_status", table_name="vendor_subscriptions")
    # Autogenerated single-column indexes, in reverse order of creation.
    autogen_indexes = (
        "ix_vendor_subscriptions_stripe_subscription_id",
        "ix_vendor_subscriptions_stripe_customer_id",
        "ix_vendor_subscriptions_status",
        "ix_vendor_subscriptions_tier",
        "ix_vendor_subscriptions_vendor_id",
        "ix_vendor_subscriptions_id",
    )
    for index_name in autogen_indexes:
        op.drop_index(op.f(index_name), table_name="vendor_subscriptions")
    op.drop_table("vendor_subscriptions")

View File

@@ -0,0 +1,262 @@
"""Populate product fields from marketplace for independence refactor
Revision ID: j8e9f0a1b2c3
Revises: i7d8e9f0a1b2
Create Date: 2025-12-24
This migration populates NULL fields on products and product_translations
with values from their linked marketplace products. This is part of the
"product independence" refactor where products become standalone entities
instead of inheriting from marketplace products via NULL fallback.
After this migration:
- All Product fields will have actual values (no NULL inheritance)
- All ProductTranslation records will exist with actual values
- The marketplace_product_id FK is kept for "view original source" feature
"""
from typing import Sequence, Union
from alembic import op
from sqlalchemy import text
# Revision identifiers consumed by Alembic's migration graph.
revision: str = "j8e9f0a1b2c3"
down_revision: Union[str, None] = "i7d8e9f0a1b2"  # parent revision
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Populate NULL product fields with marketplace product values.

    Three passes: copy scalar fields from marketplace_products onto
    products, insert missing product_translations rows from the
    marketplace translations, then fill NULL translation fields in place.
    Every statement is guarded by IS NULL / NOT EXISTS, so re-running is
    a no-op and vendor-customized values are never overwritten.
    """
    # Get database connection for raw SQL
    connection = op.get_bind()
    # =========================================================================
    # STEP 1: Populate Product fields from MarketplaceProduct
    # =========================================================================
    # Price cents
    connection.execute(text("""
        UPDATE products
        SET price_cents = (
            SELECT mp.price_cents
            FROM marketplace_products mp
            WHERE mp.id = products.marketplace_product_id
        )
        WHERE price_cents IS NULL
        AND marketplace_product_id IS NOT NULL
    """))
    # Sale price cents
    connection.execute(text("""
        UPDATE products
        SET sale_price_cents = (
            SELECT mp.sale_price_cents
            FROM marketplace_products mp
            WHERE mp.id = products.marketplace_product_id
        )
        WHERE sale_price_cents IS NULL
        AND marketplace_product_id IS NOT NULL
    """))
    # Currency (default to EUR if marketplace has NULL)
    connection.execute(text("""
        UPDATE products
        SET currency = COALESCE(
            (SELECT mp.currency FROM marketplace_products mp WHERE mp.id = products.marketplace_product_id),
            'EUR'
        )
        WHERE currency IS NULL
        AND marketplace_product_id IS NOT NULL
    """))
    # Brand
    connection.execute(text("""
        UPDATE products
        SET brand = (
            SELECT mp.brand
            FROM marketplace_products mp
            WHERE mp.id = products.marketplace_product_id
        )
        WHERE brand IS NULL
        AND marketplace_product_id IS NOT NULL
    """))
    # Condition
    connection.execute(text("""
        UPDATE products
        SET condition = (
            SELECT mp.condition
            FROM marketplace_products mp
            WHERE mp.id = products.marketplace_product_id
        )
        WHERE condition IS NULL
        AND marketplace_product_id IS NOT NULL
    """))
    # Availability
    connection.execute(text("""
        UPDATE products
        SET availability = (
            SELECT mp.availability
            FROM marketplace_products mp
            WHERE mp.id = products.marketplace_product_id
        )
        WHERE availability IS NULL
        AND marketplace_product_id IS NOT NULL
    """))
    # Primary image URL (marketplace uses 'image_link')
    connection.execute(text("""
        UPDATE products
        SET primary_image_url = (
            SELECT mp.image_link
            FROM marketplace_products mp
            WHERE mp.id = products.marketplace_product_id
        )
        WHERE primary_image_url IS NULL
        AND marketplace_product_id IS NOT NULL
    """))
    # Additional images
    connection.execute(text("""
        UPDATE products
        SET additional_images = (
            SELECT mp.additional_images
            FROM marketplace_products mp
            WHERE mp.id = products.marketplace_product_id
        )
        WHERE additional_images IS NULL
        AND marketplace_product_id IS NOT NULL
    """))
    # =========================================================================
    # STEP 2: Create missing ProductTranslation records from MarketplaceProductTranslation
    # =========================================================================
    # Insert missing translations (where product doesn't have translation for a language
    # that the marketplace product has)
    connection.execute(text("""
        INSERT INTO product_translations (product_id, language, title, description, short_description,
                                          meta_title, meta_description, url_slug, created_at, updated_at)
        SELECT
            p.id,
            mpt.language,
            mpt.title,
            mpt.description,
            mpt.short_description,
            mpt.meta_title,
            mpt.meta_description,
            mpt.url_slug,
            CURRENT_TIMESTAMP,
            CURRENT_TIMESTAMP
        FROM products p
        JOIN marketplace_products mp ON mp.id = p.marketplace_product_id
        JOIN marketplace_product_translations mpt ON mpt.marketplace_product_id = mp.id
        WHERE NOT EXISTS (
            SELECT 1 FROM product_translations pt
            WHERE pt.product_id = p.id AND pt.language = mpt.language
        )
    """))
    # =========================================================================
    # STEP 3: Update existing ProductTranslation NULL fields with marketplace values
    # =========================================================================
    # Update title where NULL
    connection.execute(text("""
        UPDATE product_translations
        SET title = (
            SELECT mpt.title
            FROM products p
            JOIN marketplace_products mp ON mp.id = p.marketplace_product_id
            JOIN marketplace_product_translations mpt ON mpt.marketplace_product_id = mp.id
                AND mpt.language = product_translations.language
            WHERE p.id = product_translations.product_id
        )
        WHERE title IS NULL
    """))
    # Update description where NULL
    connection.execute(text("""
        UPDATE product_translations
        SET description = (
            SELECT mpt.description
            FROM products p
            JOIN marketplace_products mp ON mp.id = p.marketplace_product_id
            JOIN marketplace_product_translations mpt ON mpt.marketplace_product_id = mp.id
                AND mpt.language = product_translations.language
            WHERE p.id = product_translations.product_id
        )
        WHERE description IS NULL
    """))
    # Update short_description where NULL
    connection.execute(text("""
        UPDATE product_translations
        SET short_description = (
            SELECT mpt.short_description
            FROM products p
            JOIN marketplace_products mp ON mp.id = p.marketplace_product_id
            JOIN marketplace_product_translations mpt ON mpt.marketplace_product_id = mp.id
                AND mpt.language = product_translations.language
            WHERE p.id = product_translations.product_id
        )
        WHERE short_description IS NULL
    """))
    # Update meta_title where NULL
    connection.execute(text("""
        UPDATE product_translations
        SET meta_title = (
            SELECT mpt.meta_title
            FROM products p
            JOIN marketplace_products mp ON mp.id = p.marketplace_product_id
            JOIN marketplace_product_translations mpt ON mpt.marketplace_product_id = mp.id
                AND mpt.language = product_translations.language
            WHERE p.id = product_translations.product_id
        )
        WHERE meta_title IS NULL
    """))
    # Update meta_description where NULL
    connection.execute(text("""
        UPDATE product_translations
        SET meta_description = (
            SELECT mpt.meta_description
            FROM products p
            JOIN marketplace_products mp ON mp.id = p.marketplace_product_id
            JOIN marketplace_product_translations mpt ON mpt.marketplace_product_id = mp.id
                AND mpt.language = product_translations.language
            WHERE p.id = product_translations.product_id
        )
        WHERE meta_description IS NULL
    """))
    # Update url_slug where NULL
    connection.execute(text("""
        UPDATE product_translations
        SET url_slug = (
            SELECT mpt.url_slug
            FROM products p
            JOIN marketplace_products mp ON mp.id = p.marketplace_product_id
            JOIN marketplace_product_translations mpt ON mpt.marketplace_product_id = mp.id
                AND mpt.language = product_translations.language
            WHERE p.id = product_translations.product_id
        )
        WHERE url_slug IS NULL
    """))
def downgrade() -> None:
    """No-op: upgrade() only copied data, it did not move or delete anything.

    The source rows in marketplace_products / marketplace_product_translations
    remain intact, and resetting the populated product fields back to NULL
    would discard any vendor customizations made after this migration ran,
    so nothing is reverted here.
    """
    pass

View File

@@ -0,0 +1,69 @@
"""Add tier_id FK to vendor_subscriptions
Revision ID: k9f0a1b2c3d4
Revises: 2953ed10d22c
Create Date: 2025-12-26
Adds tier_id column to vendor_subscriptions table with FK to subscription_tiers.
Backfills tier_id based on existing tier (code) values.
"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers consumed by Alembic's migration graph.
revision = "k9f0a1b2c3d4"
down_revision = "2953ed10d22c"  # parent revision
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add vendor_subscriptions.tier_id (FK to subscription_tiers) and backfill it.

    tier_id is added nullable so the backfill can run afterwards; rows whose
    tier code has no matching subscription_tiers row keep tier_id = NULL.
    """
    # Batch mode rebuilds the table, which SQLite needs for ALTER operations.
    with op.batch_alter_table("vendor_subscriptions", schema=None) as batch_op:
        batch_op.add_column(sa.Column("tier_id", sa.Integer(), nullable=True))
        batch_op.create_index(
            "ix_vendor_subscriptions_tier_id", ["tier_id"], unique=False
        )
        batch_op.create_foreign_key(
            "fk_vendor_subscriptions_tier_id",
            "subscription_tiers",
            ["tier_id"],
            ["id"],
            ondelete="SET NULL",
        )

    # Link every existing subscription to its tier row by matching tier code.
    op.execute(
        """
        UPDATE vendor_subscriptions
        SET tier_id = (
            SELECT id FROM subscription_tiers
            WHERE subscription_tiers.code = vendor_subscriptions.tier
        )
        WHERE EXISTS (
            SELECT 1 FROM subscription_tiers
            WHERE subscription_tiers.code = vendor_subscriptions.tier
        )
        """
    )
def downgrade() -> None:
    """Remove tier_id again.

    The index must be dropped explicitly before the column: SQLite batch
    mode would otherwise try to recreate it on the rebuilt table, which no
    longer has the column.
    """
    with op.batch_alter_table("vendor_subscriptions", schema=None) as batch_op:
        batch_op.drop_index("ix_vendor_subscriptions_tier_id")
        # The FK constraint disappears together with the column.
        batch_op.drop_column("tier_id")

View File

@@ -0,0 +1,65 @@
"""Add capacity_snapshots table
Revision ID: l0a1b2c3d4e5
Revises: k9f0a1b2c3d4
Create Date: 2025-12-26
Adds table for tracking daily platform capacity metrics for growth forecasting.
"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers consumed by Alembic's migration graph.
revision = "l0a1b2c3d4e5"
down_revision = "k9f0a1b2c3d4"  # parent revision
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create capacity_snapshots: one row of platform metrics per day.

    snapshot_date is uniquely indexed, enforcing at most one snapshot per
    timestamp; counters default to 0 so partial snapshots are still valid.
    """
    op.create_table(
        "capacity_snapshots",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("snapshot_date", sa.DateTime(timezone=True), nullable=False),
        # Vendor metrics
        sa.Column("total_vendors", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("active_vendors", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("trial_vendors", sa.Integer(), nullable=False, server_default="0"),
        # Subscription metrics
        sa.Column("total_subscriptions", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("active_subscriptions", sa.Integer(), nullable=False, server_default="0"),
        # Resource metrics
        sa.Column("total_products", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("total_orders_month", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("total_team_members", sa.Integer(), nullable=False, server_default="0"),
        # Storage metrics
        sa.Column("storage_used_gb", sa.Numeric(10, 2), nullable=False, server_default="0"),
        sa.Column("db_size_mb", sa.Numeric(10, 2), nullable=False, server_default="0"),
        # Capacity metrics (nullable: may not be computable for every snapshot)
        sa.Column("theoretical_products_limit", sa.Integer(), nullable=True),
        sa.Column("theoretical_orders_limit", sa.Integer(), nullable=True),
        sa.Column("theoretical_team_limit", sa.Integer(), nullable=True),
        # Tier distribution
        sa.Column("tier_distribution", sa.JSON(), nullable=True),
        # Performance metrics
        sa.Column("avg_response_ms", sa.Integer(), nullable=True),
        sa.Column("peak_cpu_percent", sa.Numeric(5, 2), nullable=True),
        sa.Column("peak_memory_percent", sa.Numeric(5, 2), nullable=True),
        # Timestamps
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        # Primary key
        sa.PrimaryKeyConstraint("id"),
    )
    # Create indexes (snapshot_date unique: at most one snapshot per date)
    op.create_index("ix_capacity_snapshots_id", "capacity_snapshots", ["id"], unique=False)
    op.create_index("ix_capacity_snapshots_date", "capacity_snapshots", ["snapshot_date"], unique=True)
def downgrade() -> None:
    """Drop capacity_snapshots along with both of its indexes."""
    for index_name in ("ix_capacity_snapshots_date", "ix_capacity_snapshots_id"):
        op.drop_index(index_name, table_name="capacity_snapshots")
    op.drop_table("capacity_snapshots")

View File

@@ -0,0 +1,71 @@
"""add vendor onboarding table
Revision ID: m1b2c3d4e5f6
Revises: d7a4a3f06394
Create Date: 2025-12-27 22:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# Revision identifiers consumed by Alembic's migration graph.
revision: str = 'm1b2c3d4e5f6'
down_revision: Union[str, None] = 'd7a4a3f06394'  # parent revision
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create vendor_onboarding: one wizard-state row per vendor.

    Tracks overall status/current step plus per-step completion flags and
    timestamps for the four onboarding steps, with an admin skip override.
    vendor_id is uniquely indexed (one onboarding record per vendor).

    NOTE(review): created_at/updated_at are NOT NULL with no server_default,
    so the application must always supply them — confirm the ORM does.
    """
    op.create_table('vendor_onboarding',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=False),
        # Overall status
        sa.Column('status', sa.String(length=20), nullable=False, server_default='not_started'),
        sa.Column('current_step', sa.String(length=30), nullable=False, server_default='company_profile'),
        # Step 1: Company Profile
        sa.Column('step_company_profile_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')),
        sa.Column('step_company_profile_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('step_company_profile_data', sa.JSON(), nullable=True),
        # Step 2: Letzshop API Configuration
        sa.Column('step_letzshop_api_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')),
        sa.Column('step_letzshop_api_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('step_letzshop_api_connection_verified', sa.Boolean(), nullable=False, server_default=sa.text('false')),
        # Step 3: Product Import
        sa.Column('step_product_import_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')),
        sa.Column('step_product_import_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('step_product_import_csv_url_set', sa.Boolean(), nullable=False, server_default=sa.text('false')),
        # Step 4: Order Sync
        sa.Column('step_order_sync_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')),
        sa.Column('step_order_sync_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('step_order_sync_job_id', sa.Integer(), nullable=True),
        # Completion tracking
        sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        # Admin override
        sa.Column('skipped_by_admin', sa.Boolean(), nullable=False, server_default=sa.text('false')),
        sa.Column('skipped_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('skipped_reason', sa.Text(), nullable=True),
        sa.Column('skipped_by_user_id', sa.Integer(), nullable=True),
        # Timestamps
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        # Constraints
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['skipped_by_user_id'], ['users.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_vendor_onboarding_id'), 'vendor_onboarding', ['id'], unique=False)
    op.create_index(op.f('ix_vendor_onboarding_vendor_id'), 'vendor_onboarding', ['vendor_id'], unique=True)
    op.create_index(op.f('ix_vendor_onboarding_status'), 'vendor_onboarding', ['status'], unique=False)
    op.create_index('idx_onboarding_vendor_status', 'vendor_onboarding', ['vendor_id', 'status'], unique=False)
def downgrade() -> None:
    """Drop vendor_onboarding together with all of its indexes."""
    op.drop_index('idx_onboarding_vendor_status', table_name='vendor_onboarding')
    for index_name in ('ix_vendor_onboarding_status',
                       'ix_vendor_onboarding_vendor_id',
                       'ix_vendor_onboarding_id'):
        op.drop_index(op.f(index_name), table_name='vendor_onboarding')
    op.drop_table('vendor_onboarding')

View File

@@ -0,0 +1,179 @@
# app/modules/billing/migrations/versions/billing_001_merchant_subscriptions_and_feature_limits.py
"""
Merchant subscriptions and feature limits migration.
Creates:
- merchant_subscriptions table (replaces store_subscriptions)
- tier_feature_limits table (replaces hardcoded limit columns)
- merchant_feature_overrides table (replaces custom_*_limit columns)
Drops:
- store_subscriptions table
- features table
Alters:
- subscription_tiers: removes limit columns and features JSON
Revision ID: billing_001
"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers consumed by Alembic's migration graph.
revision = "billing_001"
down_revision = None  # root of its own branch; no parent revision
branch_labels = ("billing",)  # starts the separate "billing" migration branch
depends_on = None
def upgrade() -> None:
    """Apply the merchant-level billing schema.

    Creates merchant_subscriptions, tier_feature_limits and
    merchant_feature_overrides, repoints stripe_webhook_events to the new
    subscription table, then drops the legacy store_subscriptions/features
    tables and the legacy limit columns on subscription_tiers.

    Ordering note: stripe_webhook_events.subscription_id (FK ->
    store_subscriptions) must be dropped *before* store_subscriptions itself,
    otherwise the dependent foreign key blocks the DROP TABLE on PostgreSQL.
    """
    # ========================================================================
    # Create merchant_subscriptions table
    # ========================================================================
    op.create_table(
        "merchant_subscriptions",
        sa.Column("id", sa.Integer(), primary_key=True, index=True),
        sa.Column("merchant_id", sa.Integer(), sa.ForeignKey("merchants.id", ondelete="CASCADE"), nullable=False, index=True),
        sa.Column("platform_id", sa.Integer(), sa.ForeignKey("platforms.id", ondelete="CASCADE"), nullable=False, index=True),
        # SET NULL so deleting a tier does not cascade-delete subscriptions.
        sa.Column("tier_id", sa.Integer(), sa.ForeignKey("subscription_tiers.id", ondelete="SET NULL"), nullable=True, index=True),
        sa.Column("status", sa.String(20), nullable=False, server_default="trial", index=True),
        sa.Column("is_annual", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("period_start", sa.DateTime(timezone=True), nullable=False),
        sa.Column("period_end", sa.DateTime(timezone=True), nullable=False),
        sa.Column("trial_ends_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("stripe_customer_id", sa.String(100), nullable=True, index=True),
        sa.Column("stripe_subscription_id", sa.String(100), nullable=True, index=True),
        sa.Column("stripe_payment_method_id", sa.String(100), nullable=True),
        sa.Column("payment_retry_count", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("last_payment_error", sa.Text(), nullable=True),
        sa.Column("cancelled_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("cancellation_reason", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        # One subscription per merchant per platform.
        sa.UniqueConstraint("merchant_id", "platform_id", name="uq_merchant_platform_subscription"),
    )
    op.create_index("idx_merchant_sub_status", "merchant_subscriptions", ["merchant_id", "status"])
    op.create_index("idx_merchant_sub_platform", "merchant_subscriptions", ["platform_id", "status"])

    # ========================================================================
    # Create tier_feature_limits table (replaces hardcoded limit columns)
    # ========================================================================
    op.create_table(
        "tier_feature_limits",
        sa.Column("id", sa.Integer(), primary_key=True, index=True),
        sa.Column("tier_id", sa.Integer(), sa.ForeignKey("subscription_tiers.id", ondelete="CASCADE"), nullable=False, index=True),
        sa.Column("feature_code", sa.String(80), nullable=False, index=True),
        # NULL limit_value means "unlimited" by convention elsewhere; the
        # schema itself only enforces nullability.
        sa.Column("limit_value", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.UniqueConstraint("tier_id", "feature_code", name="uq_tier_feature_code"),
    )
    op.create_index("idx_tier_feature_lookup", "tier_feature_limits", ["tier_id", "feature_code"])

    # ========================================================================
    # Create merchant_feature_overrides table (replaces custom_*_limit columns)
    # ========================================================================
    op.create_table(
        "merchant_feature_overrides",
        sa.Column("id", sa.Integer(), primary_key=True, index=True),
        sa.Column("merchant_id", sa.Integer(), sa.ForeignKey("merchants.id", ondelete="CASCADE"), nullable=False, index=True),
        sa.Column("platform_id", sa.Integer(), sa.ForeignKey("platforms.id", ondelete="CASCADE"), nullable=False, index=True),
        sa.Column("feature_code", sa.String(80), nullable=False, index=True),
        sa.Column("limit_value", sa.Integer(), nullable=True),
        sa.Column("is_enabled", sa.Boolean(), nullable=False, server_default="1"),
        sa.Column("reason", sa.String(255), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.UniqueConstraint("merchant_id", "platform_id", "feature_code", name="uq_merchant_platform_feature"),
    )
    op.create_index("idx_merchant_override_lookup", "merchant_feature_overrides", ["merchant_id", "platform_id", "feature_code"])

    # ========================================================================
    # Update stripe_webhook_events FK to merchant_subscriptions.
    # Must run BEFORE dropping store_subscriptions: the old subscription_id
    # column holds a FK to store_subscriptions (see downgrade), and the
    # dependent constraint would block the DROP TABLE on PostgreSQL.
    # ========================================================================
    with op.batch_alter_table("stripe_webhook_events") as batch_op:
        batch_op.drop_column("subscription_id")
        batch_op.add_column(
            sa.Column("merchant_subscription_id", sa.Integer(),
                      sa.ForeignKey("merchant_subscriptions.id"), nullable=True, index=True)
        )

    # ========================================================================
    # Drop legacy tables
    # ========================================================================
    op.drop_table("store_subscriptions")
    op.drop_table("features")

    # ========================================================================
    # Remove legacy columns from subscription_tiers
    # ========================================================================
    with op.batch_alter_table("subscription_tiers") as batch_op:
        batch_op.drop_column("orders_per_month")
        batch_op.drop_column("products_limit")
        batch_op.drop_column("team_members")
        batch_op.drop_column("order_history_months")
        batch_op.drop_column("cms_pages_limit")
        batch_op.drop_column("cms_custom_pages_limit")
        batch_op.drop_column("features")

    # ========================================================================
    # Add merchant_id to billing_history
    # ========================================================================
    with op.batch_alter_table("billing_history") as batch_op:
        batch_op.add_column(
            sa.Column("merchant_id", sa.Integer(),
                      sa.ForeignKey("merchants.id"), nullable=True, index=True)
        )
def downgrade() -> None:
    """Revert to the legacy store-level billing schema.

    Ordering notes:
    - store_subscriptions is recreated BEFORE the subscription_id FK on
      stripe_webhook_events is restored; adding a FK that references a
      nonexistent table fails on PostgreSQL.
    - merchant_subscription_id is dropped from stripe_webhook_events before
      merchant_subscriptions itself, so no dangling FK blocks the drop.
    """
    # Remove merchant_id from billing_history
    with op.batch_alter_table("billing_history") as batch_op:
        batch_op.drop_column("merchant_id")

    # Restore legacy limit columns on subscription_tiers
    with op.batch_alter_table("subscription_tiers") as batch_op:
        batch_op.add_column(sa.Column("orders_per_month", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("products_limit", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("team_members", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("order_history_months", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("cms_pages_limit", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("cms_custom_pages_limit", sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column("features", sa.JSON(), nullable=True))

    # Recreate features table
    op.create_table(
        "features",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("code", sa.String(50), unique=True, nullable=False),
        sa.Column("name", sa.String(100), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("category", sa.String(50), nullable=False),
        sa.Column("is_active", sa.Boolean(), server_default="1"),
    )

    # Recreate store_subscriptions table (must exist before the FK below)
    op.create_table(
        "store_subscriptions",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("store_id", sa.Integer(), sa.ForeignKey("stores.id"), unique=True, nullable=False),
        sa.Column("tier", sa.String(20), nullable=False, server_default="essential"),
        sa.Column("status", sa.String(20), nullable=False, server_default="trial"),
        sa.Column("period_start", sa.DateTime(timezone=True), nullable=False),
        sa.Column("period_end", sa.DateTime(timezone=True), nullable=False),
    )

    # Restore subscription_id on stripe_webhook_events and detach the
    # merchant_subscriptions FK so the table can be dropped below.
    with op.batch_alter_table("stripe_webhook_events") as batch_op:
        batch_op.drop_column("merchant_subscription_id")
        batch_op.add_column(
            sa.Column("subscription_id", sa.Integer(),
                      sa.ForeignKey("store_subscriptions.id"), nullable=True, index=True)
        )

    # Drop new tables
    op.drop_table("merchant_feature_overrides")
    op.drop_table("tier_feature_limits")
    op.drop_table("merchant_subscriptions")

View File

@@ -0,0 +1,650 @@
"""add loyalty module tables
Revision ID: 0fb5d6d6ff97
Revises: zd3n4o5p6q7r8
Create Date: 2026-01-28 22:55:34.074321
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision: str = '0fb5d6d6ff97'
down_revision: Union[str, None] = 'zd3n4o5p6q7r8'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the loyalty module tables and sync auto-detected drift.

    Adds loyalty_programs, loyalty_cards, staff_pins,
    apple_device_registrations and loyalty_transactions (with their indexes),
    then applies a large batch of autogenerated column-comment / timestamp-type
    / index-naming adjustments on pre-existing tables picked up by Alembic
    autogenerate.

    NOTE(review): points_rewards uses the sqlite JSON dialect type while other
    alterations below reference postgresql types — presumably autogenerated
    against a SQLite dev database; confirm it renders correctly on the
    production backend.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # ------------------------------------------------------------------
    # loyalty_programs: one program per store (store_id index is unique).
    # ------------------------------------------------------------------
    op.create_table('loyalty_programs',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('store_id', sa.Integer(), nullable=False),
    sa.Column('loyalty_type', sa.String(length=20), nullable=False),
    sa.Column('stamps_target', sa.Integer(), nullable=False, comment='Number of stamps needed for reward'),
    sa.Column('stamps_reward_description', sa.String(length=255), nullable=False, comment='Description of stamp reward'),
    sa.Column('stamps_reward_value_cents', sa.Integer(), nullable=True, comment='Value of stamp reward in cents (for analytics)'),
    sa.Column('points_per_euro', sa.Integer(), nullable=False, comment='Points earned per euro spent'),
    sa.Column('points_rewards', sqlite.JSON(), nullable=False, comment='List of point rewards: [{id, name, points_required, description}]'),
    sa.Column('cooldown_minutes', sa.Integer(), nullable=False, comment='Minutes between stamps for same card'),
    sa.Column('max_daily_stamps', sa.Integer(), nullable=False, comment='Maximum stamps per card per day'),
    sa.Column('require_staff_pin', sa.Boolean(), nullable=False, comment='Require staff PIN for stamp/points operations'),
    sa.Column('card_name', sa.String(length=100), nullable=True, comment='Display name for loyalty card'),
    sa.Column('card_color', sa.String(length=7), nullable=False, comment='Primary color for card (hex)'),
    sa.Column('card_secondary_color', sa.String(length=7), nullable=True, comment='Secondary color for card (hex)'),
    sa.Column('logo_url', sa.String(length=500), nullable=True, comment='URL to store logo for card'),
    sa.Column('hero_image_url', sa.String(length=500), nullable=True, comment='URL to hero image for card'),
    sa.Column('google_issuer_id', sa.String(length=100), nullable=True, comment='Google Wallet Issuer ID'),
    sa.Column('google_class_id', sa.String(length=200), nullable=True, comment='Google Wallet Loyalty Class ID'),
    sa.Column('apple_pass_type_id', sa.String(length=100), nullable=True, comment='Apple Wallet Pass Type ID'),
    sa.Column('terms_text', sa.Text(), nullable=True, comment='Loyalty program terms and conditions'),
    sa.Column('privacy_url', sa.String(length=500), nullable=True, comment='URL to privacy policy'),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('activated_at', sa.DateTime(timezone=True), nullable=True, comment='When program was first activated'),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['store_id'], ['stores.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_loyalty_program_store_active', 'loyalty_programs', ['store_id', 'is_active'], unique=False)
    op.create_index(op.f('ix_loyalty_programs_id'), 'loyalty_programs', ['id'], unique=False)
    op.create_index(op.f('ix_loyalty_programs_is_active'), 'loyalty_programs', ['is_active'], unique=False)
    op.create_index(op.f('ix_loyalty_programs_store_id'), 'loyalty_programs', ['store_id'], unique=True)
    # ------------------------------------------------------------------
    # loyalty_cards: one card per (customer, program); store_id is
    # denormalized per the column comments for query performance.
    # ------------------------------------------------------------------
    op.create_table('loyalty_cards',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('customer_id', sa.Integer(), nullable=False),
    sa.Column('program_id', sa.Integer(), nullable=False),
    sa.Column('store_id', sa.Integer(), nullable=False, comment='Denormalized for query performance'),
    sa.Column('card_number', sa.String(length=20), nullable=False, comment='Human-readable card number'),
    sa.Column('qr_code_data', sa.String(length=50), nullable=False, comment='Data encoded in QR code for scanning'),
    sa.Column('stamp_count', sa.Integer(), nullable=False, comment='Current stamps toward next reward'),
    sa.Column('total_stamps_earned', sa.Integer(), nullable=False, comment='Lifetime stamps earned'),
    sa.Column('stamps_redeemed', sa.Integer(), nullable=False, comment='Total rewards redeemed (stamps reset on redemption)'),
    sa.Column('points_balance', sa.Integer(), nullable=False, comment='Current available points'),
    sa.Column('total_points_earned', sa.Integer(), nullable=False, comment='Lifetime points earned'),
    sa.Column('points_redeemed', sa.Integer(), nullable=False, comment='Lifetime points redeemed'),
    sa.Column('google_object_id', sa.String(length=200), nullable=True, comment='Google Wallet Loyalty Object ID'),
    sa.Column('google_object_jwt', sa.String(length=2000), nullable=True, comment="JWT for Google Wallet 'Add to Wallet' button"),
    sa.Column('apple_serial_number', sa.String(length=100), nullable=True, comment='Apple Wallet pass serial number'),
    sa.Column('apple_auth_token', sa.String(length=100), nullable=True, comment='Apple Wallet authentication token for updates'),
    sa.Column('last_stamp_at', sa.DateTime(timezone=True), nullable=True, comment='Last stamp added (for cooldown)'),
    sa.Column('last_points_at', sa.DateTime(timezone=True), nullable=True, comment='Last points earned'),
    sa.Column('last_redemption_at', sa.DateTime(timezone=True), nullable=True, comment='Last reward redemption'),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['customer_id'], ['customers.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['program_id'], ['loyalty_programs.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['store_id'], ['stores.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_loyalty_card_customer_program', 'loyalty_cards', ['customer_id', 'program_id'], unique=True)
    op.create_index('idx_loyalty_card_store_active', 'loyalty_cards', ['store_id', 'is_active'], unique=False)
    op.create_index(op.f('ix_loyalty_cards_apple_serial_number'), 'loyalty_cards', ['apple_serial_number'], unique=True)
    op.create_index(op.f('ix_loyalty_cards_card_number'), 'loyalty_cards', ['card_number'], unique=True)
    op.create_index(op.f('ix_loyalty_cards_customer_id'), 'loyalty_cards', ['customer_id'], unique=False)
    op.create_index(op.f('ix_loyalty_cards_google_object_id'), 'loyalty_cards', ['google_object_id'], unique=False)
    op.create_index(op.f('ix_loyalty_cards_id'), 'loyalty_cards', ['id'], unique=False)
    op.create_index(op.f('ix_loyalty_cards_is_active'), 'loyalty_cards', ['is_active'], unique=False)
    op.create_index(op.f('ix_loyalty_cards_program_id'), 'loyalty_cards', ['program_id'], unique=False)
    op.create_index(op.f('ix_loyalty_cards_qr_code_data'), 'loyalty_cards', ['qr_code_data'], unique=True)
    op.create_index(op.f('ix_loyalty_cards_store_id'), 'loyalty_cards', ['store_id'], unique=False)
    # ------------------------------------------------------------------
    # staff_pins: hashed PINs with lockout tracking (failed_attempts /
    # locked_until columns).
    # ------------------------------------------------------------------
    op.create_table('staff_pins',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('program_id', sa.Integer(), nullable=False),
    sa.Column('store_id', sa.Integer(), nullable=False, comment='Denormalized for query performance'),
    sa.Column('name', sa.String(length=100), nullable=False, comment='Staff member name'),
    sa.Column('staff_id', sa.String(length=50), nullable=True, comment='Optional staff ID/employee number'),
    sa.Column('pin_hash', sa.String(length=255), nullable=False, comment='bcrypt hash of PIN'),
    sa.Column('failed_attempts', sa.Integer(), nullable=False, comment='Consecutive failed PIN attempts'),
    sa.Column('locked_until', sa.DateTime(timezone=True), nullable=True, comment='Lockout expires at this time'),
    sa.Column('last_used_at', sa.DateTime(timezone=True), nullable=True, comment='Last successful use of PIN'),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['program_id'], ['loyalty_programs.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['store_id'], ['stores.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_staff_pin_program_active', 'staff_pins', ['program_id', 'is_active'], unique=False)
    op.create_index('idx_staff_pin_store_active', 'staff_pins', ['store_id', 'is_active'], unique=False)
    op.create_index(op.f('ix_staff_pins_id'), 'staff_pins', ['id'], unique=False)
    op.create_index(op.f('ix_staff_pins_is_active'), 'staff_pins', ['is_active'], unique=False)
    op.create_index(op.f('ix_staff_pins_program_id'), 'staff_pins', ['program_id'], unique=False)
    op.create_index(op.f('ix_staff_pins_staff_id'), 'staff_pins', ['staff_id'], unique=False)
    op.create_index(op.f('ix_staff_pins_store_id'), 'staff_pins', ['store_id'], unique=False)
    # ------------------------------------------------------------------
    # apple_device_registrations: APNs push registrations per card;
    # (device, card) pairs are unique via the composite index.
    # ------------------------------------------------------------------
    op.create_table('apple_device_registrations',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('card_id', sa.Integer(), nullable=False),
    sa.Column('device_library_identifier', sa.String(length=100), nullable=False, comment='Unique identifier for the device/library'),
    sa.Column('push_token', sa.String(length=100), nullable=False, comment='APNs push token for this device'),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['card_id'], ['loyalty_cards.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_apple_device_card', 'apple_device_registrations', ['device_library_identifier', 'card_id'], unique=True)
    op.create_index(op.f('ix_apple_device_registrations_card_id'), 'apple_device_registrations', ['card_id'], unique=False)
    op.create_index(op.f('ix_apple_device_registrations_device_library_identifier'), 'apple_device_registrations', ['device_library_identifier'], unique=False)
    op.create_index(op.f('ix_apple_device_registrations_id'), 'apple_device_registrations', ['id'], unique=False)
    # ------------------------------------------------------------------
    # loyalty_transactions: append-only ledger of stamp/point deltas with
    # audit fields (ip_address, user_agent); staff_pin FK is SET NULL so
    # history survives PIN deletion.
    # ------------------------------------------------------------------
    op.create_table('loyalty_transactions',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('card_id', sa.Integer(), nullable=False),
    sa.Column('store_id', sa.Integer(), nullable=False, comment='Denormalized for query performance'),
    sa.Column('staff_pin_id', sa.Integer(), nullable=True, comment='Staff PIN used for this operation'),
    sa.Column('transaction_type', sa.String(length=30), nullable=False),
    sa.Column('stamps_delta', sa.Integer(), nullable=False, comment='Change in stamps (+1 for earn, -N for redeem)'),
    sa.Column('points_delta', sa.Integer(), nullable=False, comment='Change in points (+N for earn, -N for redeem)'),
    sa.Column('stamps_balance_after', sa.Integer(), nullable=True, comment='Stamp count after this transaction'),
    sa.Column('points_balance_after', sa.Integer(), nullable=True, comment='Points balance after this transaction'),
    sa.Column('purchase_amount_cents', sa.Integer(), nullable=True, comment='Purchase amount in cents (for points calculation)'),
    sa.Column('order_reference', sa.String(length=100), nullable=True, comment='Reference to order that triggered points'),
    sa.Column('reward_id', sa.String(length=50), nullable=True, comment='ID of redeemed reward (from program.points_rewards)'),
    sa.Column('reward_description', sa.String(length=255), nullable=True, comment='Description of redeemed reward'),
    sa.Column('ip_address', sa.String(length=45), nullable=True, comment='IP address of requester (IPv4 or IPv6)'),
    sa.Column('user_agent', sa.String(length=500), nullable=True, comment='User agent string'),
    sa.Column('notes', sa.Text(), nullable=True, comment='Additional notes (e.g., reason for adjustment)'),
    sa.Column('transaction_at', sa.DateTime(timezone=True), nullable=False, comment='When the transaction occurred (may differ from created_at)'),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['card_id'], ['loyalty_cards.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['staff_pin_id'], ['staff_pins.id'], ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['store_id'], ['stores.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_loyalty_tx_card_type', 'loyalty_transactions', ['card_id', 'transaction_type'], unique=False)
    op.create_index('idx_loyalty_tx_type_date', 'loyalty_transactions', ['transaction_type', 'transaction_at'], unique=False)
    op.create_index('idx_loyalty_tx_store_date', 'loyalty_transactions', ['store_id', 'transaction_at'], unique=False)
    op.create_index(op.f('ix_loyalty_transactions_card_id'), 'loyalty_transactions', ['card_id'], unique=False)
    op.create_index(op.f('ix_loyalty_transactions_id'), 'loyalty_transactions', ['id'], unique=False)
    op.create_index(op.f('ix_loyalty_transactions_order_reference'), 'loyalty_transactions', ['order_reference'], unique=False)
    op.create_index(op.f('ix_loyalty_transactions_staff_pin_id'), 'loyalty_transactions', ['staff_pin_id'], unique=False)
    op.create_index(op.f('ix_loyalty_transactions_transaction_at'), 'loyalty_transactions', ['transaction_at'], unique=False)
    op.create_index(op.f('ix_loyalty_transactions_transaction_type'), 'loyalty_transactions', ['transaction_type'], unique=False)
    op.create_index(op.f('ix_loyalty_transactions_store_id'), 'loyalty_transactions', ['store_id'], unique=False)
    # ------------------------------------------------------------------
    # Autogenerated drift sync on existing tables from here down:
    # comment updates, TIMESTAMP(tz) -> DateTime type changes, and
    # renaming hand-made idx_* indexes to Alembic's op.f(ix_*) convention.
    # ------------------------------------------------------------------
    op.alter_column('admin_menu_configs', 'platform_id',
               existing_type=sa.INTEGER(),
               comment='Platform scope - applies to users/stores of this platform',
               existing_comment='Platform scope - applies to all platform admins of this platform',
               existing_nullable=True)
    op.alter_column('admin_menu_configs', 'user_id',
               existing_type=sa.INTEGER(),
               comment='User scope - applies to this specific super admin (admin frontend only)',
               existing_comment='User scope - applies to this specific super admin',
               existing_nullable=True)
    op.alter_column('admin_menu_configs', 'created_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.alter_column('admin_menu_configs', 'updated_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.drop_index('idx_admin_menu_configs_frontend_type', table_name='admin_menu_configs')
    op.drop_index('idx_admin_menu_configs_menu_item_id', table_name='admin_menu_configs')
    op.drop_index('idx_admin_menu_configs_platform_id', table_name='admin_menu_configs')
    op.drop_index('idx_admin_menu_configs_user_id', table_name='admin_menu_configs')
    op.create_index(op.f('ix_admin_menu_configs_frontend_type'), 'admin_menu_configs', ['frontend_type'], unique=False)
    op.create_index(op.f('ix_admin_menu_configs_id'), 'admin_menu_configs', ['id'], unique=False)
    op.create_index(op.f('ix_admin_menu_configs_menu_item_id'), 'admin_menu_configs', ['menu_item_id'], unique=False)
    op.create_index(op.f('ix_admin_menu_configs_platform_id'), 'admin_menu_configs', ['platform_id'], unique=False)
    op.create_index(op.f('ix_admin_menu_configs_user_id'), 'admin_menu_configs', ['user_id'], unique=False)
    op.alter_column('admin_platforms', 'created_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.alter_column('admin_platforms', 'updated_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.drop_index('idx_admin_platforms_platform_id', table_name='admin_platforms')
    op.drop_index('idx_admin_platforms_user_id', table_name='admin_platforms')
    op.create_index(op.f('ix_admin_platforms_id'), 'admin_platforms', ['id'], unique=False)
    op.create_index(op.f('ix_admin_platforms_platform_id'), 'admin_platforms', ['platform_id'], unique=False)
    op.create_index(op.f('ix_admin_platforms_user_id'), 'admin_platforms', ['user_id'], unique=False)
    op.alter_column('content_pages', 'platform_id',
               existing_type=sa.INTEGER(),
               comment='Platform this page belongs to',
               existing_nullable=False)
    op.alter_column('content_pages', 'store_id',
               existing_type=sa.INTEGER(),
               comment='Store this page belongs to (NULL for platform/default pages)',
               existing_nullable=True)
    op.alter_column('content_pages', 'is_platform_page',
               existing_type=sa.BOOLEAN(),
               comment='True = platform marketing page (homepage, pricing); False = store default or override',
               existing_nullable=False,
               existing_server_default=sa.text('false'))
    op.alter_column('platform_modules', 'created_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.alter_column('platform_modules', 'updated_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.create_index(op.f('ix_platform_modules_id'), 'platform_modules', ['id'], unique=False)
    op.alter_column('platforms', 'code',
               existing_type=sa.VARCHAR(length=50),
               comment="Unique platform identifier (e.g., 'oms', 'loyalty', 'sites')",
               existing_nullable=False)
    op.alter_column('platforms', 'name',
               existing_type=sa.VARCHAR(length=100),
               comment="Display name (e.g., 'Wizamart OMS')",
               existing_nullable=False)
    op.alter_column('platforms', 'description',
               existing_type=sa.TEXT(),
               comment='Platform description for admin/marketing purposes',
               existing_nullable=True)
    op.alter_column('platforms', 'domain',
               existing_type=sa.VARCHAR(length=255),
               comment="Production domain (e.g., 'oms.lu', 'loyalty.lu')",
               existing_nullable=True)
    op.alter_column('platforms', 'path_prefix',
               existing_type=sa.VARCHAR(length=50),
               comment="Development path prefix (e.g., 'oms' for localhost:9999/oms/*)",
               existing_nullable=True)
    op.alter_column('platforms', 'logo',
               existing_type=sa.VARCHAR(length=500),
               comment='Logo URL for light mode',
               existing_nullable=True)
    op.alter_column('platforms', 'logo_dark',
               existing_type=sa.VARCHAR(length=500),
               comment='Logo URL for dark mode',
               existing_nullable=True)
    op.alter_column('platforms', 'favicon',
               existing_type=sa.VARCHAR(length=500),
               comment='Favicon URL',
               existing_nullable=True)
    op.alter_column('platforms', 'theme_config',
               existing_type=postgresql.JSON(astext_type=sa.Text()),
               comment='Theme configuration (colors, fonts, etc.)',
               existing_nullable=True)
    op.alter_column('platforms', 'default_language',
               existing_type=sa.VARCHAR(length=5),
               comment="Default language code (e.g., 'fr', 'en', 'de')",
               existing_nullable=False,
               existing_server_default=sa.text("'fr'::character varying"))
    op.alter_column('platforms', 'supported_languages',
               existing_type=postgresql.JSON(astext_type=sa.Text()),
               comment='List of supported language codes',
               existing_nullable=False)
    op.alter_column('platforms', 'is_active',
               existing_type=sa.BOOLEAN(),
               comment='Whether the platform is active and accessible',
               existing_nullable=False,
               existing_server_default=sa.text('true'))
    op.alter_column('platforms', 'is_public',
               existing_type=sa.BOOLEAN(),
               comment='Whether the platform is visible in public listings',
               existing_nullable=False,
               existing_server_default=sa.text('true'))
    op.alter_column('platforms', 'settings',
               existing_type=postgresql.JSON(astext_type=sa.Text()),
               comment='Platform-specific settings and feature flags',
               existing_nullable=True)
    op.alter_column('platforms', 'created_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.alter_column('platforms', 'updated_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.create_index(op.f('ix_platforms_id'), 'platforms', ['id'], unique=False)
    op.alter_column('subscription_tiers', 'platform_id',
               existing_type=sa.INTEGER(),
               comment='Platform this tier belongs to (NULL = global tier)',
               existing_nullable=True)
    op.alter_column('subscription_tiers', 'cms_pages_limit',
               existing_type=sa.INTEGER(),
               comment='Total CMS pages limit (NULL = unlimited)',
               existing_nullable=True)
    op.alter_column('subscription_tiers', 'cms_custom_pages_limit',
               existing_type=sa.INTEGER(),
               comment='Custom pages limit, excluding overrides (NULL = unlimited)',
               existing_nullable=True)
    # NOTE(review): tier code index changes from unique to non-unique here —
    # presumably intentional for per-platform tiers; confirm.
    op.drop_index('ix_subscription_tiers_code', table_name='subscription_tiers')
    op.create_index(op.f('ix_subscription_tiers_code'), 'subscription_tiers', ['code'], unique=False)
    op.alter_column('users', 'is_super_admin',
               existing_type=sa.BOOLEAN(),
               comment=None,
               existing_comment='Whether this admin has access to all platforms (super admin)',
               existing_nullable=False,
               existing_server_default=sa.text('false'))
    op.alter_column('store_platforms', 'store_id',
               existing_type=sa.INTEGER(),
               comment='Reference to the store',
               existing_nullable=False)
    op.alter_column('store_platforms', 'platform_id',
               existing_type=sa.INTEGER(),
               comment='Reference to the platform',
               existing_nullable=False)
    op.alter_column('store_platforms', 'tier_id',
               existing_type=sa.INTEGER(),
               comment='Platform-specific subscription tier',
               existing_nullable=True)
    op.alter_column('store_platforms', 'is_active',
               existing_type=sa.BOOLEAN(),
               comment='Whether the store is active on this platform',
               existing_nullable=False,
               existing_server_default=sa.text('true'))
    op.alter_column('store_platforms', 'is_primary',
               existing_type=sa.BOOLEAN(),
               comment="Whether this is the store's primary platform",
               existing_nullable=False,
               existing_server_default=sa.text('false'))
    op.alter_column('store_platforms', 'custom_subdomain',
               existing_type=sa.VARCHAR(length=100),
               comment='Platform-specific subdomain (if different from main subdomain)',
               existing_nullable=True)
    op.alter_column('store_platforms', 'settings',
               existing_type=postgresql.JSON(astext_type=sa.Text()),
               comment='Platform-specific store settings',
               existing_nullable=True)
    op.alter_column('store_platforms', 'joined_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               comment='When the store joined this platform',
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.alter_column('store_platforms', 'created_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.alter_column('store_platforms', 'updated_at',
               existing_type=postgresql.TIMESTAMP(timezone=True),
               type_=sa.DateTime(),
               existing_nullable=False,
               existing_server_default=sa.text('now()'))
    op.create_index(op.f('ix_store_platforms_id'), 'store_platforms', ['id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert to the pre-upgrade schema.

    Mirrors upgrade() in reverse:

    * strips the documentation comments added to columns of
      store_platforms, subscription_tiers, platforms, content_pages and
      admin_menu_configs (comment=None restores the bare column);
    * converts created_at/updated_at columns back from naive DateTime to
      timezone-aware TIMESTAMP;
    * swaps the op.f()-named indexes back to the legacy idx_* names on
      admin_platforms and admin_menu_configs;
    * drops the loyalty module tables (loyalty_transactions,
      apple_device_registrations, staff_pins, loyalty_cards,
      loyalty_programs) together with all of their indexes.

    NOTE(review): the final table drops are destructive — loyalty data is
    lost and cannot be recovered by re-running upgrade().
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- store_platforms: drop id index, revert timestamp types / comments ---
    op.drop_index(op.f('ix_store_platforms_id'), table_name='store_platforms')
    op.alter_column('store_platforms', 'updated_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    op.alter_column('store_platforms', 'created_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    op.alter_column('store_platforms', 'joined_at',
        existing_type=postgresql.TIMESTAMP(timezone=True),
        comment=None,
        existing_comment='When the store joined this platform',
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    op.alter_column('store_platforms', 'settings',
        existing_type=postgresql.JSON(astext_type=sa.Text()),
        comment=None,
        existing_comment='Platform-specific store settings',
        existing_nullable=True)
    op.alter_column('store_platforms', 'custom_subdomain',
        existing_type=sa.VARCHAR(length=100),
        comment=None,
        existing_comment='Platform-specific subdomain (if different from main subdomain)',
        existing_nullable=True)
    op.alter_column('store_platforms', 'is_primary',
        existing_type=sa.BOOLEAN(),
        comment=None,
        existing_comment="Whether this is the store's primary platform",
        existing_nullable=False,
        existing_server_default=sa.text('false'))
    op.alter_column('store_platforms', 'is_active',
        existing_type=sa.BOOLEAN(),
        comment=None,
        existing_comment='Whether the store is active on this platform',
        existing_nullable=False,
        existing_server_default=sa.text('true'))
    op.alter_column('store_platforms', 'tier_id',
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment='Platform-specific subscription tier',
        existing_nullable=True)
    op.alter_column('store_platforms', 'platform_id',
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment='Reference to the platform',
        existing_nullable=False)
    op.alter_column('store_platforms', 'store_id',
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment='Reference to the store',
        existing_nullable=False)
    # --- users: restore the is_super_admin column comment ---
    op.alter_column('users', 'is_super_admin',
        existing_type=sa.BOOLEAN(),
        comment='Whether this admin has access to all platforms (super admin)',
        existing_nullable=False,
        existing_server_default=sa.text('false'))
    # --- subscription_tiers: restore legacy unique index name, drop comments ---
    op.drop_index(op.f('ix_subscription_tiers_code'), table_name='subscription_tiers')
    op.create_index('ix_subscription_tiers_code', 'subscription_tiers', ['code'], unique=True)
    op.alter_column('subscription_tiers', 'cms_custom_pages_limit',
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment='Custom pages limit, excluding overrides (NULL = unlimited)',
        existing_nullable=True)
    op.alter_column('subscription_tiers', 'cms_pages_limit',
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment='Total CMS pages limit (NULL = unlimited)',
        existing_nullable=True)
    op.alter_column('subscription_tiers', 'platform_id',
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment='Platform this tier belongs to (NULL = global tier)',
        existing_nullable=True)
    # --- platforms: drop id index, revert timestamp types and all comments ---
    op.drop_index(op.f('ix_platforms_id'), table_name='platforms')
    op.alter_column('platforms', 'updated_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    op.alter_column('platforms', 'created_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    op.alter_column('platforms', 'settings',
        existing_type=postgresql.JSON(astext_type=sa.Text()),
        comment=None,
        existing_comment='Platform-specific settings and feature flags',
        existing_nullable=True)
    op.alter_column('platforms', 'is_public',
        existing_type=sa.BOOLEAN(),
        comment=None,
        existing_comment='Whether the platform is visible in public listings',
        existing_nullable=False,
        existing_server_default=sa.text('true'))
    op.alter_column('platforms', 'is_active',
        existing_type=sa.BOOLEAN(),
        comment=None,
        existing_comment='Whether the platform is active and accessible',
        existing_nullable=False,
        existing_server_default=sa.text('true'))
    op.alter_column('platforms', 'supported_languages',
        existing_type=postgresql.JSON(astext_type=sa.Text()),
        comment=None,
        existing_comment='List of supported language codes',
        existing_nullable=False)
    op.alter_column('platforms', 'default_language',
        existing_type=sa.VARCHAR(length=5),
        comment=None,
        existing_comment="Default language code (e.g., 'fr', 'en', 'de')",
        existing_nullable=False,
        existing_server_default=sa.text("'fr'::character varying"))
    op.alter_column('platforms', 'theme_config',
        existing_type=postgresql.JSON(astext_type=sa.Text()),
        comment=None,
        existing_comment='Theme configuration (colors, fonts, etc.)',
        existing_nullable=True)
    op.alter_column('platforms', 'favicon',
        existing_type=sa.VARCHAR(length=500),
        comment=None,
        existing_comment='Favicon URL',
        existing_nullable=True)
    op.alter_column('platforms', 'logo_dark',
        existing_type=sa.VARCHAR(length=500),
        comment=None,
        existing_comment='Logo URL for dark mode',
        existing_nullable=True)
    op.alter_column('platforms', 'logo',
        existing_type=sa.VARCHAR(length=500),
        comment=None,
        existing_comment='Logo URL for light mode',
        existing_nullable=True)
    op.alter_column('platforms', 'path_prefix',
        existing_type=sa.VARCHAR(length=50),
        comment=None,
        existing_comment="Development path prefix (e.g., 'oms' for localhost:9999/oms/*)",
        existing_nullable=True)
    op.alter_column('platforms', 'domain',
        existing_type=sa.VARCHAR(length=255),
        comment=None,
        existing_comment="Production domain (e.g., 'oms.lu', 'loyalty.lu')",
        existing_nullable=True)
    op.alter_column('platforms', 'description',
        existing_type=sa.TEXT(),
        comment=None,
        existing_comment='Platform description for admin/marketing purposes',
        existing_nullable=True)
    op.alter_column('platforms', 'name',
        existing_type=sa.VARCHAR(length=100),
        comment=None,
        existing_comment="Display name (e.g., 'Wizamart OMS')",
        existing_nullable=False)
    op.alter_column('platforms', 'code',
        existing_type=sa.VARCHAR(length=50),
        comment=None,
        existing_comment="Unique platform identifier (e.g., 'oms', 'loyalty', 'sites')",
        existing_nullable=False)
    # --- platform_modules: drop id index, revert timestamp types ---
    op.drop_index(op.f('ix_platform_modules_id'), table_name='platform_modules')
    op.alter_column('platform_modules', 'updated_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    op.alter_column('platform_modules', 'created_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    # --- content_pages: drop column comments ---
    op.alter_column('content_pages', 'is_platform_page',
        existing_type=sa.BOOLEAN(),
        comment=None,
        existing_comment='True = platform marketing page (homepage, pricing); False = store default or override',
        existing_nullable=False,
        existing_server_default=sa.text('false'))
    op.alter_column('content_pages', 'store_id',
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment='Store this page belongs to (NULL for platform/default pages)',
        existing_nullable=True)
    op.alter_column('content_pages', 'platform_id',
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment='Platform this page belongs to',
        existing_nullable=False)
    # --- admin_platforms: restore legacy idx_* index names, revert timestamps ---
    op.drop_index(op.f('ix_admin_platforms_user_id'), table_name='admin_platforms')
    op.drop_index(op.f('ix_admin_platforms_platform_id'), table_name='admin_platforms')
    op.drop_index(op.f('ix_admin_platforms_id'), table_name='admin_platforms')
    op.create_index('idx_admin_platforms_user_id', 'admin_platforms', ['user_id'], unique=False)
    op.create_index('idx_admin_platforms_platform_id', 'admin_platforms', ['platform_id'], unique=False)
    op.alter_column('admin_platforms', 'updated_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    op.alter_column('admin_platforms', 'created_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    # --- admin_menu_configs: restore legacy idx_* index names, timestamps, comments ---
    op.drop_index(op.f('ix_admin_menu_configs_user_id'), table_name='admin_menu_configs')
    op.drop_index(op.f('ix_admin_menu_configs_platform_id'), table_name='admin_menu_configs')
    op.drop_index(op.f('ix_admin_menu_configs_menu_item_id'), table_name='admin_menu_configs')
    op.drop_index(op.f('ix_admin_menu_configs_id'), table_name='admin_menu_configs')
    op.drop_index(op.f('ix_admin_menu_configs_frontend_type'), table_name='admin_menu_configs')
    op.create_index('idx_admin_menu_configs_user_id', 'admin_menu_configs', ['user_id'], unique=False)
    op.create_index('idx_admin_menu_configs_platform_id', 'admin_menu_configs', ['platform_id'], unique=False)
    op.create_index('idx_admin_menu_configs_menu_item_id', 'admin_menu_configs', ['menu_item_id'], unique=False)
    op.create_index('idx_admin_menu_configs_frontend_type', 'admin_menu_configs', ['frontend_type'], unique=False)
    op.alter_column('admin_menu_configs', 'updated_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    op.alter_column('admin_menu_configs', 'created_at',
        existing_type=sa.DateTime(),
        type_=postgresql.TIMESTAMP(timezone=True),
        existing_nullable=False,
        existing_server_default=sa.text('now()'))
    # Restore the pre-upgrade wording of the scope comments.
    op.alter_column('admin_menu_configs', 'user_id',
        existing_type=sa.INTEGER(),
        comment='User scope - applies to this specific super admin',
        existing_comment='User scope - applies to this specific super admin (admin frontend only)',
        existing_nullable=True)
    op.alter_column('admin_menu_configs', 'platform_id',
        existing_type=sa.INTEGER(),
        comment='Platform scope - applies to all platform admins of this platform',
        existing_comment='Platform scope - applies to users/stores of this platform',
        existing_nullable=True)
    # --- loyalty module: drop all indexes, then tables (children first) ---
    op.drop_index(op.f('ix_loyalty_transactions_store_id'), table_name='loyalty_transactions')
    op.drop_index(op.f('ix_loyalty_transactions_transaction_type'), table_name='loyalty_transactions')
    op.drop_index(op.f('ix_loyalty_transactions_transaction_at'), table_name='loyalty_transactions')
    op.drop_index(op.f('ix_loyalty_transactions_staff_pin_id'), table_name='loyalty_transactions')
    op.drop_index(op.f('ix_loyalty_transactions_order_reference'), table_name='loyalty_transactions')
    op.drop_index(op.f('ix_loyalty_transactions_id'), table_name='loyalty_transactions')
    op.drop_index(op.f('ix_loyalty_transactions_card_id'), table_name='loyalty_transactions')
    op.drop_index('idx_loyalty_tx_store_date', table_name='loyalty_transactions')
    op.drop_index('idx_loyalty_tx_type_date', table_name='loyalty_transactions')
    op.drop_index('idx_loyalty_tx_card_type', table_name='loyalty_transactions')
    op.drop_table('loyalty_transactions')
    op.drop_index(op.f('ix_apple_device_registrations_id'), table_name='apple_device_registrations')
    op.drop_index(op.f('ix_apple_device_registrations_device_library_identifier'), table_name='apple_device_registrations')
    op.drop_index(op.f('ix_apple_device_registrations_card_id'), table_name='apple_device_registrations')
    op.drop_index('idx_apple_device_card', table_name='apple_device_registrations')
    op.drop_table('apple_device_registrations')
    op.drop_index(op.f('ix_staff_pins_store_id'), table_name='staff_pins')
    op.drop_index(op.f('ix_staff_pins_staff_id'), table_name='staff_pins')
    op.drop_index(op.f('ix_staff_pins_program_id'), table_name='staff_pins')
    op.drop_index(op.f('ix_staff_pins_is_active'), table_name='staff_pins')
    op.drop_index(op.f('ix_staff_pins_id'), table_name='staff_pins')
    op.drop_index('idx_staff_pin_store_active', table_name='staff_pins')
    op.drop_index('idx_staff_pin_program_active', table_name='staff_pins')
    op.drop_table('staff_pins')
    op.drop_index(op.f('ix_loyalty_cards_store_id'), table_name='loyalty_cards')
    op.drop_index(op.f('ix_loyalty_cards_qr_code_data'), table_name='loyalty_cards')
    op.drop_index(op.f('ix_loyalty_cards_program_id'), table_name='loyalty_cards')
    op.drop_index(op.f('ix_loyalty_cards_is_active'), table_name='loyalty_cards')
    op.drop_index(op.f('ix_loyalty_cards_id'), table_name='loyalty_cards')
    op.drop_index(op.f('ix_loyalty_cards_google_object_id'), table_name='loyalty_cards')
    op.drop_index(op.f('ix_loyalty_cards_customer_id'), table_name='loyalty_cards')
    op.drop_index(op.f('ix_loyalty_cards_card_number'), table_name='loyalty_cards')
    op.drop_index(op.f('ix_loyalty_cards_apple_serial_number'), table_name='loyalty_cards')
    op.drop_index('idx_loyalty_card_store_active', table_name='loyalty_cards')
    op.drop_index('idx_loyalty_card_customer_program', table_name='loyalty_cards')
    op.drop_table('loyalty_cards')
    op.drop_index(op.f('ix_loyalty_programs_store_id'), table_name='loyalty_programs')
    op.drop_index(op.f('ix_loyalty_programs_is_active'), table_name='loyalty_programs')
    op.drop_index(op.f('ix_loyalty_programs_id'), table_name='loyalty_programs')
    op.drop_index('idx_loyalty_program_store_active', table_name='loyalty_programs')
    op.drop_table('loyalty_programs')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,587 @@
"""Phase 2: migrate loyalty module to merchant-based architecture
Revision ID: loyalty_003_phase2
Revises: 0fb5d6d6ff97
Create Date: 2026-02-06 20:30:00.000000
Phase 2 changes:
- loyalty_programs: store_id -> merchant_id (one program per merchant)
- loyalty_cards: add merchant_id, rename store_id -> enrolled_at_store_id
- loyalty_transactions: add merchant_id, add related_transaction_id, store_id nullable
- staff_pins: add merchant_id
- NEW TABLE: merchant_loyalty_settings
- NEW COLUMNS on loyalty_programs: points_expiration_days, welcome_bonus_points,
minimum_redemption_points, minimum_purchase_cents, tier_config
- NEW COLUMN on loyalty_cards: last_activity_at
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
from sqlalchemy import text
# revision identifiers, used by Alembic.
# This migration chains directly after 0fb5d6d6ff97 on the main revision line;
# no branch labels or cross-branch dependencies are declared.
revision: str = "loyalty_003_phase2"
down_revision: Union[str, None] = "0fb5d6d6ff97"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Apply the Phase 2 merchant-based loyalty schema.

    Ordering is significant for each table: a nullable merchant_id is
    added first, existing rows are backfilled from the owning store's
    merchant (stores.merchant_id), and only then is the column made
    NOT NULL and constrained.

    Steps:
      1. Create merchant_loyalty_settings (one row per merchant).
      2. loyalty_programs: store_id -> merchant_id (unique index: one
         program per merchant) plus new configuration columns.
      3. loyalty_cards: add merchant_id; rename store_id ->
         enrolled_at_store_id (nullable, FK SET NULL on store delete).
      4. loyalty_transactions: add merchant_id and related_transaction_id
         (void linkage); store_id becomes nullable with FK SET NULL.
      5. staff_pins: add merchant_id.

    NOTE(review): the backfill UPDATEs assume every referenced store row
    has a non-NULL merchant_id — otherwise the subsequent NOT NULL
    alterations will fail. Confirm before running on legacy data.
    """
    # =========================================================================
    # 1. Create merchant_loyalty_settings table
    # =========================================================================
    op.create_table(
        "merchant_loyalty_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("merchant_id", sa.Integer(), nullable=False),
        sa.Column(
            "staff_pin_policy",
            sa.String(length=20),
            nullable=False,
            server_default="required",
        ),
        sa.Column(
            "staff_pin_lockout_attempts",
            sa.Integer(),
            nullable=False,
            server_default="5",
        ),
        sa.Column(
            "staff_pin_lockout_minutes",
            sa.Integer(),
            nullable=False,
            server_default="30",
        ),
        sa.Column(
            "allow_self_enrollment",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("true"),
        ),
        sa.Column(
            "allow_void_transactions",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("true"),
        ),
        sa.Column(
            "allow_cross_location_redemption",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("true"),
        ),
        sa.Column(
            "require_order_reference",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("false"),
        ),
        sa.Column(
            "log_ip_addresses",
            sa.Boolean(),
            nullable=False,
            server_default=sa.text("true"),
        ),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["merchant_id"], ["merchants.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_merchant_loyalty_settings_id"),
        "merchant_loyalty_settings",
        ["id"],
        unique=False,
    )
    # Unique: at most one settings row per merchant.
    op.create_index(
        op.f("ix_merchant_loyalty_settings_merchant_id"),
        "merchant_loyalty_settings",
        ["merchant_id"],
        unique=True,
    )
    # =========================================================================
    # 2. Modify loyalty_programs: store_id -> merchant_id + new columns
    # =========================================================================
    # Add merchant_id (nullable first for data migration)
    op.add_column(
        "loyalty_programs", sa.Column("merchant_id", sa.Integer(), nullable=True)
    )
    # Migrate existing data: derive merchant_id from store_id
    op.execute(
        """
        UPDATE loyalty_programs lp
        SET merchant_id = v.merchant_id
        FROM stores v
        WHERE v.id = lp.store_id
        """
    )
    # Make merchant_id non-nullable
    op.alter_column("loyalty_programs", "merchant_id", nullable=False)
    # Add FK and indexes
    op.create_foreign_key(
        "fk_loyalty_programs_merchant_id",
        "loyalty_programs",
        "merchants",
        ["merchant_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # Unique: exactly one loyalty program per merchant.
    op.create_index(
        op.f("ix_loyalty_programs_merchant_id"),
        "loyalty_programs",
        ["merchant_id"],
        unique=True,
    )
    op.create_index(
        "idx_loyalty_program_merchant_active",
        "loyalty_programs",
        ["merchant_id", "is_active"],
    )
    # Add new Phase 2 columns (server defaults keep existing rows valid).
    op.add_column(
        "loyalty_programs",
        sa.Column("points_expiration_days", sa.Integer(), nullable=True),
    )
    op.add_column(
        "loyalty_programs",
        sa.Column(
            "welcome_bonus_points",
            sa.Integer(),
            nullable=False,
            server_default="0",
        ),
    )
    op.add_column(
        "loyalty_programs",
        sa.Column(
            "minimum_redemption_points",
            sa.Integer(),
            nullable=False,
            server_default="100",
        ),
    )
    op.add_column(
        "loyalty_programs",
        sa.Column(
            "minimum_purchase_cents",
            sa.Integer(),
            nullable=False,
            server_default="0",
        ),
    )
    op.add_column(
        "loyalty_programs",
        sa.Column("tier_config", sa.JSON(), nullable=True),
    )
    # Drop old store_id column and indexes
    op.drop_index("idx_loyalty_program_store_active", table_name="loyalty_programs")
    op.drop_index(
        op.f("ix_loyalty_programs_store_id"), table_name="loyalty_programs"
    )
    op.drop_constraint(
        "loyalty_programs_store_id_fkey", "loyalty_programs", type_="foreignkey"
    )
    op.drop_column("loyalty_programs", "store_id")
    # =========================================================================
    # 3. Modify loyalty_cards: add merchant_id, rename store_id
    # =========================================================================
    # Add merchant_id
    op.add_column(
        "loyalty_cards", sa.Column("merchant_id", sa.Integer(), nullable=True)
    )
    # Migrate data
    op.execute(
        """
        UPDATE loyalty_cards lc
        SET merchant_id = v.merchant_id
        FROM stores v
        WHERE v.id = lc.store_id
        """
    )
    op.alter_column("loyalty_cards", "merchant_id", nullable=False)
    op.create_foreign_key(
        "fk_loyalty_cards_merchant_id",
        "loyalty_cards",
        "merchants",
        ["merchant_id"],
        ["id"],
        ondelete="CASCADE",
    )
    op.create_index(
        op.f("ix_loyalty_cards_merchant_id"),
        "loyalty_cards",
        ["merchant_id"],
        unique=False,
    )
    op.create_index(
        "idx_loyalty_card_merchant_active",
        "loyalty_cards",
        ["merchant_id", "is_active"],
    )
    # Unique: one card per customer per merchant.
    op.create_index(
        "idx_loyalty_card_merchant_customer",
        "loyalty_cards",
        ["merchant_id", "customer_id"],
        unique=True,
    )
    # Rename store_id -> enrolled_at_store_id, make nullable, change FK
    op.drop_index("idx_loyalty_card_store_active", table_name="loyalty_cards")
    op.drop_index(op.f("ix_loyalty_cards_store_id"), table_name="loyalty_cards")
    op.drop_constraint(
        "loyalty_cards_store_id_fkey", "loyalty_cards", type_="foreignkey"
    )
    op.alter_column(
        "loyalty_cards",
        "store_id",
        new_column_name="enrolled_at_store_id",
        nullable=True,
    )
    # Cards survive a store deletion: FK is SET NULL, not CASCADE.
    op.create_foreign_key(
        "fk_loyalty_cards_enrolled_store",
        "loyalty_cards",
        "stores",
        ["enrolled_at_store_id"],
        ["id"],
        ondelete="SET NULL",
    )
    op.create_index(
        op.f("ix_loyalty_cards_enrolled_at_store_id"),
        "loyalty_cards",
        ["enrolled_at_store_id"],
        unique=False,
    )
    # Add last_activity_at
    op.add_column(
        "loyalty_cards",
        sa.Column("last_activity_at", sa.DateTime(timezone=True), nullable=True),
    )
    # =========================================================================
    # 4. Modify loyalty_transactions: add merchant_id, related_transaction_id
    # =========================================================================
    # Add merchant_id
    op.add_column(
        "loyalty_transactions",
        sa.Column("merchant_id", sa.Integer(), nullable=True),
    )
    # Migrate data (from card's merchant)
    op.execute(
        """
        UPDATE loyalty_transactions lt
        SET merchant_id = lc.merchant_id
        FROM loyalty_cards lc
        WHERE lc.id = lt.card_id
        """
    )
    op.alter_column("loyalty_transactions", "merchant_id", nullable=False)
    op.create_foreign_key(
        "fk_loyalty_transactions_merchant_id",
        "loyalty_transactions",
        "merchants",
        ["merchant_id"],
        ["id"],
        ondelete="CASCADE",
    )
    op.create_index(
        op.f("ix_loyalty_transactions_merchant_id"),
        "loyalty_transactions",
        ["merchant_id"],
        unique=False,
    )
    op.create_index(
        "idx_loyalty_tx_merchant_date",
        "loyalty_transactions",
        ["merchant_id", "transaction_at"],
    )
    op.create_index(
        "idx_loyalty_tx_merchant_store",
        "loyalty_transactions",
        ["merchant_id", "store_id"],
    )
    # Make store_id nullable and change FK to SET NULL so transaction
    # history outlives a deleted store.
    op.drop_constraint(
        "loyalty_transactions_store_id_fkey",
        "loyalty_transactions",
        type_="foreignkey",
    )
    op.alter_column("loyalty_transactions", "store_id", nullable=True)
    op.create_foreign_key(
        "fk_loyalty_transactions_store_id",
        "loyalty_transactions",
        "stores",
        ["store_id"],
        ["id"],
        ondelete="SET NULL",
    )
    # Add related_transaction_id (for void linkage) — self-referencing FK.
    op.add_column(
        "loyalty_transactions",
        sa.Column("related_transaction_id", sa.Integer(), nullable=True),
    )
    op.create_foreign_key(
        "fk_loyalty_tx_related",
        "loyalty_transactions",
        "loyalty_transactions",
        ["related_transaction_id"],
        ["id"],
        ondelete="SET NULL",
    )
    op.create_index(
        op.f("ix_loyalty_transactions_related_transaction_id"),
        "loyalty_transactions",
        ["related_transaction_id"],
        unique=False,
    )
    # =========================================================================
    # 5. Modify staff_pins: add merchant_id
    # =========================================================================
    op.add_column(
        "staff_pins", sa.Column("merchant_id", sa.Integer(), nullable=True)
    )
    # Migrate data (from store's merchant)
    op.execute(
        """
        UPDATE staff_pins sp
        SET merchant_id = v.merchant_id
        FROM stores v
        WHERE v.id = sp.store_id
        """
    )
    op.alter_column("staff_pins", "merchant_id", nullable=False)
    op.create_foreign_key(
        "fk_staff_pins_merchant_id",
        "staff_pins",
        "merchants",
        ["merchant_id"],
        ["id"],
        ondelete="CASCADE",
    )
    op.create_index(
        op.f("ix_staff_pins_merchant_id"),
        "staff_pins",
        ["merchant_id"],
        unique=False,
    )
    op.create_index(
        "idx_staff_pin_merchant_active",
        "staff_pins",
        ["merchant_id", "is_active"],
    )
def _constraint_exists(table: str, constraint: str) -> bool:
    """Return True when *constraint* is defined on *table* in the public schema."""
    row = op.get_bind().execute(
        text(
            "SELECT 1 FROM pg_constraint c "
            "JOIN pg_class r ON c.conrelid = r.oid "
            "JOIN pg_namespace n ON r.relnamespace = n.oid "
            "WHERE n.nspname = 'public' AND r.relname = :t AND c.conname = :c"
        ),
        {"t": table, "c": constraint},
    ).fetchone()
    return row is not None
def _col_exists(table: str, col: str) -> bool:
    """Return True when column *col* is present on *table* (public schema)."""
    bind = op.get_bind()
    row = bind.execute(
        text(
            "SELECT 1 FROM information_schema.columns "
            "WHERE table_schema='public' AND table_name=:t AND column_name=:c"
        ),
        {"t": table, "c": col},
    ).fetchone()
    return row is not None
def _index_exists(index: str) -> bool:
    """Return True when an index named *index* exists in the public schema."""
    row = op.get_bind().execute(
        text("SELECT 1 FROM pg_indexes WHERE schemaname='public' AND indexname=:i"),
        {"i": index},
    ).fetchone()
    return row is not None
def _table_exists(table: str) -> bool:
    """Return True when *table* exists in the public schema."""
    bind = op.get_bind()
    row = bind.execute(
        text(
            "SELECT 1 FROM information_schema.tables "
            "WHERE table_schema='public' AND table_name=:t"
        ),
        {"t": table},
    ).fetchone()
    return row is not None
def _safe_drop_index(name: str, table: str) -> None:
    """Drop index *name* from *table* when present; silently no-op otherwise."""
    if not _index_exists(name):
        return
    op.drop_index(name, table_name=table)
def _safe_drop_constraint(name: str, table: str, type_: str = "foreignkey") -> None:
    """Drop constraint *name* from *table* when present; silently no-op otherwise."""
    if not _constraint_exists(table, name):
        return
    op.drop_constraint(name, table, type_=type_)
def _safe_drop_column(table: str, col: str) -> None:
    """Drop column *col* from *table* when present; silently no-op otherwise."""
    if not _col_exists(table, col):
        return
    op.drop_column(table, col)
def downgrade() -> None:
    """Revert the Phase 2 changes, restoring the store-based loyalty schema.

    Runs the upgrade() steps in reverse order (5 -> 1) via the
    _safe_drop_* helpers, so the downgrade is idempotent and tolerant of a
    partially applied upgrade. A later migration (t001) may have renamed
    stores -> vendors and merchant_loyalty_settings ->
    company_loyalty_settings, so FK target and settings-table names are
    detected at runtime instead of hard-coded.

    NOTE(review): merchant-level data (merchant_id values, settings rows,
    void-linkage references) is discarded. loyalty_programs.store_id is
    re-added as nullable and never backfilled — the pre-Phase-2 NOT NULL
    constraint is not restored because merchant_id alone cannot identify a
    single store.
    """
    # t001 may have renamed stores→vendors and merchants→companies before us.
    # Detect the correct table name for FK references.
    stores_ref = "stores" if _table_exists("stores") else "vendors"
    # =========================================================================
    # 5. Revert staff_pins
    # =========================================================================
    _safe_drop_index("idx_staff_pin_merchant_active", "staff_pins")
    _safe_drop_index("ix_staff_pins_merchant_id", "staff_pins")
    _safe_drop_constraint("fk_staff_pins_merchant_id", "staff_pins")
    _safe_drop_column("staff_pins", "merchant_id")
    # =========================================================================
    # 4. Revert loyalty_transactions
    # =========================================================================
    _safe_drop_index("ix_loyalty_transactions_related_transaction_id", "loyalty_transactions")
    _safe_drop_constraint("fk_loyalty_tx_related", "loyalty_transactions")
    _safe_drop_column("loyalty_transactions", "related_transaction_id")
    _safe_drop_constraint("fk_loyalty_transactions_store_id", "loyalty_transactions")
    if _col_exists("loyalty_transactions", "store_id"):
        # Restore NOT NULL + CASCADE FK against the (possibly renamed) stores table.
        op.alter_column("loyalty_transactions", "store_id", nullable=False)
        op.create_foreign_key(
            "loyalty_transactions_store_id_fkey",
            "loyalty_transactions",
            stores_ref,
            ["store_id"],
            ["id"],
            ondelete="CASCADE",
        )
    _safe_drop_index("idx_loyalty_tx_merchant_store", "loyalty_transactions")
    _safe_drop_index("idx_loyalty_tx_merchant_date", "loyalty_transactions")
    _safe_drop_index("ix_loyalty_transactions_merchant_id", "loyalty_transactions")
    _safe_drop_constraint("fk_loyalty_transactions_merchant_id", "loyalty_transactions")
    _safe_drop_column("loyalty_transactions", "merchant_id")
    # =========================================================================
    # 3. Revert loyalty_cards
    # =========================================================================
    _safe_drop_column("loyalty_cards", "last_activity_at")
    # Rename enrolled_at_store_id back to store_id
    enrolled_col = "enrolled_at_store_id"
    if _col_exists("loyalty_cards", enrolled_col):
        _safe_drop_index("ix_loyalty_cards_enrolled_at_store_id", "loyalty_cards")
        _safe_drop_constraint("fk_loyalty_cards_enrolled_store", "loyalty_cards")
        op.alter_column(
            "loyalty_cards",
            enrolled_col,
            new_column_name="store_id",
            nullable=False,
        )
        op.create_foreign_key(
            "loyalty_cards_store_id_fkey",
            "loyalty_cards",
            stores_ref,
            ["store_id"],
            ["id"],
            ondelete="CASCADE",
        )
        op.create_index(
            op.f("ix_loyalty_cards_store_id"),
            "loyalty_cards",
            ["store_id"],
            unique=False,
        )
        op.create_index(
            "idx_loyalty_card_store_active",
            "loyalty_cards",
            ["store_id", "is_active"],
        )
    _safe_drop_index("idx_loyalty_card_merchant_customer", "loyalty_cards")
    _safe_drop_index("idx_loyalty_card_merchant_active", "loyalty_cards")
    _safe_drop_index("ix_loyalty_cards_merchant_id", "loyalty_cards")
    _safe_drop_constraint("fk_loyalty_cards_merchant_id", "loyalty_cards")
    _safe_drop_column("loyalty_cards", "merchant_id")
    # =========================================================================
    # 2. Revert loyalty_programs
    # =========================================================================
    if not _col_exists("loyalty_programs", "store_id"):
        # Re-added nullable: merchant_id cannot be mapped back to one store.
        op.add_column(
            "loyalty_programs",
            sa.Column("store_id", sa.Integer(), nullable=True),
        )
        op.create_foreign_key(
            "loyalty_programs_store_id_fkey",
            "loyalty_programs",
            stores_ref,
            ["store_id"],
            ["id"],
            ondelete="CASCADE",
        )
        op.create_index(
            op.f("ix_loyalty_programs_store_id"),
            "loyalty_programs",
            ["store_id"],
            unique=True,
        )
        op.create_index(
            "idx_loyalty_program_store_active",
            "loyalty_programs",
            ["store_id", "is_active"],
        )
    _safe_drop_column("loyalty_programs", "tier_config")
    _safe_drop_column("loyalty_programs", "minimum_purchase_cents")
    _safe_drop_column("loyalty_programs", "minimum_redemption_points")
    _safe_drop_column("loyalty_programs", "welcome_bonus_points")
    _safe_drop_column("loyalty_programs", "points_expiration_days")
    _safe_drop_index("idx_loyalty_program_merchant_active", "loyalty_programs")
    _safe_drop_index("ix_loyalty_programs_merchant_id", "loyalty_programs")
    _safe_drop_constraint("fk_loyalty_programs_merchant_id", "loyalty_programs")
    _safe_drop_column("loyalty_programs", "merchant_id")
    # =========================================================================
    # 1. Drop merchant_loyalty_settings table
    # =========================================================================
    # t001 may have renamed this to company_loyalty_settings
    settings_table = "merchant_loyalty_settings" if _table_exists("merchant_loyalty_settings") else "company_loyalty_settings"
    if _table_exists(settings_table):
        _safe_drop_index("ix_merchant_loyalty_settings_merchant_id", settings_table)
        _safe_drop_index("ix_merchant_loyalty_settings_id", settings_table)
        # Also check for indexes under the renamed table name
        _safe_drop_index("ix_company_loyalty_settings_merchant_id", settings_table)
        _safe_drop_index("ix_company_loyalty_settings_id", settings_table)
        op.drop_table(settings_table)

View File

@@ -0,0 +1,292 @@
"""add features table and seed data
Revision ID: n2c3d4e5f6a7
Revises: ba2c0ce78396
Create Date: 2025-12-31 10:00:00.000000
"""
import json
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "n2c3d4e5f6a7"
down_revision: Union[str, None] = "ba2c0ce78396"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
# ============================================================================
# Feature Definitions
# ============================================================================
# category, code, name, description, ui_location, ui_icon, ui_route, display_order
FEATURES = [
# Orders (category: orders)
("orders", "order_management", "Order Management", "View and manage orders", "sidebar", "clipboard-list", "/vendor/{code}/orders", 1),
("orders", "order_bulk_actions", "Bulk Order Actions", "Process multiple orders at once", "inline", None, None, 2),
("orders", "order_export", "Order Export", "Export orders to CSV/Excel", "inline", "download", None, 3),
("orders", "automation_rules", "Automation Rules", "Automatic order processing rules", "sidebar", "cog", "/vendor/{code}/automation", 4),
# Inventory (category: inventory)
("inventory", "inventory_basic", "Basic Inventory", "Track product stock levels", "sidebar", "cube", "/vendor/{code}/inventory", 1),
("inventory", "inventory_locations", "Warehouse Locations", "Manage multiple warehouse locations", "inline", "map-pin", None, 2),
("inventory", "inventory_purchase_orders", "Purchase Orders", "Create and manage purchase orders", "sidebar", "shopping-cart", "/vendor/{code}/purchase-orders", 3),
("inventory", "low_stock_alerts", "Low Stock Alerts", "Get notified when stock is low", "inline", "bell", None, 4),
# Analytics (category: analytics)
("analytics", "basic_reports", "Basic Reports", "Essential sales and order reports", "sidebar", "chart-pie", "/vendor/{code}/reports", 1),
("analytics", "analytics_dashboard", "Analytics Dashboard", "Advanced analytics with charts and trends", "sidebar", "chart-bar", "/vendor/{code}/analytics", 2),
("analytics", "custom_reports", "Custom Reports", "Build custom report configurations", "inline", "document-report", None, 3),
("analytics", "export_reports", "Export Reports", "Export reports to various formats", "inline", "download", None, 4),
# Invoicing (category: invoicing)
("invoicing", "invoice_lu", "Luxembourg Invoicing", "Generate compliant Luxembourg invoices", "sidebar", "document-text", "/vendor/{code}/invoices", 1),
("invoicing", "invoice_eu_vat", "EU VAT Support", "Handle EU VAT for cross-border sales", "inline", "globe", None, 2),
("invoicing", "invoice_bulk", "Bulk Invoicing", "Generate invoices in bulk", "inline", "document-duplicate", None, 3),
("invoicing", "accounting_export", "Accounting Export", "Export to accounting software formats", "inline", "calculator", None, 4),
# Integrations (category: integrations)
("integrations", "letzshop_sync", "Letzshop Sync", "Sync orders and products with Letzshop", "settings", "refresh", None, 1),
("integrations", "api_access", "API Access", "REST API access for custom integrations", "settings", "code", "/vendor/{code}/settings/api", 2),
("integrations", "webhooks", "Webhooks", "Receive real-time event notifications", "settings", "lightning-bolt", "/vendor/{code}/settings/webhooks", 3),
("integrations", "custom_integrations", "Custom Integrations", "Connect with any third-party service", "settings", "puzzle", None, 4),
# Team (category: team)
("team", "single_user", "Single User", "One user account", "api", None, None, 1),
("team", "team_basic", "Team Access", "Invite team members", "sidebar", "users", "/vendor/{code}/team", 2),
("team", "team_roles", "Team Roles", "Role-based permissions for team members", "inline", "shield-check", None, 3),
("team", "audit_log", "Audit Log", "Track all user actions", "sidebar", "clipboard-check", "/vendor/{code}/audit-log", 4),
# Branding (category: branding)
("branding", "basic_shop", "Basic Shop", "Your shop on the platform", "api", None, None, 1),
("branding", "custom_domain", "Custom Domain", "Use your own domain name", "settings", "globe-alt", None, 2),
("branding", "white_label", "White Label", "Remove platform branding entirely", "settings", "color-swatch", None, 3),
# Customers (category: customers)
("customers", "customer_view", "Customer View", "View customer information", "sidebar", "user-group", "/vendor/{code}/customers", 1),
("customers", "customer_export", "Customer Export", "Export customer data", "inline", "download", None, 2),
("customers", "customer_messaging", "Customer Messaging", "Send messages to customers", "inline", "chat", None, 3),
]
# ============================================================================
# Tier Feature Assignments
# ============================================================================
# tier_code -> list of feature codes
TIER_FEATURES = {
"essential": [
"order_management",
"inventory_basic",
"basic_reports",
"invoice_lu",
"letzshop_sync",
"single_user",
"basic_shop",
"customer_view",
],
"professional": [
# All Essential features
"order_management",
"order_bulk_actions",
"order_export",
"inventory_basic",
"inventory_locations",
"inventory_purchase_orders",
"low_stock_alerts",
"basic_reports",
"invoice_lu",
"invoice_eu_vat",
"letzshop_sync",
"team_basic",
"basic_shop",
"customer_view",
"customer_export",
],
"business": [
# All Professional features
"order_management",
"order_bulk_actions",
"order_export",
"automation_rules",
"inventory_basic",
"inventory_locations",
"inventory_purchase_orders",
"low_stock_alerts",
"basic_reports",
"analytics_dashboard",
"custom_reports",
"export_reports",
"invoice_lu",
"invoice_eu_vat",
"invoice_bulk",
"accounting_export",
"letzshop_sync",
"api_access",
"webhooks",
"team_basic",
"team_roles",
"audit_log",
"basic_shop",
"custom_domain",
"customer_view",
"customer_export",
"customer_messaging",
],
"enterprise": [
# All features
"order_management",
"order_bulk_actions",
"order_export",
"automation_rules",
"inventory_basic",
"inventory_locations",
"inventory_purchase_orders",
"low_stock_alerts",
"basic_reports",
"analytics_dashboard",
"custom_reports",
"export_reports",
"invoice_lu",
"invoice_eu_vat",
"invoice_bulk",
"accounting_export",
"letzshop_sync",
"api_access",
"webhooks",
"custom_integrations",
"team_basic",
"team_roles",
"audit_log",
"basic_shop",
"custom_domain",
"white_label",
"customer_view",
"customer_export",
"customer_messaging",
],
}
# Minimum tier for each feature (for upgrade prompts)
# Maps feature_code -> tier_code
MINIMUM_TIER = {
# Essential
"order_management": "essential",
"inventory_basic": "essential",
"basic_reports": "essential",
"invoice_lu": "essential",
"letzshop_sync": "essential",
"single_user": "essential",
"basic_shop": "essential",
"customer_view": "essential",
# Professional
"order_bulk_actions": "professional",
"order_export": "professional",
"inventory_locations": "professional",
"inventory_purchase_orders": "professional",
"low_stock_alerts": "professional",
"invoice_eu_vat": "professional",
"team_basic": "professional",
"customer_export": "professional",
# Business
"automation_rules": "business",
"analytics_dashboard": "business",
"custom_reports": "business",
"export_reports": "business",
"invoice_bulk": "business",
"accounting_export": "business",
"api_access": "business",
"webhooks": "business",
"team_roles": "business",
"audit_log": "business",
"custom_domain": "business",
"customer_messaging": "business",
# Enterprise
"custom_integrations": "enterprise",
"white_label": "enterprise",
}
def upgrade() -> None:
    """Create the features catalog table and seed feature/tier data.

    Steps:
      1. Create the ``features`` table (UI metadata plus an optional FK to
         the minimum subscription tier required for the feature).
      2. Insert one row per entry in ``FEATURES``, resolving each feature's
         minimum tier code via ``MINIMUM_TIER`` to a ``subscription_tiers.id``
         (NULL when no minimum tier applies or the tier row is absent).
      3. Write each tier's feature-code list from ``TIER_FEATURES`` into
         ``subscription_tiers.features`` as a JSON array.
    """
    # Create features table
    op.create_table(
        "features",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(50), nullable=False),
        sa.Column("name", sa.String(100), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("category", sa.String(50), nullable=False),
        sa.Column("ui_location", sa.String(50), nullable=True),
        sa.Column("ui_icon", sa.String(50), nullable=True),
        sa.Column("ui_route", sa.String(100), nullable=True),
        sa.Column("ui_badge_text", sa.String(20), nullable=True),
        sa.Column("minimum_tier_id", sa.Integer(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False, default=True),
        sa.Column("is_visible", sa.Boolean(), nullable=False, default=True),
        sa.Column("display_order", sa.Integer(), nullable=False, default=0),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["minimum_tier_id"], ["subscription_tiers.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_features_code", "features", ["code"], unique=True)
    op.create_index("ix_features_category", "features", ["category"], unique=False)
    op.create_index("idx_feature_category_order", "features", ["category", "display_order"])
    op.create_index("idx_feature_active_visible", "features", ["is_active", "is_visible"])
    # Get connection for data operations
    conn = op.get_bind()
    # Map tier code -> primary key so feature rows can reference minimum_tier_id.
    tier_ids = {}
    result = conn.execute(sa.text("SELECT id, code FROM subscription_tiers"))
    for row in result:
        tier_ids[row[1]] = row[0]
    # Insert features. Timestamps come from CURRENT_TIMESTAMP in the SQL
    # (the previous unused `now = sa.func.now()` local has been removed).
    for category, code, name, description, ui_location, ui_icon, ui_route, display_order in FEATURES:
        minimum_tier_code = MINIMUM_TIER.get(code)
        minimum_tier_id = tier_ids.get(minimum_tier_code) if minimum_tier_code else None
        conn.execute(
            sa.text("""
                INSERT INTO features (code, name, description, category, ui_location, ui_icon, ui_route,
                                      minimum_tier_id, is_active, is_visible, display_order, created_at, updated_at)
                VALUES (:code, :name, :description, :category, :ui_location, :ui_icon, :ui_route,
                        :minimum_tier_id, true, true, :display_order, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
            """),
            {
                "code": code,
                "name": name,
                "description": description,
                "category": category,
                "ui_location": ui_location,
                "ui_icon": ui_icon,
                "ui_route": ui_route,
                "minimum_tier_id": minimum_tier_id,
                "display_order": display_order,
            },
        )
    # Update subscription_tiers with feature arrays
    for tier_code, features in TIER_FEATURES.items():
        features_json = json.dumps(features)
        conn.execute(
            sa.text("UPDATE subscription_tiers SET features = :features WHERE code = :code"),
            {"features": features_json, "code": tier_code},
        )
def downgrade() -> None:
    """Drop the features table and reset tier feature arrays to empty."""
    # Undo the seeding: every tier goes back to an empty JSON feature list.
    bind = op.get_bind()
    bind.execute(sa.text("UPDATE subscription_tiers SET features = '[]'"))
    # Drop the indexes (reverse of creation order), then the table itself.
    for index_name in (
        "idx_feature_active_visible",
        "idx_feature_category_order",
        "ix_features_category",
        "ix_features_code",
    ):
        op.drop_index(index_name, table_name="features")
    op.drop_table("features")

View File

@@ -0,0 +1,144 @@
"""Add inventory_transactions table
Revision ID: o3c4d5e6f7a8
Revises: n2c3d4e5f6a7
Create Date: 2026-01-01
Adds an audit trail for inventory movements:
- Track all stock changes (reserve, fulfill, release, adjust, set)
- Link transactions to orders for traceability
- Store quantity snapshots for historical analysis
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "o3c4d5e6f7a8"
down_revision = "n2c3d4e5f6a7"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the inventory_transactions audit table and its indexes."""
    # Every supported kind of stock movement, stored as a native enum type.
    transaction_type_enum = sa.Enum(
        "reserve",
        "fulfill",
        "release",
        "adjust",
        "set",
        "import",
        "return",
        name="transactiontype",
    )
    op.create_table(
        "inventory_transactions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("product_id", sa.Integer(), nullable=False),
        sa.Column("inventory_id", sa.Integer(), nullable=True),
        sa.Column("transaction_type", transaction_type_enum, nullable=False),
        sa.Column("quantity_change", sa.Integer(), nullable=False),
        sa.Column("quantity_after", sa.Integer(), nullable=False),
        sa.Column("reserved_after", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("location", sa.String(), nullable=True),
        sa.Column("warehouse", sa.String(), nullable=True),
        sa.Column("order_id", sa.Integer(), nullable=True),
        sa.Column("order_number", sa.String(), nullable=True),
        sa.Column("reason", sa.Text(), nullable=True),
        sa.Column("created_by", sa.String(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"]),
        sa.ForeignKeyConstraint(["product_id"], ["products.id"]),
        sa.ForeignKeyConstraint(["inventory_id"], ["inventory.id"]),
        sa.ForeignKeyConstraint(["order_id"], ["orders.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    # Single-column lookup indexes, one per commonly-filtered column.
    for column_name in (
        "id",
        "vendor_id",
        "product_id",
        "inventory_id",
        "transaction_type",
        "order_id",
        "created_at",
    ):
        op.create_index(
            f"ix_inventory_transactions_{column_name}",
            "inventory_transactions",
            [column_name],
        )
    # Composite indexes covering the main query patterns.
    op.create_index(
        "idx_inv_tx_vendor_product",
        "inventory_transactions",
        ["vendor_id", "product_id"],
    )
    op.create_index(
        "idx_inv_tx_vendor_created",
        "inventory_transactions",
        ["vendor_id", "created_at"],
    )
    op.create_index(
        "idx_inv_tx_type_created",
        "inventory_transactions",
        ["transaction_type", "created_at"],
    )
def downgrade() -> None:
    """Drop inventory_transactions, its indexes, and the enum type."""
    # Indexes first (reverse of creation order), then the table.
    index_names = (
        "idx_inv_tx_type_created",
        "idx_inv_tx_vendor_created",
        "idx_inv_tx_vendor_product",
        "ix_inventory_transactions_created_at",
        "ix_inventory_transactions_order_id",
        "ix_inventory_transactions_transaction_type",
        "ix_inventory_transactions_inventory_id",
        "ix_inventory_transactions_product_id",
        "ix_inventory_transactions_vendor_id",
        "ix_inventory_transactions_id",
    )
    for index_name in index_names:
        op.drop_index(index_name, table_name="inventory_transactions")
    op.drop_table("inventory_transactions")
    # Drop the enum type that upgrade() created alongside the table.
    sa.Enum(name="transactiontype").drop(op.get_bind(), checkfirst=True)

View File

@@ -0,0 +1,39 @@
# alembic/versions/p4d5e6f7a8b9_add_shipped_quantity_to_order_items.py
"""Add shipped_quantity to order_items for partial shipments.

Tracks how many units of each order item have actually shipped, enabling
partial-shipment workflows.

Revision ID: p4d5e6f7a8b9
Revises: o3c4d5e6f7a8
Create Date: 2026-01-01 12:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'p4d5e6f7a8b9'
down_revision: Union[str, None] = 'o3c4d5e6f7a8'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add order_items.shipped_quantity and backfill fulfilled items."""
    # New counter; server_default keeps existing rows valid under NOT NULL.
    op.add_column(
        'order_items',
        sa.Column('shipped_quantity', sa.Integer(), nullable=False, server_default='0')
    )
    # Items already flagged inventory_fulfilled were fully shipped, so their
    # shipped_quantity equals the ordered quantity.
    backfill_sql = """
        UPDATE order_items
        SET shipped_quantity = quantity
        WHERE inventory_fulfilled = true
    """
    op.execute(backfill_sql)
def downgrade() -> None:
    """Remove the shipped_quantity column (backfilled data is lost)."""
    op.drop_column('order_items', 'shipped_quantity')

View File

@@ -0,0 +1,72 @@
# alembic/versions/q5e6f7a8b9c0_add_vat_fields_to_orders.py
"""Add VAT fields to orders table.

Adds vat_regime, vat_rate, vat_rate_label, and vat_destination_country
to enable proper VAT tracking at order creation time, aligned with
invoice VAT logic.

Revision ID: q5e6f7a8b9c0
Revises: p4d5e6f7a8b9
Create Date: 2026-01-02 10:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'q5e6f7a8b9c0'
down_revision: Union[str, None] = 'p4d5e6f7a8b9'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add the four VAT columns to orders and backfill existing rows.

    Backfill heuristic: 'domestic' for LU shipments, 'origin' for other
    EU destinations, 'exempt' otherwise; the destination country is only
    recorded for non-LU EU shipments.
    """
    # Add VAT regime (domestic, oss, reverse_charge, origin, exempt)
    op.add_column(
        'orders',
        sa.Column('vat_regime', sa.String(20), nullable=True)
    )
    # Add VAT rate as percentage (e.g., 17.00 for 17%)
    op.add_column(
        'orders',
        sa.Column('vat_rate', sa.Numeric(5, 2), nullable=True)
    )
    # Add human-readable VAT label (e.g., "Luxembourg VAT 17%")
    op.add_column(
        'orders',
        sa.Column('vat_rate_label', sa.String(100), nullable=True)
    )
    # Add destination country for cross-border sales (ISO code)
    op.add_column(
        'orders',
        sa.Column('vat_destination_country', sa.String(2), nullable=True)
    )
    # EU member states other than Luxembourg, as a quoted SQL IN-list.
    # Kept as a single constant so both CASE branches below cannot drift
    # apart (the list was previously duplicated inline, twice).
    eu_isos = (
        "'AT', 'BE', 'BG', 'HR', 'CY', 'CZ', 'DK', 'EE', 'FI', 'FR', 'DE', "
        "'GR', 'HU', 'IE', 'IT', 'LV', 'LT', 'MT', 'NL', 'PL', 'PT', 'RO', "
        "'SK', 'SI', 'ES', 'SE'"
    )
    # Populate VAT fields for existing orders based on shipping country.
    # Default to 'domestic' for LU orders and 'origin' for other EU orders.
    op.execute(f"""
        UPDATE orders
        SET vat_regime = CASE
            WHEN ship_country_iso = 'LU' THEN 'domestic'
            WHEN ship_country_iso IN ({eu_isos}) THEN 'origin'
            ELSE 'exempt'
        END,
        vat_destination_country = CASE
            WHEN ship_country_iso != 'LU' AND ship_country_iso IN ({eu_isos}) THEN ship_country_iso
            ELSE NULL
        END
        WHERE vat_regime IS NULL
    """)
def downgrade() -> None:
    """Drop the VAT columns in reverse order of their creation."""
    for column_name in ('vat_destination_country', 'vat_rate_label', 'vat_rate', 'vat_regime'):
        op.drop_column('orders', column_name)

View File

@@ -0,0 +1,144 @@
"""Add country_iso to customer_addresses
Revision ID: r6f7a8b9c0d1
Revises: q5e6f7a8b9c0
Create Date: 2026-01-02
Adds country_iso field to customer_addresses table and renames
country to country_name for clarity.
This migration is idempotent - it checks for existing columns before
making changes.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision = "r6f7a8b9c0d1"
down_revision = "q5e6f7a8b9c0"
branch_labels = None
depends_on = None
# Country name to ISO code mapping for backfill
COUNTRY_ISO_MAP = {
"Luxembourg": "LU",
"Germany": "DE",
"France": "FR",
"Belgium": "BE",
"Netherlands": "NL",
"Austria": "AT",
"Italy": "IT",
"Spain": "ES",
"Portugal": "PT",
"Poland": "PL",
"Czech Republic": "CZ",
"Czechia": "CZ",
"Slovakia": "SK",
"Hungary": "HU",
"Romania": "RO",
"Bulgaria": "BG",
"Greece": "GR",
"Croatia": "HR",
"Slovenia": "SI",
"Estonia": "EE",
"Latvia": "LV",
"Lithuania": "LT",
"Finland": "FI",
"Sweden": "SE",
"Denmark": "DK",
"Ireland": "IE",
"Cyprus": "CY",
"Malta": "MT",
"United Kingdom": "GB",
"Switzerland": "CH",
"United States": "US",
}
def get_column_names(connection, table_name):
    """Return the column names of *table_name* in the public schema (PostgreSQL)."""
    query = text(
        "SELECT column_name FROM information_schema.columns "
        "WHERE table_name = :table AND table_schema = 'public'"
    )
    rows = connection.execute(query, {"table": table_name})
    return [column_name for (column_name,) in rows]
def upgrade() -> None:
    """Rename country -> country_name and add a backfilled country_iso column.

    Idempotent: inspects the live schema first and skips work already done.
    """
    connection = op.get_bind()
    existing = get_column_names(connection, "customer_addresses")
    # Already in the target state? Then there is nothing to do.
    if "country_name" in existing and "country_iso" in existing:
        print(" Columns country_name and country_iso already exist, skipping")
        return
    # Legacy 'country' column still present -> rename it in place
    # (PostgreSQL supports a direct column rename).
    if "country" in existing and "country_name" not in existing:
        op.alter_column(
            "customer_addresses",
            "country",
            new_column_name="country_name",
        )
    # Add country_iso (nullable for now) if it is missing.
    if "country_iso" not in existing:
        op.add_column(
            "customer_addresses",
            sa.Column("country_iso", sa.String(5), nullable=True)
        )
    # Backfill ISO codes from the human-readable country names.
    for mapped_name, mapped_iso in COUNTRY_ISO_MAP.items():
        connection.execute(
            text(
                "UPDATE customer_addresses SET country_iso = :iso "
                "WHERE country_name = :name"
            ),
            {"iso": mapped_iso, "name": mapped_name},
        )
    # Any unmapped or NULL country names default to Luxembourg.
    connection.execute(
        text(
            "UPDATE customer_addresses SET country_iso = 'LU' "
            "WHERE country_iso IS NULL"
        )
    )
    # Every row now has a value, so the column can become NOT NULL.
    op.alter_column(
        "customer_addresses",
        "country_iso",
        existing_type=sa.String(5),
        nullable=False,
    )
def downgrade() -> None:
    """Revert country_name -> country and drop country_iso (idempotent)."""
    connection = op.get_bind()
    existing = get_column_names(connection, "customer_addresses")
    # Only rename back when the upgrade actually performed the rename.
    if "country_name" in existing and "country" not in existing:
        op.alter_column(
            "customer_addresses",
            "country_name",
            new_column_name="country",
        )
    if "country_iso" in existing:
        op.drop_column("customer_addresses", "country_iso")

View File

@@ -0,0 +1,40 @@
# alembic/versions/s7a8b9c0d1e2_add_storefront_locale_to_vendors.py
"""Add storefront_locale to vendors for currency formatting.

Revision ID: s7a8b9c0d1e2
Revises: r6f7a8b9c0d1
Create Date: 2026-01-02 20:00:00.000000

This migration adds a nullable storefront_locale field to vendors.
NULL means the vendor inherits from platform defaults.
Examples: 'fr-LU', 'de-DE', 'en-GB'
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "s7a8b9c0d1e2"
down_revision = "r6f7a8b9c0d1"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add storefront_locale column to vendors table."""
    # NULL means "inherit from platform default" — hence nullable.
    locale_column = sa.Column(
        "storefront_locale",
        sa.String(10),
        nullable=True,
        comment="Currency/number formatting locale (NULL = inherit from platform)",
    )
    op.add_column("vendors", locale_column)
def downgrade() -> None:
    """Remove storefront_locale column from vendors table."""
    # Vendors revert to inheriting the platform-wide locale.
    op.drop_column("vendors", "storefront_locale")

View File

@@ -0,0 +1,248 @@
"""Rename Company/Vendor to Merchant/Store terminology.
Revision ID: t001_terminology
Revises: loyalty_003_phase2
Create Date: 2026-02-06 22:00:00.000000
Major terminology migration:
- companies -> merchants
- vendors -> stores
- company_id -> merchant_id (in all child tables)
- vendor_id -> store_id (in all child tables)
- vendor_code -> store_code
- letzshop_vendor_id -> letzshop_store_id
- letzshop_vendor_slug -> letzshop_store_slug
- vendor_name -> store_name (in marketplace_products)
- All vendor-prefixed tables renamed to store-prefixed
- company_loyalty_settings -> merchant_loyalty_settings
- letzshop_vendor_cache -> letzshop_store_cache
"""
from typing import Sequence, Union
from alembic import op
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision: str = "t001_terminology"
down_revision: Union[str, None] = "loyalty_003_phase2"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def _col_exists(table: str, col: str) -> bool:
    """Return True when *table* has a column named *col* (public schema)."""
    query = text(
        "SELECT 1 FROM information_schema.columns "
        "WHERE table_schema='public' AND table_name=:t AND column_name=:c"
    )
    row = op.get_bind().execute(query, {"t": table, "c": col}).fetchone()
    return row is not None
def _table_exists(table: str) -> bool:
    """Return True when a table named *table* exists in the public schema."""
    query = text(
        "SELECT 1 FROM information_schema.tables "
        "WHERE table_schema='public' AND table_name=:t"
    )
    row = op.get_bind().execute(query, {"t": table}).fetchone()
    return row is not None
def upgrade() -> None:
    """Rename all Company/Vendor references to Merchant/Store.

    Order is significant: child-table *columns* are renamed first (while the
    parent tables still carry their old names), then the parent tables, then
    the vendor-prefixed child tables. The ``_col_exists``/``_table_exists``
    guards tolerate schema drift between fresh and legacy databases.
    """
    # ======================================================================
    # STEP 1: Rename columns in child tables FIRST (before renaming parent tables)
    # ======================================================================
    # --- company_id -> merchant_id ---
    op.alter_column("vendors", "company_id", new_column_name="merchant_id")
    # Loyalty tables: loyalty_003 already added merchant_id as a new column,
    # so company_id only exists on legacy databases (pre-loyalty_003).
    if _col_exists("loyalty_programs", "company_id"):
        op.alter_column("loyalty_programs", "company_id", new_column_name="merchant_id")
    if _col_exists("loyalty_cards", "company_id"):
        op.alter_column("loyalty_cards", "company_id", new_column_name="merchant_id")
    if _col_exists("loyalty_transactions", "company_id"):
        op.alter_column("loyalty_transactions", "company_id", new_column_name="merchant_id")
    if _table_exists("company_loyalty_settings"):
        op.alter_column("company_loyalty_settings", "company_id", new_column_name="merchant_id")
    if _col_exists("staff_pins", "company_id"):
        op.alter_column("staff_pins", "company_id", new_column_name="merchant_id")
    # --- vendor_id -> store_id (in all child tables) ---
    op.alter_column("products", "vendor_id", new_column_name="store_id")
    op.alter_column("customers", "vendor_id", new_column_name="store_id")
    op.alter_column("customer_addresses", "vendor_id", new_column_name="store_id")
    op.alter_column("orders", "vendor_id", new_column_name="store_id")
    op.alter_column("order_item_exceptions", "vendor_id", new_column_name="store_id")
    op.alter_column("invoices", "vendor_id", new_column_name="store_id")
    op.alter_column("inventory", "vendor_id", new_column_name="store_id")
    op.alter_column("inventory_transactions", "vendor_id", new_column_name="store_id")
    op.alter_column("marketplace_import_jobs", "vendor_id", new_column_name="store_id")
    op.alter_column("letzshop_fulfillment_queue", "vendor_id", new_column_name="store_id")
    op.alter_column("letzshop_sync_logs", "vendor_id", new_column_name="store_id")
    op.alter_column("letzshop_historical_import_jobs", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_users", "vendor_id", new_column_name="store_id")
    op.alter_column("roles", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_domains", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_platforms", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_addons", "vendor_id", new_column_name="store_id")
    # vendor_subscriptions may be absent on some databases — guard it.
    if _table_exists("vendor_subscriptions"):
        op.alter_column("vendor_subscriptions", "vendor_id", new_column_name="store_id")
    op.alter_column("billing_history", "vendor_id", new_column_name="store_id")
    op.alter_column("content_pages", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_themes", "vendor_id", new_column_name="store_id")
    op.alter_column("media_files", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_email_templates", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_email_settings", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_letzshop_credentials", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_onboarding", "vendor_id", new_column_name="store_id")
    op.alter_column("vendor_invoice_settings", "vendor_id", new_column_name="store_id")
    op.alter_column("staff_pins", "vendor_id", new_column_name="store_id")
    op.alter_column("loyalty_cards", "enrolled_at_vendor_id", new_column_name="enrolled_at_store_id")
    op.alter_column("loyalty_transactions", "vendor_id", new_column_name="store_id")
    # Columns that may not exist yet (defined in models but not yet migrated)
    if _col_exists("letzshop_fulfillment_queue", "claimed_by_vendor_id"):
        op.alter_column("letzshop_fulfillment_queue", "claimed_by_vendor_id", new_column_name="claimed_by_store_id")
    if _col_exists("admin_audit_logs", "vendor_id"):
        op.alter_column("admin_audit_logs", "vendor_id", new_column_name="store_id")
    if _col_exists("messages", "vendor_id"):
        op.alter_column("messages", "vendor_id", new_column_name="store_id")
    if _col_exists("conversations", "vendor_id"):
        op.alter_column("conversations", "vendor_id", new_column_name="store_id")
    if _table_exists("emails") and _col_exists("emails", "vendor_id"):
        op.alter_column("emails", "vendor_id", new_column_name="store_id")
    if _table_exists("carts") and _col_exists("carts", "vendor_id"):
        op.alter_column("carts", "vendor_id", new_column_name="store_id")
    # --- Other vendor-prefixed columns ---
    op.alter_column("vendors", "vendor_code", new_column_name="store_code")
    op.alter_column("vendors", "letzshop_vendor_id", new_column_name="letzshop_store_id")
    op.alter_column("vendors", "letzshop_vendor_slug", new_column_name="letzshop_store_slug")
    op.alter_column("marketplace_products", "vendor_name", new_column_name="store_name")
    # ======================================================================
    # STEP 2: Rename parent tables
    # ======================================================================
    op.rename_table("companies", "merchants")
    op.rename_table("vendors", "stores")
    # ======================================================================
    # STEP 3: Rename vendor-prefixed child tables
    # ======================================================================
    op.rename_table("vendor_users", "store_users")
    op.rename_table("vendor_domains", "store_domains")
    op.rename_table("vendor_platforms", "store_platforms")
    op.rename_table("vendor_themes", "store_themes")
    op.rename_table("vendor_email_templates", "store_email_templates")
    op.rename_table("vendor_email_settings", "store_email_settings")
    op.rename_table("vendor_addons", "store_addons")
    if _table_exists("vendor_subscriptions"):
        op.rename_table("vendor_subscriptions", "store_subscriptions")
    op.rename_table("vendor_letzshop_credentials", "store_letzshop_credentials")
    op.rename_table("vendor_onboarding", "store_onboarding")
    op.rename_table("vendor_invoice_settings", "store_invoice_settings")
    if _table_exists("company_loyalty_settings"):
        op.rename_table("company_loyalty_settings", "merchant_loyalty_settings")
    op.rename_table("letzshop_vendor_cache", "letzshop_store_cache")
def downgrade() -> None:
    """Revert all Merchant/Store references back to Company/Vendor.

    Mirrors the upgrade in reverse order: tables are renamed back first
    (STEP 1), then columns (STEP 2), so every ``alter_column`` targets the
    table under its restored legacy name.
    """
    # STEP 1: Revert table renames
    op.rename_table("letzshop_store_cache", "letzshop_vendor_cache")
    if _table_exists("merchant_loyalty_settings"):
        op.rename_table("merchant_loyalty_settings", "company_loyalty_settings")
    op.rename_table("store_invoice_settings", "vendor_invoice_settings")
    op.rename_table("store_onboarding", "vendor_onboarding")
    op.rename_table("store_letzshop_credentials", "vendor_letzshop_credentials")
    if _table_exists("store_subscriptions"):
        op.rename_table("store_subscriptions", "vendor_subscriptions")
    op.rename_table("store_addons", "vendor_addons")
    op.rename_table("store_email_settings", "vendor_email_settings")
    op.rename_table("store_email_templates", "vendor_email_templates")
    op.rename_table("store_themes", "vendor_themes")
    op.rename_table("store_platforms", "vendor_platforms")
    op.rename_table("store_domains", "vendor_domains")
    op.rename_table("store_users", "vendor_users")
    op.rename_table("stores", "vendors")
    op.rename_table("merchants", "companies")
    # STEP 2: Revert column renames
    op.alter_column("vendors", "store_code", new_column_name="vendor_code")
    op.alter_column("vendors", "letzshop_store_id", new_column_name="letzshop_vendor_id")
    op.alter_column("vendors", "letzshop_store_slug", new_column_name="letzshop_vendor_slug")
    op.alter_column("marketplace_products", "store_name", new_column_name="vendor_name")
    op.alter_column("vendors", "merchant_id", new_column_name="company_id")
    # Loyalty tables: only reverse if t001 upgrade actually renamed company_id.
    # On fresh DBs, loyalty_003 owns merchant_id — don't touch it.
    if _col_exists("loyalty_programs", "company_id"):
        pass  # Already reverted or was never renamed
    elif _col_exists("loyalty_programs", "merchant_id") and not _table_exists("loyalty_programs"):
        # NOTE(review): this branch can only fire if _col_exists reports a
        # column on a table that _table_exists says is missing — likely
        # defensive dead code; confirm the helpers' behavior on absent tables.
        pass  # Table gone, skip
    elif _col_exists("loyalty_programs", "merchant_id"):
        # Check if this is a legacy DB where company_id was renamed to merchant_id.
        # If loyalty_003 added merchant_id (fresh DB), store_id won't exist
        # (loyalty_003 removed it). Only rename if store_id exists (legacy path).
        if _col_exists("loyalty_programs", "store_id"):
            op.alter_column("loyalty_programs", "merchant_id", new_column_name="company_id")
    # Same legacy-vs-fresh discrimination for the other loyalty tables: the
    # presence of store_id marks the legacy path that t001 renamed.
    if _col_exists("loyalty_cards", "merchant_id") and _col_exists("loyalty_cards", "store_id"):
        op.alter_column("loyalty_cards", "merchant_id", new_column_name="company_id")
    if _col_exists("loyalty_transactions", "merchant_id") and _col_exists("loyalty_transactions", "store_id"):
        op.alter_column("loyalty_transactions", "merchant_id", new_column_name="company_id")
    if _table_exists("company_loyalty_settings") and _col_exists("company_loyalty_settings", "merchant_id"):
        op.alter_column("company_loyalty_settings", "merchant_id", new_column_name="company_id")
    if _col_exists("staff_pins", "merchant_id") and _col_exists("staff_pins", "store_id"):
        op.alter_column("staff_pins", "merchant_id", new_column_name="company_id")
    # Unconditional store_id -> vendor_id reversions on core tables.
    op.alter_column("products", "store_id", new_column_name="vendor_id")
    op.alter_column("customers", "store_id", new_column_name="vendor_id")
    op.alter_column("customer_addresses", "store_id", new_column_name="vendor_id")
    op.alter_column("orders", "store_id", new_column_name="vendor_id")
    op.alter_column("order_item_exceptions", "store_id", new_column_name="vendor_id")
    op.alter_column("invoices", "store_id", new_column_name="vendor_id")
    op.alter_column("inventory", "store_id", new_column_name="vendor_id")
    op.alter_column("inventory_transactions", "store_id", new_column_name="vendor_id")
    op.alter_column("marketplace_import_jobs", "store_id", new_column_name="vendor_id")
    op.alter_column("letzshop_fulfillment_queue", "store_id", new_column_name="vendor_id")
    op.alter_column("letzshop_sync_logs", "store_id", new_column_name="vendor_id")
    op.alter_column("letzshop_historical_import_jobs", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_users", "store_id", new_column_name="vendor_id")
    op.alter_column("roles", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_domains", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_platforms", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_addons", "store_id", new_column_name="vendor_id")
    if _table_exists("vendor_subscriptions"):
        op.alter_column("vendor_subscriptions", "store_id", new_column_name="vendor_id")
    op.alter_column("billing_history", "store_id", new_column_name="vendor_id")
    op.alter_column("content_pages", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_themes", "store_id", new_column_name="vendor_id")
    op.alter_column("media_files", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_email_templates", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_email_settings", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_letzshop_credentials", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_onboarding", "store_id", new_column_name="vendor_id")
    op.alter_column("vendor_invoice_settings", "store_id", new_column_name="vendor_id")
    op.alter_column("staff_pins", "store_id", new_column_name="vendor_id")
    op.alter_column("loyalty_cards", "enrolled_at_store_id", new_column_name="enrolled_at_vendor_id")
    op.alter_column("loyalty_transactions", "store_id", new_column_name="vendor_id")
    # Conditional columns
    if _col_exists("letzshop_fulfillment_queue", "claimed_by_store_id"):
        op.alter_column("letzshop_fulfillment_queue", "claimed_by_store_id", new_column_name="claimed_by_vendor_id")
    if _col_exists("admin_audit_logs", "store_id"):
        op.alter_column("admin_audit_logs", "store_id", new_column_name="vendor_id")
    if _col_exists("messages", "store_id"):
        op.alter_column("messages", "store_id", new_column_name="vendor_id")
    if _col_exists("conversations", "store_id"):
        op.alter_column("conversations", "store_id", new_column_name="vendor_id")
    if _table_exists("emails") and _col_exists("emails", "store_id"):
        op.alter_column("emails", "store_id", new_column_name="vendor_id")
    if _table_exists("carts") and _col_exists("carts", "store_id"):
        op.alter_column("carts", "store_id", new_column_name="vendor_id")

View File

@@ -0,0 +1,61 @@
"""Rename remaining vendor-named constraints and indexes to store.
Revision ID: t002_constraints
Revises: t001_terminology
Create Date: 2026-02-07 10:00:00.000000
Completes the Company/Vendor -> Merchant/Store terminology migration by
renaming 4 constraints and 12 indexes that still used "vendor" in their names.
"""
from typing import Sequence, Union
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "t002_constraints"
down_revision: Union[str, None] = "t001_terminology"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

# (old_name, new_name, table) — table is needed for RENAME CONSTRAINT.
# Names must match exactly what earlier migrations created; a mismatch makes
# the ALTER ... RENAME statements below fail.
CONSTRAINTS = [
    ("uq_vendor_marketplace_product", "uq_store_marketplace_product", "products"),
    ("uq_vendor_platform", "uq_store_platform", "store_platforms"),
    ("uq_vendor_domain", "uq_store_domain", "store_domains"),
    ("uq_vendor_email_template_code_language", "uq_store_email_template_code_language", "store_email_templates"),
]
# (old_name, new_name) — ALTER INDEX works without table name in PostgreSQL
INDEXES = [
    ("idx_product_vendor_active", "idx_product_store_active"),
    ("idx_product_vendor_featured", "idx_product_store_featured"),
    ("idx_product_vendor_sku", "idx_product_store_sku"),
    ("idx_vendor_platform_active", "idx_store_platform_active"),
    ("idx_vendor_platform_primary", "idx_store_platform_primary"),
    ("idx_vendor_primary", "idx_store_domain_primary"),
    ("idx_vendor_cache_city", "idx_letzshop_cache_city"),
    ("idx_vendor_cache_claimed", "idx_letzshop_cache_claimed"),
    ("idx_vendor_cache_active", "idx_letzshop_cache_active"),
    ("idx_vendor_addon_status", "idx_store_addon_status"),
    ("idx_vendor_addon_product", "idx_store_addon_product"),
    ("idx_vendor_email_settings_configured", "idx_store_email_settings_configured"),
]
def upgrade() -> None:
    """Rename every legacy vendor-named constraint and index to its store name."""
    for old_name, new_name, table_name in CONSTRAINTS:
        op.execute(f'ALTER TABLE "{table_name}" RENAME CONSTRAINT "{old_name}" TO "{new_name}"')
    for old_name, new_name in INDEXES:
        op.execute(f'ALTER INDEX "{old_name}" RENAME TO "{new_name}"')
def downgrade() -> None:
    """Restore the original vendor-named constraint and index names."""
    for old_name, new_name, table_name in CONSTRAINTS:
        op.execute(f'ALTER TABLE "{table_name}" RENAME CONSTRAINT "{new_name}" TO "{old_name}"')
    for old_name, new_name in INDEXES:
        op.execute(f'ALTER INDEX "{new_name}" RENAME TO "{old_name}"')

View File

@@ -0,0 +1,53 @@
"""add password_reset_tokens table
Revision ID: t8b9c0d1e2f3
Revises: s7a8b9c0d1e2
Create Date: 2026-01-03
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "t8b9c0d1e2f3"  # this migration's id
down_revision = "s7a8b9c0d1e2"  # immediate predecessor in the chain
branch_labels = None  # not part of a named branch
depends_on = None  # no cross-branch dependency
def upgrade() -> None:
    """Create password_reset_tokens with its customer and token-hash indexes."""
    op.create_table(
        "password_reset_tokens",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("customer_id", sa.Integer(), nullable=False),
        sa.Column("token_hash", sa.String(64), nullable=False),
        sa.Column("expires_at", sa.DateTime(), nullable=False),
        sa.Column("used_at", sa.DateTime(), nullable=True),
        sa.Column(
            "created_at", sa.DateTime(), server_default=sa.text("now()"), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["customer_id"],
            ["customers.id"],
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Index both lookup paths: by owning customer and by hashed token value.
    for indexed_column in ("customer_id", "token_hash"):
        op.create_index(
            f"ix_password_reset_tokens_{indexed_column}",
            "password_reset_tokens",
            [indexed_column],
        )
def downgrade() -> None:
    """Drop the password_reset_tokens indexes, then the table itself."""
    for index_name in (
        "ix_password_reset_tokens_token_hash",
        "ix_password_reset_tokens_customer_id",
    ):
        op.drop_index(index_name, table_name="password_reset_tokens")
    op.drop_table("password_reset_tokens")

View File

@@ -0,0 +1,114 @@
# alembic/versions/u9c0d1e2f3g4_add_vendor_email_templates.py
"""Add vendor email templates and enhance email_templates table.
Revision ID: u9c0d1e2f3g4
Revises: t8b9c0d1e2f3
Create Date: 2026-01-03
Changes:
- Add is_platform_only column to email_templates (templates that vendors cannot override)
- Add required_variables column to email_templates (JSON list of required variables)
- Create vendor_email_templates table for vendor-specific template overrides
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "u9c0d1e2f3g4"  # this migration's id
down_revision = "t8b9c0d1e2f3"  # password_reset_tokens migration
branch_labels = None
depends_on = None
def upgrade():
    """Add vendor-level email template overrides.

    - Adds is_platform_only / required_variables columns to email_templates.
    - Creates vendor_email_templates (per-vendor overrides) plus lookup indexes.
    - Adds a unique (code, language) index on email_templates so platform
      templates can be looked up reliably.
    """
    # Add new columns to email_templates
    op.add_column(
        "email_templates",
        # server_default="0" backfills existing rows as "not platform-only".
        sa.Column("is_platform_only", sa.Boolean(), nullable=False, server_default="0"),
    )
    op.add_column(
        "email_templates",
        # Per the module docstring: a JSON list of required variable names.
        sa.Column("required_variables", sa.Text(), nullable=True),
    )
    # Create vendor_email_templates table
    op.create_table(
        "vendor_email_templates",
        sa.Column("id", sa.Integer(), nullable=False, autoincrement=True),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("template_code", sa.String(100), nullable=False),
        sa.Column("language", sa.String(5), nullable=False, server_default="en"),
        sa.Column("name", sa.String(255), nullable=True),
        sa.Column("subject", sa.String(500), nullable=False),
        sa.Column("body_html", sa.Text(), nullable=False),
        sa.Column("body_text", sa.Text(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default="1"),
        sa.Column(
            "created_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
        sa.PrimaryKeyConstraint("id"),
        # Overrides are deleted together with their vendor.
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
            name="fk_vendor_email_templates_vendor_id",
            ondelete="CASCADE",
        ),
        # At most one override per vendor/template/language combination.
        sa.UniqueConstraint(
            "vendor_id",
            "template_code",
            "language",
            name="uq_vendor_email_template_code_language",
        ),
    )
    # Create indexes for performance
    op.create_index(
        "ix_vendor_email_templates_vendor_id",
        "vendor_email_templates",
        ["vendor_id"],
    )
    op.create_index(
        "ix_vendor_email_templates_template_code",
        "vendor_email_templates",
        ["template_code"],
    )
    # Composite index matching the exact override-resolution lookup.
    op.create_index(
        "ix_vendor_email_templates_lookup",
        "vendor_email_templates",
        ["vendor_id", "template_code", "language"],
    )
    # Add unique constraint to email_templates for code+language
    # This ensures we can reliably look up platform templates
    op.create_index(
        "ix_email_templates_code_language",
        "email_templates",
        ["code", "language"],
        unique=True,
    )
def downgrade():
    """Reverse the upgrade: drop indexes, the override table, then new columns."""
    index_drops = (
        ("ix_email_templates_code_language", "email_templates"),
        ("ix_vendor_email_templates_lookup", "vendor_email_templates"),
        ("ix_vendor_email_templates_template_code", "vendor_email_templates"),
        ("ix_vendor_email_templates_vendor_id", "vendor_email_templates"),
    )
    for index_name, table_name in index_drops:
        op.drop_index(index_name, table_name=table_name)
    # Drop vendor_email_templates table
    op.drop_table("vendor_email_templates")
    # Remove the columns the upgrade added to email_templates.
    for column_name in ("required_variables", "is_platform_only"):
        op.drop_column("email_templates", column_name)

View File

@@ -0,0 +1,102 @@
# alembic/versions/v0a1b2c3d4e5_add_vendor_email_settings.py
"""Add vendor email settings table.
Revision ID: v0a1b2c3d4e5
Revises: u9c0d1e2f3g4
Create Date: 2026-01-05
Changes:
- Create vendor_email_settings table for vendor SMTP/email provider configuration
- Vendors must configure this to send transactional emails
- Premium providers (SendGrid, Mailgun, SES) are tier-gated (Business+)
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "v0a1b2c3d4e5"  # this migration's id
down_revision = "u9c0d1e2f3g4"  # vendor email templates migration
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create vendor_email_settings (one row per vendor) with its indexes.

    NOTE(review): the ``default=...`` arguments on sa.Column below are
    client-side SQLAlchemy defaults and emit no DDL in a migration; rows
    inserted via raw SQL will not receive them. Confirm whether
    ``server_default`` was intended.
    """
    # Create vendor_email_settings table
    op.create_table(
        "vendor_email_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        # Sender Identity
        sa.Column("from_email", sa.String(255), nullable=False),
        sa.Column("from_name", sa.String(100), nullable=False),
        sa.Column("reply_to_email", sa.String(255), nullable=True),
        # Signature/Footer
        sa.Column("signature_text", sa.Text(), nullable=True),
        sa.Column("signature_html", sa.Text(), nullable=True),
        # Provider Configuration
        sa.Column("provider", sa.String(20), nullable=False, default="smtp"),
        # SMTP Settings
        sa.Column("smtp_host", sa.String(255), nullable=True),
        sa.Column("smtp_port", sa.Integer(), nullable=True, default=587),
        sa.Column("smtp_username", sa.String(255), nullable=True),
        sa.Column("smtp_password", sa.String(500), nullable=True),
        sa.Column("smtp_use_tls", sa.Boolean(), nullable=False, default=True),
        sa.Column("smtp_use_ssl", sa.Boolean(), nullable=False, default=False),
        # SendGrid Settings
        sa.Column("sendgrid_api_key", sa.String(500), nullable=True),
        # Mailgun Settings
        sa.Column("mailgun_api_key", sa.String(500), nullable=True),
        sa.Column("mailgun_domain", sa.String(255), nullable=True),
        # Amazon SES Settings
        sa.Column("ses_access_key_id", sa.String(100), nullable=True),
        sa.Column("ses_secret_access_key", sa.String(500), nullable=True),
        sa.Column("ses_region", sa.String(50), nullable=True, default="eu-west-1"),
        # Status & Verification
        sa.Column("is_configured", sa.Boolean(), nullable=False, default=False),
        sa.Column("is_verified", sa.Boolean(), nullable=False, default=False),
        sa.Column("last_verified_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("verification_error", sa.Text(), nullable=True),
        # Timestamps
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        # Constraints
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(
            ["vendor_id"],
            ["vendors.id"],
            name="fk_vendor_email_settings_vendor_id",
            ondelete="CASCADE",
        ),
        sa.UniqueConstraint("vendor_id", name="uq_vendor_email_settings_vendor_id"),
    )
    # Create indexes
    op.create_index(
        "ix_vendor_email_settings_id",
        "vendor_email_settings",
        ["id"],
        unique=False,
    )
    # NOTE(review): this unique index duplicates uq_vendor_email_settings_vendor_id
    # above — redundant but harmless; confirm before removing either.
    op.create_index(
        "ix_vendor_email_settings_vendor_id",
        "vendor_email_settings",
        ["vendor_id"],
        unique=True,
    )
    op.create_index(
        "idx_vendor_email_settings_configured",
        "vendor_email_settings",
        ["vendor_id", "is_configured"],
    )
def downgrade() -> None:
    """Remove the vendor_email_settings table together with its indexes."""
    # Indexes are dropped before the table they belong to.
    for index_name in (
        "idx_vendor_email_settings_configured",
        "ix_vendor_email_settings_vendor_id",
        "ix_vendor_email_settings_id",
    ):
        op.drop_index(index_name, table_name="vendor_email_settings")
    op.drop_table("vendor_email_settings")

View File

@@ -0,0 +1,108 @@
"""Add media library tables
Revision ID: w1b2c3d4e5f6
Revises: v0a1b2c3d4e5
Create Date: 2026-01-06 10:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "w1b2c3d4e5f6"  # this migration's id
down_revision: Union[str, None] = "v0a1b2c3d4e5"  # vendor email settings migration
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the media library tables: media_files and product_media.

    NOTE(review): ``default=`` and ``onupdate=`` on sa.Column are client-side
    SQLAlchemy behaviors and emit no DDL in a migration — e.g. updated_at will
    not auto-update at the database level. Confirm whether server-side
    defaults/triggers were intended.
    """
    # Create media_files table
    op.create_table(
        "media_files",
        sa.Column("id", sa.Integer(), primary_key=True, index=True),
        sa.Column("vendor_id", sa.Integer(), sa.ForeignKey("vendors.id"), nullable=False),
        # File identification
        sa.Column("filename", sa.String(255), nullable=False),
        sa.Column("original_filename", sa.String(255)),
        sa.Column("file_path", sa.String(500), nullable=False),
        # File properties
        sa.Column("media_type", sa.String(20), nullable=False),  # image, video, document
        sa.Column("mime_type", sa.String(100)),
        sa.Column("file_size", sa.Integer()),
        # Image/video dimensions
        sa.Column("width", sa.Integer()),
        sa.Column("height", sa.Integer()),
        # Thumbnail
        sa.Column("thumbnail_path", sa.String(500)),
        # Metadata
        sa.Column("alt_text", sa.String(500)),
        sa.Column("description", sa.Text()),
        sa.Column("folder", sa.String(100), default="general"),
        sa.Column("tags", sa.JSON()),
        sa.Column("extra_metadata", sa.JSON()),
        # Status
        sa.Column("is_optimized", sa.Boolean(), default=False),
        sa.Column("optimized_size", sa.Integer()),
        # Usage tracking
        sa.Column("usage_count", sa.Integer(), default=0),
        # Timestamps
        sa.Column("created_at", sa.DateTime(), server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(), onupdate=sa.func.now()),
    )
    # Create indexes for media_files
    op.create_index("idx_media_vendor_id", "media_files", ["vendor_id"])
    op.create_index("idx_media_vendor_folder", "media_files", ["vendor_id", "folder"])
    op.create_index("idx_media_vendor_type", "media_files", ["vendor_id", "media_type"])
    op.create_index("idx_media_filename", "media_files", ["filename"])
    # Create product_media table (many-to-many relationship)
    op.create_table(
        "product_media",
        sa.Column("id", sa.Integer(), primary_key=True, index=True),
        sa.Column(
            "product_id",
            sa.Integer(),
            sa.ForeignKey("products.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column(
            "media_id",
            sa.Integer(),
            sa.ForeignKey("media_files.id", ondelete="CASCADE"),
            nullable=False,
        ),
        # Usage type
        sa.Column("usage_type", sa.String(50), nullable=False, default="gallery"),
        # Display order for galleries
        sa.Column("display_order", sa.Integer(), default=0),
        # Variant-specific (no FK declared — presumably references a variants
        # table elsewhere; verify against the model)
        sa.Column("variant_id", sa.Integer()),
        # Timestamps
        sa.Column("created_at", sa.DateTime(), server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(), onupdate=sa.func.now()),
        # Unique constraint
        sa.UniqueConstraint("product_id", "media_id", "usage_type", name="uq_product_media_usage"),
    )
    # Create indexes for product_media
    op.create_index("idx_product_media_product", "product_media", ["product_id"])
    op.create_index("idx_product_media_media", "product_media", ["media_id"])
    # Note: Unique constraint is defined in the table creation above via SQLAlchemy model
    # SQLite doesn't support adding constraints after table creation
def downgrade() -> None:
    """Drop product_media first (it references media_files), then media_files."""
    for index_name in ("idx_product_media_media", "idx_product_media_product"):
        op.drop_index(index_name, table_name="product_media")
    op.drop_table("product_media")
    for index_name in (
        "idx_media_filename",
        "idx_media_vendor_type",
        "idx_media_vendor_folder",
        "idx_media_vendor_id",
    ):
        op.drop_index(index_name, table_name="media_files")
    op.drop_table("media_files")

View File

@@ -0,0 +1,43 @@
# alembic/versions/x2c3d4e5f6g7_make_marketplace_product_id_nullable.py
"""Make marketplace_product_id nullable for direct product creation.
Revision ID: x2c3d4e5f6g7
Revises: w1b2c3d4e5f6
Create Date: 2026-01-06 23:15:00.000000
"""
from collections.abc import Sequence
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "x2c3d4e5f6g7"  # this migration's id
down_revision: str = "w1b2c3d4e5f6"  # media library migration
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Relax products.marketplace_product_id to NULLable for direct creation."""
    # Batch mode recreates the table under SQLite, which lacks ALTER COLUMN.
    with op.batch_alter_table("products") as batch:
        batch.alter_column(
            "marketplace_product_id",
            existing_type=sa.Integer(),
            nullable=True,
        )
def downgrade() -> None:
    """Restore NOT NULL on marketplace_product_id.

    Fails if any existing row holds NULL in that column.
    """
    with op.batch_alter_table("products") as batch:
        batch.alter_column(
            "marketplace_product_id",
            existing_type=sa.Integer(),
            nullable=False,
        )

View File

@@ -0,0 +1,47 @@
# alembic/versions/y3d4e5f6g7h8_add_product_type_columns.py
"""Add is_digital and product_type columns to products table.
Makes Product fully independent from MarketplaceProduct for product type info.
Revision ID: y3d4e5f6g7h8
Revises: x2c3d4e5f6g7
Create Date: 2026-01-07 10:00:00.000000
"""
from collections.abc import Sequence
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "y3d4e5f6g7h8"  # this migration's id
down_revision: str = "x2c3d4e5f6g7"  # nullable marketplace_product_id migration
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Add is_digital and product_type to products, plus an is_digital index."""
    # Batch mode so the change also works on SQLite (table recreation).
    with op.batch_alter_table("products") as batch:
        batch.add_column(
            sa.Column("is_digital", sa.Boolean(), nullable=False, server_default="0")
        )
        batch.add_column(
            sa.Column(
                "product_type",
                sa.String(20),
                nullable=False,
                server_default="physical",
            )
        )
        batch.create_index("idx_product_is_digital", ["is_digital"])
def downgrade() -> None:
    """Drop the is_digital index and both product-type columns."""
    with op.batch_alter_table("products") as batch:
        batch.drop_index("idx_product_is_digital")
        for column_name in ("product_type", "is_digital"):
            batch.drop_column(column_name)

View File

@@ -0,0 +1,374 @@
"""add multi-platform support
Revision ID: z4e5f6a7b8c9
Revises: 1b398cf45e85
Create Date: 2026-01-18 12:00:00.000000
This migration adds multi-platform support:
1. Creates platforms table for business offerings (OMS, Loyalty, etc.)
2. Creates vendor_platforms junction table for many-to-many relationship
3. Adds platform_id and CMS limits to subscription_tiers
4. Adds platform_id and is_platform_page to content_pages
5. Inserts default "oms" platform
6. Backfills existing data to OMS platform
7. Creates vendor_platforms entries for existing vendors
"""
import json
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "z4e5f6a7b8c9"
down_revision: Union[str, None] = "1b398cf45e85"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

# Platform marketing page slugs (is_platform_page=True).
# Used in the upgrade backfill to flag platform-level marketing pages.
PLATFORM_PAGE_SLUGS = [
    "platform_homepage",
    "home",
    "pricing",
    "about",
    "contact",
    "faq",
    "terms",
    "privacy",
    "features",
    "integrations",
]
# CMS limits per tier; None means unlimited (stored as SQL NULL).
CMS_TIER_LIMITS = {
    "essential": {"cms_pages_limit": 3, "cms_custom_pages_limit": 0},
    "professional": {"cms_pages_limit": 10, "cms_custom_pages_limit": 5},
    "business": {"cms_pages_limit": 30, "cms_custom_pages_limit": 20},
    "enterprise": {"cms_pages_limit": None, "cms_custom_pages_limit": None},  # Unlimited
}
# CMS features per tier; merged into each tier's existing features JSON list.
CMS_TIER_FEATURES = {
    "essential": ["cms_basic"],
    "professional": ["cms_basic", "cms_custom_pages", "cms_seo"],
    "business": ["cms_basic", "cms_custom_pages", "cms_seo", "cms_templates"],
    "enterprise": ["cms_basic", "cms_custom_pages", "cms_unlimited_pages", "cms_templates", "cms_seo", "cms_scheduling"],
}
def upgrade() -> None:
    """Introduce multi-platform support and backfill existing data onto OMS.

    Ordering matters: platforms must exist before vendor_platforms and before
    any platform_id foreign key; content_pages.platform_id is only made NOT
    NULL after the backfill in step 6.

    NOTE(review): the raw SQL below interpolates values via f-strings. All
    interpolated values are module constants or values read from this same
    database, not user input — but bound parameters would still be safer.
    """
    conn = op.get_bind()
    # =========================================================================
    # 1. Create platforms table
    # =========================================================================
    op.create_table(
        "platforms",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(50), nullable=False),
        sa.Column("name", sa.String(100), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("domain", sa.String(255), nullable=True),
        sa.Column("path_prefix", sa.String(50), nullable=True),
        sa.Column("logo", sa.String(500), nullable=True),
        sa.Column("logo_dark", sa.String(500), nullable=True),
        sa.Column("favicon", sa.String(500), nullable=True),
        sa.Column("theme_config", sa.JSON(), nullable=True),
        sa.Column("default_language", sa.String(5), nullable=False, server_default="fr"),
        sa.Column("supported_languages", sa.JSON(), nullable=False),
        # NOTE(review): server_default="true" and the bare `true` literals in
        # the raw SQL below are PostgreSQL-style booleans — confirm the target
        # backend if this must also run on SQLite/MySQL.
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
        sa.Column("is_public", sa.Boolean(), nullable=False, server_default="true"),
        sa.Column("settings", sa.JSON(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_platforms_code", "platforms", ["code"], unique=True)
    op.create_index("ix_platforms_domain", "platforms", ["domain"], unique=True)
    op.create_index("ix_platforms_path_prefix", "platforms", ["path_prefix"], unique=True)
    op.create_index("idx_platform_active", "platforms", ["is_active"])
    op.create_index("idx_platform_public", "platforms", ["is_public", "is_active"])
    # =========================================================================
    # 2. Create vendor_platforms junction table
    # =========================================================================
    op.create_table(
        "vendor_platforms",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("vendor_id", sa.Integer(), nullable=False),
        sa.Column("platform_id", sa.Integer(), nullable=False),
        sa.Column("tier_id", sa.Integer(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default="true"),
        sa.Column("is_primary", sa.Boolean(), nullable=False, server_default="false"),
        sa.Column("custom_subdomain", sa.String(100), nullable=True),
        sa.Column("settings", sa.JSON(), nullable=True),
        sa.Column("joined_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.ForeignKeyConstraint(["vendor_id"], ["vendors.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["platform_id"], ["platforms.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["tier_id"], ["subscription_tiers.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_vendor_platforms_vendor_id", "vendor_platforms", ["vendor_id"])
    op.create_index("ix_vendor_platforms_platform_id", "vendor_platforms", ["platform_id"])
    op.create_index("ix_vendor_platforms_tier_id", "vendor_platforms", ["tier_id"])
    op.create_index("idx_vendor_platform_active", "vendor_platforms", ["vendor_id", "platform_id", "is_active"])
    op.create_index("idx_vendor_platform_primary", "vendor_platforms", ["vendor_id", "is_primary"])
    op.create_unique_constraint("uq_vendor_platform", "vendor_platforms", ["vendor_id", "platform_id"])
    # =========================================================================
    # 3. Add platform_id and CMS columns to subscription_tiers
    # =========================================================================
    # Add platform_id column (nullable for global tiers)
    op.add_column(
        "subscription_tiers",
        sa.Column("platform_id", sa.Integer(), nullable=True),
    )
    op.create_index("ix_subscription_tiers_platform_id", "subscription_tiers", ["platform_id"])
    op.create_foreign_key(
        "fk_subscription_tiers_platform_id",
        "subscription_tiers",
        "platforms",
        ["platform_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # Add CMS limit columns
    op.add_column(
        "subscription_tiers",
        sa.Column("cms_pages_limit", sa.Integer(), nullable=True),
    )
    op.add_column(
        "subscription_tiers",
        sa.Column("cms_custom_pages_limit", sa.Integer(), nullable=True),
    )
    op.create_index("idx_tier_platform_active", "subscription_tiers", ["platform_id", "is_active"])
    # =========================================================================
    # 4. Add platform_id and is_platform_page to content_pages
    # =========================================================================
    # Add platform_id column (will be set to NOT NULL after backfill)
    op.add_column(
        "content_pages",
        sa.Column("platform_id", sa.Integer(), nullable=True),
    )
    op.create_index("ix_content_pages_platform_id", "content_pages", ["platform_id"])
    # Add is_platform_page column
    op.add_column(
        "content_pages",
        sa.Column("is_platform_page", sa.Boolean(), nullable=False, server_default="false"),
    )
    op.create_index("idx_platform_page_type", "content_pages", ["platform_id", "is_platform_page"])
    # =========================================================================
    # 5. Insert default OMS platform
    # =========================================================================
    conn.execute(
        sa.text("""
            INSERT INTO platforms (code, name, description, domain, path_prefix, default_language,
                                   supported_languages, is_active, is_public, theme_config, settings,
                                   created_at, updated_at)
            VALUES ('oms', 'Wizamart OMS', 'Order Management System for Luxembourg merchants',
                    'oms.lu', 'oms', 'fr', '["fr", "de", "en"]', true, true, '{}', '{}',
                    CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
        """)
    )
    # Get OMS platform ID for backfilling
    result = conn.execute(sa.text("SELECT id FROM platforms WHERE code = 'oms'"))
    oms_platform_id = result.fetchone()[0]
    # =========================================================================
    # 6. Backfill content_pages with platform_id
    # =========================================================================
    conn.execute(
        sa.text(f"UPDATE content_pages SET platform_id = {oms_platform_id} WHERE platform_id IS NULL")
    )
    # Set is_platform_page=True for platform marketing page slugs
    # Only for pages that have vendor_id=NULL (platform-level pages)
    slugs_list = ", ".join([f"'{slug}'" for slug in PLATFORM_PAGE_SLUGS])
    conn.execute(
        sa.text(f"""
            UPDATE content_pages
            SET is_platform_page = true
            WHERE vendor_id IS NULL AND slug IN ({slugs_list})
        """)
    )
    # Make platform_id NOT NULL after backfill
    op.alter_column("content_pages", "platform_id", nullable=False)
    # Add foreign key constraint
    op.create_foreign_key(
        "fk_content_pages_platform_id",
        "content_pages",
        "platforms",
        ["platform_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # =========================================================================
    # 7. Update content_pages constraints
    # =========================================================================
    # Drop old unique constraint
    op.drop_constraint("uq_vendor_slug", "content_pages", type_="unique")
    # Create new unique constraint including platform_id
    op.create_unique_constraint(
        "uq_platform_vendor_slug",
        "content_pages",
        ["platform_id", "vendor_id", "slug"],
    )
    # Update indexes
    op.drop_index("idx_vendor_published", table_name="content_pages")
    op.drop_index("idx_slug_published", table_name="content_pages")
    op.create_index("idx_platform_vendor_published", "content_pages", ["platform_id", "vendor_id", "is_published"])
    op.create_index("idx_platform_slug_published", "content_pages", ["platform_id", "slug", "is_published"])
    # =========================================================================
    # 8. Update subscription_tiers with CMS limits
    # =========================================================================
    # Values interpolated here come from the hardcoded module constants above.
    for tier_code, limits in CMS_TIER_LIMITS.items():
        cms_pages = limits["cms_pages_limit"] if limits["cms_pages_limit"] is not None else "NULL"
        cms_custom = limits["cms_custom_pages_limit"] if limits["cms_custom_pages_limit"] is not None else "NULL"
        conn.execute(
            sa.text(f"""
                UPDATE subscription_tiers
                SET cms_pages_limit = {cms_pages},
                    cms_custom_pages_limit = {cms_custom}
                WHERE code = '{tier_code}'
            """)
        )
    # Add CMS features to each tier
    for tier_code, cms_features in CMS_TIER_FEATURES.items():
        # Get current features
        result = conn.execute(
            sa.text(f"SELECT features FROM subscription_tiers WHERE code = '{tier_code}'")
        )
        row = result.fetchone()
        # features may be stored as a JSON string or already decoded by the driver.
        if row and row[0]:
            current_features = json.loads(row[0]) if isinstance(row[0], str) else row[0]
        else:
            current_features = []
        # Add CMS features that aren't already present
        for feature in cms_features:
            if feature not in current_features:
                current_features.append(feature)
        # Update features
        features_json = json.dumps(current_features)
        conn.execute(
            sa.text(f"UPDATE subscription_tiers SET features = '{features_json}' WHERE code = '{tier_code}'")
        )
    # =========================================================================
    # 9. Create vendor_platforms entries for existing vendors
    # =========================================================================
    # Get all vendors with their subscription tier_id
    conn.execute(
        sa.text(f"""
            INSERT INTO vendor_platforms (vendor_id, platform_id, tier_id, is_active, is_primary,
                                          joined_at, created_at, updated_at)
            SELECT v.id, {oms_platform_id}, vs.tier_id, v.is_active, true,
                   v.created_at, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP
            FROM vendors v
            LEFT JOIN vendor_subscriptions vs ON vs.vendor_id = v.id
        """)
    )
    # =========================================================================
    # 10. Add CMS feature records to features table
    # =========================================================================
    # Get minimum tier IDs for CMS features
    tier_ids = {}
    result = conn.execute(sa.text("SELECT id, code FROM subscription_tiers"))
    for row in result:
        tier_ids[row[1]] = row[0]
    # (category, code, name, description, ui_location, ui_icon, ui_route,
    #  minimum_tier_id, display_order). NOTE(review): ui_route is unpacked
    # below but NULL is hardcoded in the INSERT — the tuple slot is unused.
    cms_features = [
        ("cms", "cms_basic", "Basic CMS", "Override default pages with custom content", "settings", "document-text", None, tier_ids.get("essential"), 1),
        ("cms", "cms_custom_pages", "Custom Pages", "Create custom pages beyond defaults", "settings", "document-add", None, tier_ids.get("professional"), 2),
        ("cms", "cms_unlimited_pages", "Unlimited Pages", "No page limit", "settings", "documents", None, tier_ids.get("enterprise"), 3),
        ("cms", "cms_templates", "Page Templates", "Access to page templates", "settings", "template", None, tier_ids.get("business"), 4),
        ("cms", "cms_seo", "Advanced SEO", "SEO metadata and optimization", "settings", "search", None, tier_ids.get("professional"), 5),
        ("cms", "cms_scheduling", "Page Scheduling", "Schedule page publish/unpublish", "settings", "clock", None, tier_ids.get("enterprise"), 6),
    ]
    for category, code, name, description, ui_location, ui_icon, ui_route, minimum_tier_id, display_order in cms_features:
        min_tier_val = minimum_tier_id if minimum_tier_id else "NULL"
        conn.execute(
            sa.text(f"""
                INSERT INTO features (code, name, description, category, ui_location, ui_icon, ui_route,
                                      minimum_tier_id, is_active, is_visible, display_order, created_at, updated_at)
                VALUES ('{code}', '{name}', '{description}', '{category}', '{ui_location}', '{ui_icon}', NULL,
                        {min_tier_val}, true, true, {display_order}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
                ON CONFLICT (code) DO NOTHING
            """)
        )
def downgrade() -> None:
    """Revert the platform/CMS schema changes introduced by this revision.

    Mirrors ``upgrade()`` in reverse order:
    1. Delete the CMS feature rows from ``features``.
    2. Strip ``cms_*`` codes out of each tier's ``features`` JSON list.
    3. Drop the ``vendor_platforms`` table.
    4. Restore the pre-platform ``content_pages`` constraints/indexes.
    5. Remove the platform columns from ``subscription_tiers``.
    6. Drop the ``platforms`` table.
    """
    conn = op.get_bind()
    # 1. Remove CMS feature records from the features table.
    conn.execute(
        sa.text("""
            DELETE FROM features
            WHERE code IN ('cms_basic', 'cms_custom_pages', 'cms_unlimited_pages',
                           'cms_templates', 'cms_seo', 'cms_scheduling')
        """)
    )
    # 2. Remove CMS feature codes from subscription_tiers.features JSON.
    # Materialize the result first: issuing UPDATEs on the same connection
    # while the SELECT cursor is still open can fail on some DBAPI drivers.
    rows = conn.execute(
        sa.text("SELECT id, code, features FROM subscription_tiers")
    ).fetchall()
    for tier_id, _tier_code, features_data in rows:
        if not features_data:
            continue
        # The column may come back as a JSON string or an already-decoded list
        # depending on the backend/driver.
        current_features = (
            json.loads(features_data) if isinstance(features_data, str) else features_data
        )
        updated_features = [f for f in current_features if not f.startswith("cms_")]
        # Bind parameters instead of interpolating into the SQL text so the
        # JSON payload cannot break (or inject into) the statement.
        conn.execute(
            sa.text("UPDATE subscription_tiers SET features = :features WHERE id = :tier_id"),
            {"features": json.dumps(updated_features), "tier_id": tier_id},
        )
    # 3. Drop vendor_platforms table.
    op.drop_table("vendor_platforms")
    # 4. Restore old content_pages constraints and indexes.
    op.drop_constraint("uq_platform_vendor_slug", "content_pages", type_="unique")
    op.drop_index("idx_platform_vendor_published", table_name="content_pages")
    op.drop_index("idx_platform_slug_published", table_name="content_pages")
    op.drop_index("idx_platform_page_type", table_name="content_pages")
    op.drop_constraint("fk_content_pages_platform_id", "content_pages", type_="foreignkey")
    op.drop_index("ix_content_pages_platform_id", table_name="content_pages")
    op.drop_column("content_pages", "is_platform_page")
    op.drop_column("content_pages", "platform_id")
    op.create_unique_constraint("uq_vendor_slug", "content_pages", ["vendor_id", "slug"])
    op.create_index("idx_vendor_published", "content_pages", ["vendor_id", "is_published"])
    op.create_index("idx_slug_published", "content_pages", ["slug", "is_published"])
    # 5. Remove subscription_tiers platform columns.
    op.drop_index("idx_tier_platform_active", table_name="subscription_tiers")
    op.drop_constraint("fk_subscription_tiers_platform_id", "subscription_tiers", type_="foreignkey")
    op.drop_index("ix_subscription_tiers_platform_id", table_name="subscription_tiers")
    op.drop_column("subscription_tiers", "cms_custom_pages_limit")
    op.drop_column("subscription_tiers", "cms_pages_limit")
    op.drop_column("subscription_tiers", "platform_id")
    # 6. Drop platforms table.
    op.drop_index("idx_platform_public", table_name="platforms")
    op.drop_index("idx_platform_active", table_name="platforms")
    op.drop_index("ix_platforms_path_prefix", table_name="platforms")
    op.drop_index("ix_platforms_domain", table_name="platforms")
    op.drop_index("ix_platforms_code", table_name="platforms")
    op.drop_table("platforms")

View File

@@ -0,0 +1,424 @@
"""add loyalty platform
Revision ID: z5f6g7h8i9j0
Revises: z4e5f6a7b8c9
Create Date: 2026-01-19 12:00:00.000000
This migration adds the Loyalty+ platform:
1. Inserts loyalty platform record
2. Creates platform marketing pages (home, pricing, features, how-it-works)
3. Creates vendor default pages (about, rewards-catalog, terms, privacy)
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "z5f6g7h8i9j0"
down_revision: Union[str, None] = "z4e5f6a7b8c9"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Seed the Loyalty+ platform and its initial CMS content.

    Three data-only steps against the migration connection:
    1. Insert the ``loyalty`` row into ``platforms``.
    2. Insert platform marketing pages (``is_platform_page = true``).
    3. Insert vendor default pages (``is_platform_page = false``).

    NOTE(review): both page sets are inserted with ``vendor_id = NULL`` —
    presumably vendor defaults are resolved per-vendor at read time; confirm
    against the CMS page-lookup logic.
    """
    conn = op.get_bind()
    # =========================================================================
    # 1. Insert Loyalty platform
    # =========================================================================
    conn.execute(
        sa.text("""
            INSERT INTO platforms (code, name, description, domain, path_prefix, default_language,
                                   supported_languages, is_active, is_public, theme_config, settings,
                                   created_at, updated_at)
            VALUES ('loyalty', 'Loyalty+', 'Customer loyalty program platform for Luxembourg businesses',
                    'loyalty.lu', 'loyalty', 'fr', '["fr", "de", "en"]', true, true,
                    '{"primary_color": "#8B5CF6", "secondary_color": "#A78BFA"}',
                    '{"features": ["points", "rewards", "tiers", "analytics"]}',
                    CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
        """)
    )
    # Get the Loyalty platform ID (needed as FK for every page row below).
    result = conn.execute(sa.text("SELECT id FROM platforms WHERE code = 'loyalty'"))
    loyalty_platform_id = result.fetchone()[0]
    # =========================================================================
    # 2. Create platform marketing pages (is_platform_page=True)
    # =========================================================================
    # Each dict maps directly onto the INSERT parameters; "content" is raw HTML
    # stored verbatim with content_format='html'.
    platform_pages = [
        {
            "slug": "home",
            "title": "Loyalty+ - Customer Loyalty Platform",
            "content": """<div class="hero-section">
<h1>Build Customer Loyalty That Lasts</h1>
<p class="lead">Reward your customers, increase retention, and grow your business with Loyalty+</p>
</div>
<div class="features-grid">
<div class="feature">
<h3>Points & Rewards</h3>
<p>Create custom point systems that incentivize repeat purchases and customer engagement.</p>
</div>
<div class="feature">
<h3>Member Tiers</h3>
<p>Reward your best customers with exclusive benefits and VIP treatment.</p>
</div>
<div class="feature">
<h3>Real-time Analytics</h3>
<p>Track program performance and customer behavior with detailed insights.</p>
</div>
</div>""",
            "meta_description": "Loyalty+ is Luxembourg's leading customer loyalty platform. Build lasting relationships with your customers through points, rewards, and personalized experiences.",
            "show_in_header": False,
            "show_in_footer": False,
            "display_order": 0,
        },
        {
            "slug": "pricing",
            "title": "Pricing - Loyalty+",
            "content": """<div class="pricing-header">
<h1>Simple, Transparent Pricing</h1>
<p>Choose the plan that fits your business</p>
</div>
<div class="pricing-grid">
<div class="pricing-card">
<h3>Starter</h3>
<div class="price">€49<span>/month</span></div>
<ul>
<li>Up to 500 members</li>
<li>Basic point system</li>
<li>Email support</li>
<li>Standard rewards</li>
</ul>
</div>
<div class="pricing-card featured">
<h3>Growth</h3>
<div class="price">€149<span>/month</span></div>
<ul>
<li>Up to 5,000 members</li>
<li>Advanced point rules</li>
<li>Priority support</li>
<li>Custom rewards</li>
<li>Member tiers</li>
<li>Analytics dashboard</li>
</ul>
</div>
<div class="pricing-card">
<h3>Enterprise</h3>
<div class="price">Custom</div>
<ul>
<li>Unlimited members</li>
<li>Full API access</li>
<li>Dedicated support</li>
<li>Custom integrations</li>
<li>White-label options</li>
</ul>
</div>
</div>""",
            "meta_description": "Loyalty+ pricing plans starting at €49/month. Choose Starter, Growth, or Enterprise for your customer loyalty program.",
            "show_in_header": True,
            "show_in_footer": True,
            "display_order": 1,
        },
        {
            "slug": "features",
            "title": "Features - Loyalty+",
            "content": """<div class="features-header">
<h1>Powerful Features for Modern Loyalty</h1>
<p>Everything you need to build and manage a successful loyalty program</p>
</div>
<div class="feature-section">
<h2>Points & Earning Rules</h2>
<p>Create flexible point systems with custom earning rules based on purchases, actions, or special events.</p>
<ul>
<li>Points per euro spent</li>
<li>Bonus point campaigns</li>
<li>Birthday & anniversary rewards</li>
<li>Referral bonuses</li>
</ul>
</div>
<div class="feature-section">
<h2>Rewards Catalog</h2>
<p>Offer enticing rewards that keep customers coming back.</p>
<ul>
<li>Discount vouchers</li>
<li>Free products</li>
<li>Exclusive experiences</li>
<li>Partner rewards</li>
</ul>
</div>
<div class="feature-section">
<h2>Member Tiers</h2>
<p>Recognize and reward your most loyal customers with tiered benefits.</p>
<ul>
<li>Bronze, Silver, Gold, Platinum levels</li>
<li>Automatic tier progression</li>
<li>Exclusive tier benefits</li>
<li>VIP experiences</li>
</ul>
</div>
<div class="feature-section">
<h2>Analytics & Insights</h2>
<p>Make data-driven decisions with comprehensive analytics.</p>
<ul>
<li>Member activity tracking</li>
<li>Redemption analytics</li>
<li>ROI calculations</li>
<li>Custom reports</li>
</ul>
</div>""",
            "meta_description": "Explore Loyalty+ features: points systems, rewards catalog, member tiers, and analytics. Build the perfect loyalty program for your business.",
            "show_in_header": True,
            "show_in_footer": True,
            "display_order": 2,
        },
        {
            "slug": "how-it-works",
            "title": "How It Works - Loyalty+",
            "content": """<div class="how-header">
<h1>Getting Started is Easy</h1>
<p>Launch your loyalty program in just a few steps</p>
</div>
<div class="steps">
<div class="step">
<div class="step-number">1</div>
<h3>Sign Up</h3>
<p>Create your account and choose your plan. No credit card required for the free trial.</p>
</div>
<div class="step">
<div class="step-number">2</div>
<h3>Configure Your Program</h3>
<p>Set up your point rules, rewards, and member tiers using our intuitive dashboard.</p>
</div>
<div class="step">
<div class="step-number">3</div>
<h3>Integrate</h3>
<p>Connect Loyalty+ to your POS, e-commerce, or app using our APIs and plugins.</p>
</div>
<div class="step">
<div class="step-number">4</div>
<h3>Launch & Grow</h3>
<p>Invite your customers and watch your loyalty program drive results.</p>
</div>
</div>
<div class="cta-section">
<h2>Ready to Build Customer Loyalty?</h2>
<p>Start your free 14-day trial today.</p>
<a href="/loyalty/signup" class="btn-primary">Get Started Free</a>
</div>""",
            "meta_description": "Learn how to launch your Loyalty+ program in 4 easy steps. Sign up, configure, integrate, and start building customer loyalty today.",
            "show_in_header": True,
            "show_in_footer": True,
            "display_order": 3,
        },
    ]
    # Platform pages are inserted without show_in_legal (column default applies).
    for page in platform_pages:
        conn.execute(
            sa.text("""
                INSERT INTO content_pages (platform_id, vendor_id, slug, title, content, content_format,
                                           meta_description, is_published, is_platform_page,
                                           show_in_header, show_in_footer, display_order,
                                           created_at, updated_at)
                VALUES (:platform_id, NULL, :slug, :title, :content, 'html',
                        :meta_description, true, true,
                        :show_in_header, :show_in_footer, :display_order,
                        CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
            """),
            {
                "platform_id": loyalty_platform_id,
                "slug": page["slug"],
                "title": page["title"],
                "content": page["content"],
                "meta_description": page["meta_description"],
                "show_in_header": page["show_in_header"],
                "show_in_footer": page["show_in_footer"],
                "display_order": page["display_order"],
            }
        )
    # =========================================================================
    # 3. Create vendor default pages (is_platform_page=False)
    # =========================================================================
    vendor_defaults = [
        {
            "slug": "about",
            "title": "About Us",
            "content": """<div class="about-page">
<h1>About Our Loyalty Program</h1>
<p>Welcome to our customer loyalty program! We value your continued support and want to reward you for being part of our community.</p>
<h2>Why Join?</h2>
<ul>
<li><strong>Earn Points:</strong> Get points on every purchase</li>
<li><strong>Exclusive Rewards:</strong> Redeem points for discounts and special offers</li>
<li><strong>Member Benefits:</strong> Access exclusive deals and early sales</li>
<li><strong>Birthday Surprises:</strong> Special rewards on your birthday</li>
</ul>
<h2>How It Works</h2>
<p>Simply sign up, start earning points with every purchase, and redeem them for rewards you'll love.</p>
</div>""",
            "meta_description": "Learn about our customer loyalty program. Earn points, unlock rewards, and enjoy exclusive member benefits.",
            "show_in_header": False,
            "show_in_footer": True,
            "display_order": 10,
        },
        {
            "slug": "rewards-catalog",
            "title": "Rewards Catalog",
            "content": """<div class="rewards-page">
<h1>Rewards Catalog</h1>
<p>Browse our selection of rewards and redeem your hard-earned points!</p>
<div class="rewards-grid">
<div class="reward-placeholder">
<p>Your rewards catalog will appear here once configured.</p>
</div>
</div>
<h2>How to Redeem</h2>
<ol>
<li>Check your point balance in your account</li>
<li>Browse available rewards</li>
<li>Click "Redeem" on your chosen reward</li>
<li>Use your reward code at checkout</li>
</ol>
</div>""",
            "meta_description": "Browse and redeem your loyalty points for exclusive rewards, discounts, and special offers.",
            "show_in_header": True,
            "show_in_footer": True,
            "display_order": 11,
        },
        {
            "slug": "terms",
            "title": "Loyalty Program Terms & Conditions",
            "content": """<div class="terms-page">
<h1>Loyalty Program Terms & Conditions</h1>
<p class="last-updated">Last updated: January 2026</p>
<h2>1. Program Membership</h2>
<p>Membership in our loyalty program is free and open to all customers who meet the eligibility requirements.</p>
<h2>2. Earning Points</h2>
<p>Points are earned on qualifying purchases. The earning rate and qualifying purchases are determined by the program operator and may change with notice.</p>
<h2>3. Redeeming Points</h2>
<p>Points can be redeemed for rewards as shown in the rewards catalog. Minimum point thresholds may apply.</p>
<h2>4. Point Expiration</h2>
<p>Points may expire after a period of account inactivity. Members will be notified before points expire.</p>
<h2>5. Program Changes</h2>
<p>We reserve the right to modify, suspend, or terminate the program with reasonable notice to members.</p>
<h2>6. Privacy</h2>
<p>Your personal information is handled in accordance with our Privacy Policy.</p>
</div>""",
            "meta_description": "Read the terms and conditions for our customer loyalty program including earning rules, redemption, and point expiration policies.",
            "show_in_header": False,
            "show_in_footer": True,
            "show_in_legal": True,
            "display_order": 20,
        },
        {
            "slug": "privacy",
            "title": "Privacy Policy",
            "content": """<div class="privacy-page">
<h1>Privacy Policy</h1>
<p class="last-updated">Last updated: January 2026</p>
<h2>Information We Collect</h2>
<p>We collect information you provide when joining our loyalty program, including:</p>
<ul>
<li>Name and contact information</li>
<li>Purchase history and preferences</li>
<li>Point balance and redemption history</li>
</ul>
<h2>How We Use Your Information</h2>
<p>Your information helps us:</p>
<ul>
<li>Manage your loyalty account</li>
<li>Process point earnings and redemptions</li>
<li>Send program updates and personalized offers</li>
<li>Improve our services</li>
</ul>
<h2>Data Protection</h2>
<p>We implement appropriate security measures to protect your personal information in accordance with GDPR and Luxembourg data protection laws.</p>
<h2>Your Rights</h2>
<p>You have the right to access, correct, or delete your personal data. Contact us to exercise these rights.</p>
<h2>Contact</h2>
<p>For privacy inquiries, please contact our data protection officer.</p>
</div>""",
            "meta_description": "Our privacy policy explains how we collect, use, and protect your personal information in our loyalty program.",
            "show_in_header": False,
            "show_in_footer": True,
            "show_in_legal": True,
            "display_order": 21,
        },
    ]
    for page in vendor_defaults:
        # show_in_legal is optional in the page dicts above; default to False.
        show_in_legal = page.get("show_in_legal", False)
        conn.execute(
            sa.text("""
                INSERT INTO content_pages (platform_id, vendor_id, slug, title, content, content_format,
                                           meta_description, is_published, is_platform_page,
                                           show_in_header, show_in_footer, show_in_legal, display_order,
                                           created_at, updated_at)
                VALUES (:platform_id, NULL, :slug, :title, :content, 'html',
                        :meta_description, true, false,
                        :show_in_header, :show_in_footer, :show_in_legal, :display_order,
                        CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
            """),
            {
                "platform_id": loyalty_platform_id,
                "slug": page["slug"],
                "title": page["title"],
                "content": page["content"],
                "meta_description": page["meta_description"],
                "show_in_header": page["show_in_header"],
                "show_in_footer": page["show_in_footer"],
                "show_in_legal": show_in_legal,
                "display_order": page["display_order"],
            }
        )
def downgrade() -> None:
    """Delete the Loyalty+ platform together with its pages and vendor links."""
    conn = op.get_bind()
    # Resolve the platform ID; nothing to undo if the row was never created.
    row = conn.execute(
        sa.text("SELECT id FROM platforms WHERE code = 'loyalty'")
    ).fetchone()
    if row is None:
        return
    params = {"platform_id": row[0]}
    # Dependent rows go first so the platform delete cannot trip FK constraints.
    conn.execute(
        sa.text("DELETE FROM content_pages WHERE platform_id = :platform_id"),
        params,
    )
    conn.execute(
        sa.text("DELETE FROM vendor_platforms WHERE platform_id = :platform_id"),
        params,
    )
    conn.execute(sa.text("DELETE FROM platforms WHERE code = 'loyalty'"))

View File

@@ -0,0 +1,431 @@
"""add main platform for marketing site
Revision ID: z6g7h8i9j0k1
Revises: z5f6g7h8i9j0
Create Date: 2026-01-19 14:00:00.000000
This migration adds the 'main' platform for the main marketing site:
1. Inserts main platform record (wizamart.lu)
2. Creates platform marketing pages (home, about, faq, pricing, contact)
The 'main' platform serves as the marketing homepage at:
- Development: localhost:9999/ (no /platforms/ prefix)
- Production: wizamart.lu/
All other platforms are accessed via:
- Development: localhost:9999/platforms/{code}/
- Production: {code}.lu or custom domain
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "z6g7h8i9j0k1"
down_revision: Union[str, None] = "z5f6g7h8i9j0"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Seed the 'main' marketing platform (wizamart.lu) and its pages.

    Two data-only steps:
    1. Insert the ``main`` row into ``platforms`` (path_prefix NULL — served
       at the site root rather than under /platforms/).
    2. Insert its marketing pages (all ``is_platform_page = true``,
       ``vendor_id = NULL``).
    """
    conn = op.get_bind()
    # =========================================================================
    # 1. Insert Main Marketing platform
    # =========================================================================
    conn.execute(
        sa.text("""
            INSERT INTO platforms (code, name, description, domain, path_prefix, default_language,
                                   supported_languages, is_active, is_public, theme_config, settings,
                                   created_at, updated_at)
            VALUES ('main', 'Wizamart', 'Main marketing site showcasing all Wizamart platforms',
                    'wizamart.lu', NULL, 'fr', '["fr", "de", "en"]', true, true,
                    '{"primary_color": "#2563EB", "secondary_color": "#3B82F6"}',
                    '{"is_marketing_site": true}',
                    CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
        """)
    )
    # Get the Main platform ID (needed as FK for every page row below).
    result = conn.execute(sa.text("SELECT id FROM platforms WHERE code = 'main'"))
    main_platform_id = result.fetchone()[0]
    # =========================================================================
    # 2. Create platform marketing pages (is_platform_page=True)
    # =========================================================================
    # Each dict maps directly onto the INSERT parameters; "content" is raw HTML
    # stored verbatim with content_format='html'.
    platform_pages = [
        {
            "slug": "home",
            "title": "Wizamart - E-commerce Solutions for Luxembourg",
            "content": """<div class="hero-section">
<h1>Build Your Business with Wizamart</h1>
<p class="lead">All-in-one e-commerce, loyalty, and business solutions for Luxembourg merchants</p>
</div>
<div class="platforms-showcase">
<h2>Our Platforms</h2>
<div class="platform-card">
<h3>Wizamart OMS</h3>
<p>Order Management System for multi-channel selling. Manage orders, inventory, and fulfillment from one dashboard.</p>
<a href="/platforms/oms/" class="btn">Learn More</a>
</div>
<div class="platform-card">
<h3>Loyalty+</h3>
<p>Customer loyalty platform to reward your customers and increase retention. Points, rewards, and member tiers.</p>
<a href="/platforms/loyalty/" class="btn">Learn More</a>
</div>
<div class="platform-card">
<h3>Site Builder</h3>
<p>Create beautiful websites for your local business. No coding required.</p>
<span class="badge">Coming Soon</span>
</div>
</div>
<div class="why-wizamart">
<h2>Why Choose Wizamart?</h2>
<ul>
<li><strong>Made for Luxembourg:</strong> Built specifically for Luxembourg businesses with local payment methods, languages, and compliance.</li>
<li><strong>All-in-One:</strong> Use our platforms together or separately - they integrate seamlessly.</li>
<li><strong>Local Support:</strong> Real support from real people in Luxembourg.</li>
</ul>
</div>""",
            "meta_description": "Wizamart offers e-commerce, loyalty, and business solutions for Luxembourg merchants. OMS, Loyalty+, and Site Builder platforms.",
            "show_in_header": False,
            "show_in_footer": False,
            "display_order": 0,
        },
        {
            "slug": "about",
            "title": "About Wizamart",
            "content": """<div class="about-page">
<h1>About Wizamart</h1>
<div class="mission">
<h2>Our Mission</h2>
<p>We're building the tools Luxembourg businesses need to thrive in the digital economy. From order management to customer loyalty, we provide the infrastructure that powers local commerce.</p>
</div>
<div class="story">
<h2>Our Story</h2>
<p>Wizamart was founded with a simple idea: Luxembourg businesses deserve world-class e-commerce tools that understand their unique needs. Local languages, local payment methods, local compliance - built in from the start.</p>
</div>
<div class="team">
<h2>Our Team</h2>
<p>We're a team of developers, designers, and business experts based in Luxembourg. We understand the local market because we're part of it.</p>
</div>
<div class="values">
<h2>Our Values</h2>
<ul>
<li><strong>Simplicity:</strong> Powerful tools that are easy to use</li>
<li><strong>Reliability:</strong> Your business depends on us - we take that seriously</li>
<li><strong>Local First:</strong> Built for Luxembourg, by Luxembourg</li>
<li><strong>Innovation:</strong> Always improving, always evolving</li>
</ul>
</div>
</div>""",
            "meta_description": "Learn about Wizamart, the Luxembourg-based company building e-commerce and business solutions for local merchants.",
            "show_in_header": True,
            "show_in_footer": True,
            "display_order": 1,
        },
        {
            "slug": "pricing",
            "title": "Pricing - Wizamart",
            "content": """<div class="pricing-page">
<h1>Choose Your Platform</h1>
<p class="lead">Each platform has its own pricing. Choose the tools your business needs.</p>
<div class="platform-pricing">
<div class="platform-pricing-card">
<h3>Wizamart OMS</h3>
<p>Order Management System</p>
<div class="price-range">From €49/month</div>
<ul>
<li>Multi-channel order management</li>
<li>Inventory tracking</li>
<li>Shipping integrations</li>
<li>Analytics dashboard</li>
</ul>
<a href="/platforms/oms/pricing" class="btn">View OMS Pricing</a>
</div>
<div class="platform-pricing-card">
<h3>Loyalty+</h3>
<p>Customer Loyalty Platform</p>
<div class="price-range">From €49/month</div>
<ul>
<li>Points & rewards system</li>
<li>Member tiers</li>
<li>Analytics & insights</li>
<li>POS integrations</li>
</ul>
<a href="/platforms/loyalty/pricing" class="btn">View Loyalty+ Pricing</a>
</div>
<div class="platform-pricing-card">
<h3>Bundle & Save</h3>
<p>Use multiple platforms together</p>
<div class="price-range">Save up to 20%</div>
<ul>
<li>Seamless integration</li>
<li>Unified dashboard</li>
<li>Single invoice</li>
<li>Priority support</li>
</ul>
<a href="/contact" class="btn">Contact Sales</a>
</div>
</div>
</div>""",
            "meta_description": "Wizamart pricing for OMS, Loyalty+, and bundled solutions. Plans starting at €49/month.",
            "show_in_header": True,
            "show_in_footer": True,
            "display_order": 2,
        },
        {
            "slug": "faq",
            "title": "FAQ - Frequently Asked Questions",
            "content": """<div class="faq-page">
<h1>Frequently Asked Questions</h1>
<div class="faq-section">
<h2>General</h2>
<div class="faq-item">
<h3>What is Wizamart?</h3>
<p>Wizamart is a suite of business tools for Luxembourg merchants, including order management (OMS), customer loyalty (Loyalty+), and website building (Site Builder).</p>
</div>
<div class="faq-item">
<h3>Do I need to use all platforms?</h3>
<p>No! Each platform works independently. Use one, two, or all three - whatever fits your business needs.</p>
</div>
<div class="faq-item">
<h3>What languages are supported?</h3>
<p>All platforms support French, German, and English - the three main languages of Luxembourg.</p>
</div>
</div>
<div class="faq-section">
<h2>Billing & Pricing</h2>
<div class="faq-item">
<h3>Is there a free trial?</h3>
<p>Yes! All platforms offer a 14-day free trial with no credit card required.</p>
</div>
<div class="faq-item">
<h3>What payment methods do you accept?</h3>
<p>We accept credit cards, SEPA direct debit, and bank transfers.</p>
</div>
<div class="faq-item">
<h3>Can I cancel anytime?</h3>
<p>Yes, you can cancel your subscription at any time. No long-term contracts required.</p>
</div>
</div>
<div class="faq-section">
<h2>Support</h2>
<div class="faq-item">
<h3>How do I get help?</h3>
<p>All plans include email support. Professional and Business plans include priority support with faster response times.</p>
</div>
<div class="faq-item">
<h3>Do you offer onboarding?</h3>
<p>Yes! We offer guided onboarding for all new customers to help you get started quickly.</p>
</div>
</div>
</div>""",
            "meta_description": "Frequently asked questions about Wizamart platforms, pricing, billing, and support.",
            "show_in_header": True,
            "show_in_footer": True,
            "display_order": 3,
        },
        {
            "slug": "contact",
            "title": "Contact Us - Wizamart",
            "content": """<div class="contact-page">
<h1>Contact Us</h1>
<p class="lead">We'd love to hear from you. Get in touch with our team.</p>
<div class="contact-options">
<div class="contact-card">
<h3>Sales</h3>
<p>Interested in our platforms? Let's talk about how we can help your business.</p>
<p><a href="mailto:sales@wizamart.lu">sales@wizamart.lu</a></p>
</div>
<div class="contact-card">
<h3>Support</h3>
<p>Already a customer? Our support team is here to help.</p>
<p><a href="mailto:support@wizamart.lu">support@wizamart.lu</a></p>
</div>
<div class="contact-card">
<h3>General Inquiries</h3>
<p>For everything else, reach out to our general inbox.</p>
<p><a href="mailto:hello@wizamart.lu">hello@wizamart.lu</a></p>
</div>
</div>
<div class="address">
<h3>Office</h3>
<p>Wizamart S.à r.l.<br>
Luxembourg City<br>
Luxembourg</p>
</div>
</div>""",
            "meta_description": "Contact Wizamart for sales, support, or general inquiries. We're here to help your Luxembourg business succeed.",
            "show_in_header": True,
            "show_in_footer": True,
            "display_order": 4,
        },
        {
            "slug": "terms",
            "title": "Terms of Service - Wizamart",
            "content": """<div class="terms-page">
<h1>Terms of Service</h1>
<p class="last-updated">Last updated: January 2026</p>
<p>These Terms of Service govern your use of Wizamart platforms and services.</p>
<h2>1. Acceptance of Terms</h2>
<p>By accessing or using our services, you agree to be bound by these Terms.</p>
<h2>2. Services</h2>
<p>Wizamart provides e-commerce and business management tools including order management, loyalty programs, and website building services.</p>
<h2>3. Account Registration</h2>
<p>You must provide accurate information when creating an account and keep your login credentials secure.</p>
<h2>4. Fees and Payment</h2>
<p>Subscription fees are billed in advance on a monthly or annual basis. Prices are listed in EUR and include applicable VAT for Luxembourg customers.</p>
<h2>5. Data Protection</h2>
<p>We process personal data in accordance with our Privacy Policy and applicable data protection laws including GDPR.</p>
<h2>6. Limitation of Liability</h2>
<p>Our liability is limited to the amount paid for services in the 12 months preceding any claim.</p>
<h2>7. Governing Law</h2>
<p>These Terms are governed by Luxembourg law. Disputes shall be resolved in Luxembourg courts.</p>
<h2>8. Contact</h2>
<p>For questions about these Terms, contact us at legal@wizamart.lu</p>
</div>""",
            "meta_description": "Wizamart Terms of Service. Read the terms and conditions for using our e-commerce and business platforms.",
            "show_in_header": False,
            "show_in_footer": True,
            "show_in_legal": True,
            "display_order": 10,
        },
        {
            "slug": "privacy",
            "title": "Privacy Policy - Wizamart",
            "content": """<div class="privacy-page">
<h1>Privacy Policy</h1>
<p class="last-updated">Last updated: January 2026</p>
<h2>Introduction</h2>
<p>Wizamart S.à r.l. ("we", "us") is committed to protecting your privacy. This policy explains how we collect, use, and protect your personal data.</p>
<h2>Data Controller</h2>
<p>Wizamart S.à r.l.<br>Luxembourg City, Luxembourg<br>Email: privacy@wizamart.lu</p>
<h2>Data We Collect</h2>
<ul>
<li>Account information (name, email, company details)</li>
<li>Usage data (how you use our platforms)</li>
<li>Payment information (processed by our payment providers)</li>
<li>Support communications</li>
</ul>
<h2>How We Use Your Data</h2>
<ul>
<li>To provide and improve our services</li>
<li>To process payments and billing</li>
<li>To communicate with you about your account</li>
<li>To send marketing communications (with your consent)</li>
</ul>
<h2>Your Rights</h2>
<p>Under GDPR, you have the right to:</p>
<ul>
<li>Access your personal data</li>
<li>Rectify inaccurate data</li>
<li>Request deletion of your data</li>
<li>Data portability</li>
<li>Object to processing</li>
</ul>
<h2>Contact</h2>
<p>To exercise your rights or ask questions, contact our Data Protection Officer at privacy@wizamart.lu</p>
</div>""",
            "meta_description": "Wizamart Privacy Policy. Learn how we collect, use, and protect your personal data in compliance with GDPR.",
            "show_in_header": False,
            "show_in_footer": True,
            "show_in_legal": True,
            "display_order": 11,
        },
    ]
    for page in platform_pages:
        # show_in_legal is optional in the page dicts above; default to False.
        show_in_legal = page.get("show_in_legal", False)
        conn.execute(
            sa.text("""
                INSERT INTO content_pages (platform_id, vendor_id, slug, title, content, content_format,
                                           meta_description, is_published, is_platform_page,
                                           show_in_header, show_in_footer, show_in_legal, display_order,
                                           created_at, updated_at)
                VALUES (:platform_id, NULL, :slug, :title, :content, 'html',
                        :meta_description, true, true,
                        :show_in_header, :show_in_footer, :show_in_legal, :display_order,
                        CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
            """),
            {
                "platform_id": main_platform_id,
                "slug": page["slug"],
                "title": page["title"],
                "content": page["content"],
                "meta_description": page["meta_description"],
                "show_in_header": page["show_in_header"],
                "show_in_footer": page["show_in_footer"],
                "show_in_legal": show_in_legal,
                "display_order": page["display_order"],
            }
        )
def downgrade() -> None:
    """Delete the 'main' marketing platform and every row that references it."""
    conn = op.get_bind()
    # Resolve the platform ID; nothing to undo if the row was never created.
    row = conn.execute(
        sa.text("SELECT id FROM platforms WHERE code = 'main'")
    ).fetchone()
    if row is None:
        return
    params = {"platform_id": row[0]}
    # Dependent rows go first so the platform delete cannot trip FK constraints.
    conn.execute(
        sa.text("DELETE FROM content_pages WHERE platform_id = :platform_id"),
        params,
    )
    conn.execute(
        sa.text("DELETE FROM vendor_platforms WHERE platform_id = :platform_id"),
        params,
    )
    conn.execute(sa.text("DELETE FROM platforms WHERE code = 'main'"))

View File

@@ -0,0 +1,115 @@
"""Fix content_page nullable boolean columns
Revision ID: z7h8i9j0k1l2
Revises: z6g7h8i9j0k1
Create Date: 2026-01-20
This migration:
1. Sets NULL values to defaults for boolean and integer columns
2. Alters columns to be NOT NULL
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "z7h8i9j0k1l2"
down_revision = "z6g7h8i9j0k1"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Backfill NULLs in content_pages flag/order columns, then enforce NOT NULL.

    Two passes over the same column set, in the same order:
    1. UPDATE rows where the column is NULL to its default, so the subsequent
       ALTER cannot fail on existing data.
    2. ALTER each column to NOT NULL with a matching server default.
    """
    # (column name, SQL default literal, SQLAlchemy type) for each target.
    targets = (
        ("display_order", "0", sa.Integer()),
        ("show_in_footer", "true", sa.Boolean()),
        ("show_in_header", "false", sa.Boolean()),
        ("show_in_legal", "false", sa.Boolean()),
    )
    # Pass 1: replace NULL values with the chosen defaults.
    for name, default, _type in targets:
        op.execute(
            f"UPDATE content_pages SET {name} = {default} WHERE {name} IS NULL"
        )
    # Pass 2: tighten each column to NOT NULL with a server default.
    for name, default, col_type in targets:
        op.alter_column(
            "content_pages",
            name,
            existing_type=col_type,
            nullable=False,
            server_default=default,
        )
def downgrade() -> None:
    """Relax the content_pages flag/order columns back to nullable.

    Also clears the server defaults, restoring the pre-upgrade schema.
    Existing non-NULL values are left untouched.
    """
    # Same column set as upgrade(), reverted in the same order.
    for column_name, column_type in (
        ("display_order", sa.Integer()),
        ("show_in_footer", sa.Boolean()),
        ("show_in_header", sa.Boolean()),
        ("show_in_legal", sa.Boolean()),
    ):
        op.alter_column(
            "content_pages",
            column_name,
            existing_type=column_type,
            nullable=True,
            server_default=None,
        )

View File

@@ -0,0 +1,36 @@
"""Add sections column to content_pages
Revision ID: z8i9j0k1l2m3
Revises: z7h8i9j0k1l2
Create Date: 2026-01-23
Adds sections JSON column for structured homepage editing with multi-language support.
The sections column stores hero, features, pricing, and cta configurations
with TranslatableText pattern for i18n.
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "z8i9j0k1l2m3"
down_revision = "z7h8i9j0k1l2"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable JSON `sections` column for structured homepage editing."""
    sections_column = sa.Column(
        "sections",
        sa.JSON(),
        nullable=True,
        comment="Structured homepage sections (hero, features, pricing, cta) with i18n",
    )
    op.add_column("content_pages", sections_column)
def downgrade() -> None:
    """Drop the `sections` column added by upgrade()."""
    op.drop_column("content_pages", "sections")

View File

@@ -0,0 +1,148 @@
"""Add admin platform roles (super admin + platform admin)
Revision ID: z9j0k1l2m3n4
Revises: z8i9j0k1l2m3
Create Date: 2026-01-24
Adds support for super admin and platform admin roles:
- is_super_admin column on users table
- admin_platforms junction table for platform admin assignments
Super admins have access to all platforms.
Platform admins are assigned to specific platforms via admin_platforms.
Existing admins are migrated to super admins for backward compatibility.
"""
from datetime import UTC, datetime
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "z9j0k1l2m3n4"
down_revision = "z8i9j0k1l2m3"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Introduce the super-admin flag and the admin_platforms assignment table.

    Finishes by grandfathering every existing admin in as a super admin so
    current permissions are preserved.
    """
    # 1. Flag column on users: super admins bypass per-platform assignment.
    op.add_column(
        "users",
        sa.Column(
            "is_super_admin",
            sa.Boolean(),
            nullable=False,
            server_default="false",
            comment="Whether this admin has access to all platforms (super admin)",
        ),
    )

    # 2. Junction table mapping platform admins to their platforms.
    op.create_table(
        "admin_platforms",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column(
            "user_id",
            sa.Integer(),
            nullable=False,
            comment="Reference to the admin user",
        ),
        sa.Column(
            "platform_id",
            sa.Integer(),
            nullable=False,
            comment="Reference to the platform",
        ),
        sa.Column(
            "is_active",
            sa.Boolean(),
            nullable=False,
            server_default="true",
            comment="Whether the admin assignment is active",
        ),
        sa.Column(
            "assigned_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
            comment="When the admin was assigned to this platform",
        ),
        sa.Column(
            "assigned_by_user_id",
            sa.Integer(),
            nullable=True,
            comment="Super admin who made this assignment",
        ),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
            onupdate=sa.func.now(),
        ),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["platform_id"], ["platforms.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(
            ["assigned_by_user_id"], ["users.id"], ondelete="SET NULL"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("user_id", "platform_id", name="uq_admin_platform"),
    )

    # Indexes backing the common lookup patterns.
    for index_name, columns in (
        ("idx_admin_platforms_user_id", ["user_id"]),
        ("idx_admin_platforms_platform_id", ["platform_id"]),
        ("idx_admin_platform_active", ["user_id", "platform_id", "is_active"]),
        ("idx_admin_platform_user_active", ["user_id", "is_active"]),
    ):
        op.create_index(index_name, "admin_platforms", columns)

    # 3. Migrate existing admins to super admins for backward compatibility.
    op.execute("UPDATE users SET is_super_admin = TRUE WHERE role = 'admin'")
def downgrade() -> None:
    """Remove admin_platforms (indexes first) and the is_super_admin flag."""
    for index_name in (
        "idx_admin_platform_user_active",
        "idx_admin_platform_active",
        "idx_admin_platforms_platform_id",
        "idx_admin_platforms_user_id",
    ):
        op.drop_index(index_name, table_name="admin_platforms")
    op.drop_table("admin_platforms")
    op.drop_column("users", "is_super_admin")

View File

@@ -0,0 +1,128 @@
"""Add admin menu configuration table
Revision ID: za0k1l2m3n4o5
Revises: z9j0k1l2m3n4
Create Date: 2026-01-25
Adds configurable admin sidebar menus:
- Platform-level config: Controls which menu items platform admins see
- User-level config: Controls which menu items super admins see
- Opt-out model: All items visible by default
- Mandatory items enforced at application level (companies, vendors, users, settings)
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "za0k1l2m3n4o5"
down_revision = "z9j0k1l2m3n4"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create admin_menu_configs: opt-out menu visibility per platform OR per user.

    A check constraint enforces that each row is scoped to exactly one of
    platform_id / user_id.
    """
    op.create_table(
        "admin_menu_configs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column(
            "platform_id",
            sa.Integer(),
            nullable=True,
            comment="Platform scope - applies to all platform admins of this platform",
        ),
        sa.Column(
            "user_id",
            sa.Integer(),
            nullable=True,
            comment="User scope - applies to this specific super admin",
        ),
        sa.Column(
            "menu_item_id",
            sa.String(50),
            nullable=False,
            comment="Menu item identifier from registry (e.g., 'products', 'inventory')",
        ),
        sa.Column(
            "is_visible",
            sa.Boolean(),
            nullable=False,
            server_default="true",
            comment="Whether this menu item is visible (False = hidden)",
        ),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
            onupdate=sa.func.now(),
        ),
        sa.ForeignKeyConstraint(["platform_id"], ["platforms.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # One row per scope + menu item.
        sa.UniqueConstraint("platform_id", "menu_item_id", name="uq_platform_menu_config"),
        sa.UniqueConstraint("user_id", "menu_item_id", name="uq_user_menu_config"),
        # Exactly one scope (platform or user) must be set.
        sa.CheckConstraint(
            "(platform_id IS NOT NULL AND user_id IS NULL) OR "
            "(platform_id IS NULL AND user_id IS NOT NULL)",
            name="ck_admin_menu_config_scope",
        ),
    )

    # Indexes for scope lookups and visibility filtering.
    for index_name, columns in (
        ("idx_admin_menu_configs_platform_id", ["platform_id"]),
        ("idx_admin_menu_configs_user_id", ["user_id"]),
        ("idx_admin_menu_configs_menu_item_id", ["menu_item_id"]),
        ("idx_admin_menu_config_platform_visible", ["platform_id", "is_visible"]),
        ("idx_admin_menu_config_user_visible", ["user_id", "is_visible"]),
    ):
        op.create_index(index_name, "admin_menu_configs", columns)
def downgrade() -> None:
    """Drop admin_menu_configs and its indexes (reverse of creation order)."""
    for index_name in (
        "idx_admin_menu_config_user_visible",
        "idx_admin_menu_config_platform_visible",
        "idx_admin_menu_configs_menu_item_id",
        "idx_admin_menu_configs_user_id",
        "idx_admin_menu_configs_platform_id",
    ):
        op.drop_index(index_name, table_name="admin_menu_configs")
    op.drop_table("admin_menu_configs")

View File

@@ -0,0 +1,117 @@
"""Add frontend_type to admin_menu_configs
Revision ID: zb1l2m3n4o5p6
Revises: za0k1l2m3n4o5
Create Date: 2026-01-25
Adds frontend_type column to support both admin and vendor menu configuration:
- 'admin': Admin panel menus (super admins, platform admins)
- 'vendor': Vendor dashboard menus (configured per platform)
Also updates unique constraints to include frontend_type and adds
a check constraint ensuring user_id scope is only used for admin frontend.
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "zb1l2m3n4o5p6"
down_revision = "za0k1l2m3n4o5"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add frontend_type so menu configs can target the admin or vendor UI.

    Existing rows default to 'admin', the unique constraints are re-scoped to
    include the new column, and user-level scoping is restricted to the admin
    frontend via a check constraint.
    """
    bind = op.get_bind()

    # 1. Enum type backing the new column (created separately for PostgreSQL).
    sa.Enum('admin', 'vendor', name='frontendtype').create(bind, checkfirst=True)

    # 2. New column; existing rows become admin-frontend configs.
    op.add_column(
        "admin_menu_configs",
        sa.Column(
            "frontend_type",
            sa.Enum('admin', 'vendor', name='frontendtype'),
            nullable=False,
            server_default="admin",
            comment="Which frontend this config applies to (admin or vendor)",
        ),
    )

    # 3. Plain index on the new column.
    op.create_index(
        "idx_admin_menu_configs_frontend_type",
        "admin_menu_configs",
        ["frontend_type"],
    )

    # 4. Uniqueness is now per-frontend: swap the old constraints for new ones.
    for old_name in ("uq_platform_menu_config", "uq_user_menu_config"):
        op.drop_constraint(old_name, "admin_menu_configs", type_="unique")
    op.create_unique_constraint(
        "uq_frontend_platform_menu_config",
        "admin_menu_configs",
        ["frontend_type", "platform_id", "menu_item_id"],
    )
    op.create_unique_constraint(
        "uq_frontend_user_menu_config",
        "admin_menu_configs",
        ["frontend_type", "user_id", "menu_item_id"],
    )

    # 5. Per-user scope only makes sense on the admin frontend.
    op.create_check_constraint(
        "ck_user_scope_admin_only",
        "admin_menu_configs",
        "(user_id IS NULL) OR (frontend_type = 'admin')",
    )

    # 6. Composite indexes for the common frontend-scoped queries.
    for index_name, columns in (
        ("idx_admin_menu_config_frontend_platform", ["frontend_type", "platform_id"]),
        ("idx_admin_menu_config_frontend_user", ["frontend_type", "user_id"]),
    ):
        op.create_index(index_name, "admin_menu_configs", columns)
def downgrade() -> None:
    """Revert to single-frontend menu configs (reverse of upgrade order)."""
    for index_name in (
        "idx_admin_menu_config_frontend_user",
        "idx_admin_menu_config_frontend_platform",
    ):
        op.drop_index(index_name, table_name="admin_menu_configs")

    op.drop_constraint("ck_user_scope_admin_only", "admin_menu_configs", type_="check")

    for constraint_name in (
        "uq_frontend_user_menu_config",
        "uq_frontend_platform_menu_config",
    ):
        op.drop_constraint(constraint_name, "admin_menu_configs", type_="unique")

    # Restore the original frontend-agnostic unique constraints.
    op.create_unique_constraint(
        "uq_platform_menu_config",
        "admin_menu_configs",
        ["platform_id", "menu_item_id"],
    )
    op.create_unique_constraint(
        "uq_user_menu_config",
        "admin_menu_configs",
        ["user_id", "menu_item_id"],
    )

    op.drop_index("idx_admin_menu_configs_frontend_type", table_name="admin_menu_configs")
    op.drop_column("admin_menu_configs", "frontend_type")

    # Finally remove the enum type itself.
    sa.Enum('admin', 'vendor', name='frontendtype').drop(op.get_bind(), checkfirst=True)

View File

@@ -0,0 +1,142 @@
"""Add platform modules table
Revision ID: zc2m3n4o5p6q7
Revises: zb1l2m3n4o5p6
Create Date: 2026-01-26
Adds platform_modules junction table for tracking module enablement per platform:
- Auditability: Track when modules were enabled/disabled and by whom
- Configuration: Per-module settings specific to each platform
- State tracking: Explicit enabled/disabled states with timestamps
This replaces the simpler Platform.settings["enabled_modules"] JSON approach
for better auditability and query capabilities.
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "zc2m3n4o5p6q7"
down_revision = "zb1l2m3n4o5p6"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create platform_modules: per-platform module enablement with an audit trail."""
    op.create_table(
        "platform_modules",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column(
            "platform_id",
            sa.Integer(),
            nullable=False,
            comment="Platform this module configuration belongs to",
        ),
        sa.Column(
            "module_code",
            sa.String(50),
            nullable=False,
            comment="Module code (e.g., 'billing', 'inventory', 'orders')",
        ),
        sa.Column(
            "is_enabled",
            sa.Boolean(),
            nullable=False,
            server_default="true",
            comment="Whether this module is currently enabled for the platform",
        ),
        # Audit columns: when each state flip happened and who did it.
        sa.Column(
            "enabled_at",
            sa.DateTime(timezone=True),
            nullable=True,
            comment="When the module was last enabled",
        ),
        sa.Column(
            "enabled_by_user_id",
            sa.Integer(),
            nullable=True,
            comment="User who enabled the module",
        ),
        sa.Column(
            "disabled_at",
            sa.DateTime(timezone=True),
            nullable=True,
            comment="When the module was last disabled",
        ),
        sa.Column(
            "disabled_by_user_id",
            sa.Integer(),
            nullable=True,
            comment="User who disabled the module",
        ),
        sa.Column(
            "config",
            sa.JSON(),
            nullable=False,
            server_default="{}",
            comment="Module-specific configuration for this platform",
        ),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.func.now(),
            onupdate=sa.func.now(),
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["platform_id"], ["platforms.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["enabled_by_user_id"], ["users.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["disabled_by_user_id"], ["users.id"], ondelete="SET NULL"),
        # Exactly one config row per platform/module pair.
        sa.UniqueConstraint("platform_id", "module_code", name="uq_platform_module"),
    )

    # Lookup indexes for the hot query paths.
    for index_name, columns in (
        ("idx_platform_module_platform_id", ["platform_id"]),
        ("idx_platform_module_code", ["module_code"]),
        ("idx_platform_module_enabled", ["platform_id", "is_enabled"]),
    ):
        op.create_index(index_name, "platform_modules", columns)
def downgrade() -> None:
    """Drop platform_modules and its indexes (reverse of creation order)."""
    for index_name in (
        "idx_platform_module_enabled",
        "idx_platform_module_code",
        "idx_platform_module_platform_id",
    ):
        op.drop_index(index_name, table_name="platform_modules")
    op.drop_table("platform_modules")

View File

@@ -0,0 +1,83 @@
# alembic/versions/zd3n4o5p6q7r8_promote_cms_customers_to_core.py
"""Promote CMS and Customers modules to core.
Revision ID: zd3n4o5p6q7r8
Revises: ze4o5p6q7r8s9
Create Date: 2026-01-27 10:10:00.000000
This migration ensures that CMS and Customers modules are enabled for all platforms,
since they are now core modules that cannot be disabled.
"""
from datetime import datetime, timezone
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "zd3n4o5p6q7r8"
down_revision = "ze4o5p6q7r8s9"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Enable CMS and Customers modules for all platforms."""
    connection = op.get_bind()
    now = datetime.now(timezone.utc)

    platform_rows = connection.execute(sa.text("SELECT id FROM platforms")).fetchall()

    for (platform_id,) in platform_rows:
        for module_code in ("cms", "customers"):
            params = {"platform_id": platform_id, "module_code": module_code}

            # Look for an existing config row for this platform/module pair.
            existing = connection.execute(
                sa.text(
                    """
                    SELECT id FROM platform_modules
                    WHERE platform_id = :platform_id AND module_code = :module_code
                    """
                ),
                params,
            ).fetchone()

            params["now"] = now
            if existing:
                # Row present: force it to the enabled state.
                connection.execute(
                    sa.text(
                        """
                        UPDATE platform_modules
                        SET is_enabled = true, enabled_at = :now
                        WHERE platform_id = :platform_id AND module_code = :module_code
                        """
                    ),
                    params,
                )
            else:
                # No row yet: insert a fresh enabled record with empty config.
                connection.execute(
                    sa.text(
                        """
                        INSERT INTO platform_modules (platform_id, module_code, is_enabled, enabled_at, config)
                        VALUES (:platform_id, :module_code, true, :now, '{}')
                        """
                    ),
                    params,
                )

    # Note: JSON settings update skipped - platform_modules table is the primary
    # mechanism now. Legacy JSON settings will be handled by ModuleService fallback.
def downgrade() -> None:
    """
    Note: This doesn't actually disable CMS and Customers since that would
    break functionality. It just removes the explicit enabling done by upgrade.
    """
    # No-op: We don't want to disable core modules
    # (disabling CMS/Customers here would break every platform on downgrade).
    pass

View File

@@ -0,0 +1,60 @@
# alembic/versions/ze4o5p6q7r8s9_rename_platform_admin_to_tenancy.py
"""Rename platform-admin module to tenancy.
Revision ID: ze4o5p6q7r8s9
Revises: zc2m3n4o5p6q7
Create Date: 2026-01-27 10:00:00.000000
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "ze4o5p6q7r8s9"
down_revision = "zc2m3n4o5p6q7"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Rename platform-admin to tenancy in platform_modules table."""
    statements = (
        # Junction-table module codes.
        """
        UPDATE platform_modules
        SET module_code = 'tenancy'
        WHERE module_code = 'platform-admin'
        """,
        # Legacy Platform.settings["enabled_modules"] JSON references; the
        # settings column is JSON, so a text-replacement round-trip is used.
        """
        UPDATE platforms
        SET settings = REPLACE(settings::text, '"platform-admin"', '"tenancy"')::json
        WHERE settings::text LIKE '%"platform-admin"%'
        """,
    )
    for statement in statements:
        op.execute(statement)
def downgrade() -> None:
    """Revert tenancy back to platform-admin."""
    statements = (
        # Junction-table module codes back to the old name.
        """
        UPDATE platform_modules
        SET module_code = 'platform-admin'
        WHERE module_code = 'tenancy'
        """,
        # Undo the JSON settings rename as well.
        """
        UPDATE platforms
        SET settings = REPLACE(settings::text, '"tenancy"', '"platform-admin"')::json
        WHERE settings::text LIKE '%"tenancy"%'
        """,
    )
    for statement in statements:
        op.execute(statement)

View File

@@ -28,7 +28,7 @@ cart_module = ModuleDefinition(
description="Session-based shopping cart for storefronts",
version="1.0.0",
is_self_contained=True,
requires=["inventory"], # Checks inventory availability
requires=["inventory", "catalog"], # Checks inventory availability and references Product model
migrations_path="migrations",
features=[
"cart_management", # Basic cart CRUD operations

View File

@@ -34,6 +34,25 @@ store_content_pages_router = APIRouter(prefix="/content-pages")
logger = logging.getLogger(__name__)
def _resolve_platform_id(db: Session, store_id: int) -> int | None:
    """Resolve platform_id from store's primary StorePlatform. Returns None if not found."""
    from app.modules.tenancy.models import StorePlatform

    def _first_link(extra_filter):
        # Shared query shape: one StorePlatform row for this store.
        return (
            db.query(StorePlatform)
            .filter(StorePlatform.store_id == store_id, extra_filter)
            .first()
        )

    # Prefer the store's designated primary platform link.
    link = _first_link(StorePlatform.is_primary.is_(True))
    if link is None:
        # Fallback: any active store_platform link.
        link = _first_link(StorePlatform.is_active.is_(True))
    return link.platform_id if link else None
# ============================================================================
# STORE CONTENT PAGES
# ============================================================================
@@ -50,8 +69,9 @@ def list_store_pages(
Returns store-specific overrides + platform defaults (store overrides take precedence).
"""
platform_id = _resolve_platform_id(db, current_user.token_store_id)
pages = content_page_service.list_pages_for_store(
db, store_id=current_user.token_store_id, include_unpublished=include_unpublished
db, platform_id=platform_id, store_id=current_user.token_store_id, include_unpublished=include_unpublished
)
return [page.to_dict() for page in pages]
@@ -148,11 +168,10 @@ def get_platform_default(
Useful for stores to view the original before/after overriding.
"""
# Get store's platform
store = store_service.get_store_by_id_optional(db, current_user.token_store_id)
platform_id = 1 # Default to OMS
if store and store.platforms:
platform_id = store.platforms[0].id
platform_id = _resolve_platform_id(db, current_user.token_store_id)
if platform_id is None:
from fastapi import HTTPException
raise HTTPException(status_code=400, detail="Store is not subscribed to any platform")
# Get platform default (store_id=None)
page = content_page_service.get_store_default_page(
@@ -177,8 +196,10 @@ def get_page(
Returns store override if exists, otherwise platform default.
"""
platform_id = _resolve_platform_id(db, current_user.token_store_id)
page = content_page_service.get_page_for_store_or_raise(
db,
platform_id=platform_id,
slug=slug,
store_id=current_user.token_store_id,
include_unpublished=include_unpublished,

View File

@@ -36,11 +36,13 @@ def get_navigation_pages(request: Request, db: Session = Depends(get_db)):
Returns store overrides + platform defaults.
"""
store = getattr(request.state, "store", None)
platform = getattr(request.state, "platform", None)
store_id = store.id if store else None
platform_id = platform.id if platform else 1
# Get all published pages for this store
pages = content_page_service.list_pages_for_store(
db, store_id=store_id, include_unpublished=False
db, platform_id=platform_id, store_id=store_id, include_unpublished=False
)
return [
@@ -64,10 +66,13 @@ def get_content_page(slug: str, request: Request, db: Session = Depends(get_db))
Returns store override if exists, otherwise platform default.
"""
store = getattr(request.state, "store", None)
platform = getattr(request.state, "platform", None)
store_id = store.id if store else None
platform_id = platform.id if platform else 1
page = content_page_service.get_page_for_store_or_raise(
db,
platform_id=platform_id,
slug=slug,
store_id=store_id,
include_unpublished=False, # Only show published pages

View File

@@ -52,6 +52,7 @@ inventory_module = ModuleDefinition(
"transaction history, and bulk imports."
),
version="1.0.0",
requires=["catalog"], # Depends on catalog module for Product model
# Module-driven permissions
permissions=[
PermissionDefinition(

View File

@@ -56,7 +56,7 @@ marketplace_module = ModuleDefinition(
"and catalog synchronization."
),
version="1.0.0",
requires=["inventory"], # Depends on inventory module
requires=["inventory", "catalog", "orders"], # Depends on inventory, catalog, and orders modules
features=[
"letzshop_sync", # Sync products with Letzshop
"marketplace_import", # Import products from marketplace

View File

@@ -52,7 +52,7 @@ orders_module = ModuleDefinition(
"invoicing, and bulk order operations. Uses the payments module for checkout."
),
version="1.0.0",
requires=["payments"], # Depends on payments module for checkout
requires=["payments", "catalog", "inventory"], # Depends on payments, catalog, and inventory modules
# Module-driven permissions
permissions=[
PermissionDefinition(

View File

@@ -52,14 +52,25 @@ class StoreDomain(Base, TimestampMixin):
is_verified = Column(Boolean, default=False, nullable=False)
verified_at = Column(DateTime(timezone=True), nullable=True)
# Platform association (for platform context resolution from custom domains)
platform_id = Column(
Integer,
ForeignKey("platforms.id", ondelete="SET NULL"),
nullable=True,
index=True,
comment="Platform this domain is associated with (for platform context resolution)",
)
# Relationships
store = relationship("Store", back_populates="domains")
platform = relationship("Platform")
# Constraints
__table_args__ = (
UniqueConstraint("store_id", "domain", name="uq_store_domain"),
Index("idx_domain_active", "domain", "is_active"),
Index("idx_store_domain_primary", "store_id", "is_primary"),
Index("idx_store_domain_platform", "platform_id"),
)
def __repr__(self):

View File

@@ -78,6 +78,7 @@ def add_store_domain(
is_active=domain.is_active,
is_verified=domain.is_verified,
ssl_status=domain.ssl_status,
platform_id=domain.platform_id,
verification_token=domain.verification_token,
verified_at=domain.verified_at,
ssl_verified_at=domain.ssl_verified_at,
@@ -117,6 +118,7 @@ def list_store_domains(
is_active=d.is_active,
is_verified=d.is_verified,
ssl_status=d.ssl_status,
platform_id=d.platform_id,
verification_token=d.verification_token if not d.is_verified else None,
verified_at=d.verified_at,
ssl_verified_at=d.ssl_verified_at,
@@ -151,6 +153,7 @@ def get_domain_details(
is_active=domain.is_active,
is_verified=domain.is_verified,
ssl_status=domain.ssl_status,
platform_id=domain.platform_id,
verification_token=(
domain.verification_token if not domain.is_verified else None
),
@@ -197,6 +200,7 @@ def update_store_domain(
is_active=domain.is_active,
is_verified=domain.is_verified,
ssl_status=domain.ssl_status,
platform_id=domain.platform_id,
verification_token=None, # Don't expose token after updates
verified_at=domain.verified_at,
ssl_verified_at=domain.ssl_verified_at,

View File

@@ -28,6 +28,7 @@ class StoreDomainCreate(BaseModel):
is_primary: bool = Field(
default=False, description="Set as primary domain for the store"
)
platform_id: int | None = Field(None, description="Platform this domain belongs to")
@field_validator("domain")
@classmethod
@@ -86,6 +87,7 @@ class StoreDomainResponse(BaseModel):
is_active: bool
is_verified: bool
ssl_status: str
platform_id: int | None = None
verification_token: str | None = None
verified_at: datetime | None = None
ssl_verified_at: datetime | None = None

View File

@@ -101,11 +101,23 @@ class StoreDomainService:
if domain_data.is_primary:
self._unset_primary_domains(db, store_id)
# Resolve platform_id: use provided value, or auto-resolve from primary StorePlatform
platform_id = domain_data.platform_id
if not platform_id:
from app.modules.tenancy.models import StorePlatform
primary_sp = (
db.query(StorePlatform)
.filter(StorePlatform.store_id == store_id, StorePlatform.is_primary.is_(True))
.first()
)
platform_id = primary_sp.platform_id if primary_sp else None
# Create domain record
new_domain = StoreDomain(
store_id=store_id,
domain=normalized_domain,
is_primary=domain_data.is_primary,
platform_id=platform_id,
verification_token=secrets.token_urlsafe(32),
is_verified=False, # Requires DNS verification
is_active=False, # Cannot be active until verified

View File

@@ -16,6 +16,16 @@ function adminMerchantDetail() {
error: null,
merchantId: null,
// Subscription state
subscription: null,
subscriptionTier: null,
usageMetrics: [],
tiers: [],
platformId: null,
showCreateSubscriptionModal: false,
createForm: { tier_code: 'essential', status: 'trial', trial_days: 14, is_annual: false },
creatingSubscription: false,
// Initialize
async init() {
// Load i18n translations
@@ -38,6 +48,10 @@ function adminMerchantDetail() {
this.merchantId = match[1];
merchantDetailLog.info('Viewing merchant:', this.merchantId);
await this.loadMerchant();
await this.loadPlatforms();
if (this.platformId) {
await this.loadSubscription();
}
} else {
merchantDetailLog.error('No merchant ID in URL');
this.error = 'Invalid merchant URL';
@@ -84,6 +98,128 @@ function adminMerchantDetail() {
}
},
// Load platforms and find OMS platform ID
async loadPlatforms() {
try {
const response = await apiClient.get('/admin/platforms');
const platforms = response.items || response;
const oms = platforms.find(p => p.code === 'oms');
if (oms) {
this.platformId = oms.id;
merchantDetailLog.info('OMS platform resolved:', this.platformId);
} else {
merchantDetailLog.warn('OMS platform not found');
}
} catch (error) {
merchantDetailLog.warn('Failed to load platforms:', error.message);
}
},
// Load subscription for this merchant
async loadSubscription() {
if (!this.merchantId || !this.platformId) return;
merchantDetailLog.info('Loading subscription for merchant:', this.merchantId);
try {
const url = `/admin/subscriptions/merchants/${this.merchantId}/platforms/${this.platformId}`;
window.LogConfig.logApiCall('GET', url, null, 'request');
const response = await apiClient.get(url);
window.LogConfig.logApiCall('GET', url, response, 'response');
this.subscription = response.subscription || response;
this.subscriptionTier = response.tier || null;
this.usageMetrics = response.features || [];
merchantDetailLog.info('Subscription loaded:', {
tier: this.subscription?.tier,
status: this.subscription?.status,
features_count: this.usageMetrics.length
});
} catch (error) {
if (error.status === 404) {
merchantDetailLog.info('No subscription found for merchant');
this.subscription = null;
this.usageMetrics = [];
} else {
merchantDetailLog.warn('Failed to load subscription:', error.message);
}
}
},
// Load available subscription tiers
async loadTiers() {
if (this.tiers.length > 0) return;
try {
const response = await apiClient.get('/admin/subscriptions/tiers');
this.tiers = response.items || response;
merchantDetailLog.info('Loaded tiers:', this.tiers.length);
} catch (error) {
merchantDetailLog.warn('Failed to load tiers:', error.message);
}
},
// Open create subscription modal
async openCreateSubscriptionModal() {
await this.loadTiers();
this.createForm = { tier_code: 'essential', status: 'trial', trial_days: 14, is_annual: false };
this.showCreateSubscriptionModal = true;
},
// Create subscription for this merchant
async createSubscription() {
if (!this.merchantId || !this.platformId) return;
this.creatingSubscription = true;
merchantDetailLog.info('Creating subscription for merchant:', this.merchantId);
try {
const url = `/admin/subscriptions/merchants/${this.merchantId}/platforms/${this.platformId}`;
const payload = {
merchant_id: parseInt(this.merchantId),
platform_id: this.platformId,
tier_code: this.createForm.tier_code,
status: this.createForm.status,
trial_days: this.createForm.status === 'trial' ? parseInt(this.createForm.trial_days) : 0,
is_annual: this.createForm.is_annual
};
window.LogConfig.logApiCall('POST', url, payload, 'request');
const response = await apiClient.post(url, payload);
window.LogConfig.logApiCall('POST', url, response, 'response');
this.showCreateSubscriptionModal = false;
Utils.showToast('Subscription created successfully', 'success');
merchantDetailLog.info('Subscription created');
await this.loadSubscription();
} catch (error) {
window.LogConfig.logError(error, 'Create Subscription');
Utils.showToast(error.message || 'Failed to create subscription', 'error');
} finally {
this.creatingSubscription = false;
}
},
// Get usage bar color based on percentage
getUsageBarColor(current, limit) {
if (!limit || limit === 0) return 'bg-blue-500';
const percent = (current / limit) * 100;
if (percent >= 90) return 'bg-red-500';
if (percent >= 75) return 'bg-yellow-500';
return 'bg-green-500';
},
// Format tier price for display
formatTierPrice(tier) {
if (!tier.price_monthly_cents) return 'Custom';
return `${(tier.price_monthly_cents / 100).toFixed(2)}/mo`;
},
// Format date (matches dashboard pattern)
formatDate(dateString) {
if (!dateString) {
@@ -140,6 +276,9 @@ function adminMerchantDetail() {
async refresh() {
merchantDetailLog.info('=== MERCHANT REFRESH TRIGGERED ===');
await this.loadMerchant();
if (this.platformId) {
await this.loadSubscription();
}
Utils.showToast(I18n.t('tenancy.messages.merchant_details_refreshed'), 'success');
merchantDetailLog.info('=== MERCHANT REFRESH COMPLETE ===');
}

View File

@@ -19,7 +19,6 @@ function adminStoreDetail() {
loading: false,
error: null,
storeCode: null,
showSubscriptionModal: false,
// Initialize
async init() {
@@ -150,39 +149,6 @@ function adminStoreDetail() {
return 'bg-green-500';
},
// Create a new subscription for this store
async createSubscription() {
if (!this.store?.id) {
Utils.showToast(I18n.t('tenancy.messages.no_store_loaded'), 'error');
return;
}
detailLog.info('Creating subscription for store:', this.store.id);
try {
// Create a trial subscription with default tier
const url = `/admin/subscriptions/${this.store.id}`;
const data = {
tier: 'essential',
status: 'trial',
trial_days: 14,
is_annual: false
};
window.LogConfig.logApiCall('POST', url, data, 'request');
const response = await apiClient.post(url, data);
window.LogConfig.logApiCall('POST', url, response, 'response');
this.subscription = response;
Utils.showToast(I18n.t('tenancy.messages.subscription_created_successfully'), 'success');
detailLog.info('Subscription created:', this.subscription);
} catch (error) {
window.LogConfig.logError(error, 'Create Subscription');
Utils.showToast(error.message || 'Failed to create subscription', 'error');
}
},
// Delete store
async deleteStore() {
detailLog.info('Delete store requested:', this.storeCode);

View File

@@ -194,6 +194,167 @@
</div>
</div>
<!-- Subscription Card -->
<div class="px-4 py-3 mb-6 bg-white rounded-lg shadow-md dark:bg-gray-800" x-show="subscription">
<div class="flex items-center justify-between mb-4">
<h3 class="text-lg font-semibold text-gray-700 dark:text-gray-200">
Subscription
</h3>
</div>
<!-- Tier and Status -->
<div class="flex flex-wrap items-center gap-4 mb-4">
<div class="flex items-center gap-2">
<span class="text-sm text-gray-600 dark:text-gray-400">Tier:</span>
<span class="px-2.5 py-0.5 text-sm font-medium rounded-full"
:class="{
'bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-300': subscription?.tier === 'essential',
'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-300': subscription?.tier === 'professional',
'bg-purple-100 text-purple-800 dark:bg-purple-900 dark:text-purple-300': subscription?.tier === 'business',
'bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-300': subscription?.tier === 'enterprise'
}"
x-text="subscription?.tier ? subscription.tier.charAt(0).toUpperCase() + subscription.tier.slice(1) : '-'">
</span>
</div>
<div class="flex items-center gap-2">
<span class="text-sm text-gray-600 dark:text-gray-400">Status:</span>
<!-- Status badge: color-coded pill; label is the status with underscores
     replaced by spaces and the first letter capitalized (e.g. "Past due"). -->
<span class="px-2.5 py-0.5 text-sm font-medium rounded-full"
:class="{
'bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-300': subscription?.status === 'active',
'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-300': subscription?.status === 'trial',
'bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-300': subscription?.status === 'past_due',
'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-300': subscription?.status === 'cancelled' || subscription?.status === 'expired'
}"
x-text="subscription?.status ? subscription.status.replace('_', ' ').charAt(0).toUpperCase() + subscription.status.replace('_', ' ').slice(1) : '-'">
</span>
</div>
<template x-if="subscription?.is_annual">
<span class="px-2.5 py-0.5 text-xs font-medium text-purple-800 bg-purple-100 rounded-full dark:bg-purple-900 dark:text-purple-300">
Annual
</span>
</template>
</div>
<!-- Period Info -->
<div class="flex flex-wrap gap-4 mb-4 text-sm">
<div>
<span class="text-gray-600 dark:text-gray-400">Period:</span>
<span class="ml-1 text-gray-700 dark:text-gray-300" x-text="formatDate(subscription?.period_start)"></span>
<span class="text-gray-400">&rarr;</span>
<span class="text-gray-700 dark:text-gray-300" x-text="formatDate(subscription?.period_end)"></span>
</div>
<template x-if="subscription?.trial_ends_at">
<div>
<span class="text-gray-600 dark:text-gray-400">Trial ends:</span>
<span class="ml-1 text-gray-700 dark:text-gray-300" x-text="formatDate(subscription?.trial_ends_at)"></span>
</div>
</template>
</div>
<!-- Usage Meters -->
<div class="grid gap-4 md:grid-cols-3">
<template x-for="metric in usageMetrics" :key="metric.name">
<div class="p-3 bg-gray-50 rounded-lg dark:bg-gray-700">
<div class="flex items-center justify-between mb-2">
<span class="text-xs font-medium text-gray-600 dark:text-gray-400 uppercase" x-text="metric.name"></span>
</div>
<div class="flex items-baseline gap-1">
<span class="text-xl font-bold text-gray-700 dark:text-gray-200" x-text="metric.current"></span>
<span class="text-sm text-gray-500 dark:text-gray-400">
/ <span x-text="metric.is_unlimited ? '∞' : metric.limit"></span>
</span>
</div>
<div class="mt-2 h-1.5 bg-gray-200 rounded-full dark:bg-gray-600" x-show="!metric.is_unlimited">
<div class="h-1.5 rounded-full transition-all"
:class="getUsageBarColor(metric.current, metric.limit)"
:style="`width: ${Math.min(100, metric.percentage || 0)}%`">
</div>
</div>
</div>
</template>
<template x-if="usageMetrics.length === 0">
<div class="p-3 bg-gray-50 rounded-lg dark:bg-gray-700 md:col-span-3">
<p class="text-sm text-gray-500 dark:text-gray-400 text-center">No usage data available</p>
</div>
</template>
</div>
</div>
<!-- No Subscription Notice -->
<div class="px-4 py-3 mb-6 bg-yellow-50 border border-yellow-200 rounded-lg dark:bg-yellow-900/20 dark:border-yellow-800" x-show="!subscription && !loading && platformId">
<div class="flex items-center gap-3">
<span x-html="$icon('exclamation', 'w-5 h-5 text-yellow-600 dark:text-yellow-400')"></span>
<div>
<p class="text-sm font-medium text-yellow-800 dark:text-yellow-200">No Subscription Found</p>
<p class="text-sm text-yellow-700 dark:text-yellow-300">This merchant doesn't have a subscription yet.</p>
</div>
<button
@click="openCreateSubscriptionModal()"
class="ml-auto px-3 py-1.5 text-sm font-medium text-white bg-yellow-600 rounded-lg hover:bg-yellow-700">
Create Subscription
</button>
</div>
</div>
<!-- Create Subscription Modal -->
<div x-show="showCreateSubscriptionModal" class="fixed inset-0 z-50 flex items-center justify-center" x-cloak>
<div class="fixed inset-0 bg-black bg-opacity-50" @click="showCreateSubscriptionModal = false"></div>
<div class="relative z-10 w-full max-w-md p-6 bg-white rounded-lg shadow-xl dark:bg-gray-800">
<h3 class="mb-4 text-lg font-semibold text-gray-700 dark:text-gray-200">Create Subscription</h3>
<!-- Tier Selector -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">Subscription Tier</label>
<select x-model="createForm.tier_code"
class="w-full px-3 py-2 text-sm border border-gray-300 rounded-lg dark:border-gray-600 dark:bg-gray-700 dark:text-gray-300 focus:outline-none focus:ring-2 focus:ring-purple-500">
<template x-for="tier in tiers" :key="tier.code">
<option :value="tier.code" x-text="tier.name + ' — ' + formatTierPrice(tier)"></option>
</template>
</select>
</div>
<!-- Status -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">Status</label>
<select x-model="createForm.status"
class="w-full px-3 py-2 text-sm border border-gray-300 rounded-lg dark:border-gray-600 dark:bg-gray-700 dark:text-gray-300 focus:outline-none focus:ring-2 focus:ring-purple-500">
<option value="trial">Trial</option>
<option value="active">Active</option>
</select>
</div>
<!-- Trial Days (shown only when status=trial) -->
<div class="mb-4" x-show="createForm.status === 'trial'">
<label class="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">Trial Days</label>
<input type="number" x-model="createForm.trial_days" min="1" max="90"
class="w-full px-3 py-2 text-sm border border-gray-300 rounded-lg dark:border-gray-600 dark:bg-gray-700 dark:text-gray-300 focus:outline-none focus:ring-2 focus:ring-purple-500">
</div>
<!-- Annual Billing -->
<div class="mb-6">
<label class="flex items-center gap-2 text-sm text-gray-700 dark:text-gray-300">
<input type="checkbox" x-model="createForm.is_annual"
class="rounded border-gray-300 text-purple-600 focus:ring-purple-500 dark:border-gray-600 dark:bg-gray-700">
Annual billing
</label>
</div>
<!-- Actions -->
<div class="flex justify-end gap-3">
<button @click="showCreateSubscriptionModal = false"
class="px-4 py-2 text-sm font-medium text-gray-700 bg-gray-100 rounded-lg hover:bg-gray-200 dark:bg-gray-700 dark:text-gray-300 dark:hover:bg-gray-600">
Cancel
</button>
<button @click="createSubscription()"
:disabled="creatingSubscription"
class="px-4 py-2 text-sm font-medium text-white bg-purple-600 rounded-lg hover:bg-purple-700 disabled:opacity-50 disabled:cursor-not-allowed">
<span x-show="!creatingSubscription">Create</span>
<span x-show="creatingSubscription">Creating...</span>
</button>
</div>
</div>
</div>
<!-- Stores Section -->
<div class="px-4 py-3 mb-8 bg-white rounded-lg shadow-md dark:bg-gray-800" x-show="merchant?.stores && merchant?.stores.length > 0">
<h3 class="mb-4 text-lg font-semibold text-gray-700 dark:text-gray-200">

View File

@@ -112,12 +112,12 @@
<h3 class="text-lg font-semibold text-gray-700 dark:text-gray-200">
Subscription
</h3>
<button
@click="showSubscriptionModal = true"
<a
:href="'/admin/merchants/' + store?.merchant_id"
class="flex items-center px-3 py-1.5 text-sm font-medium text-purple-600 hover:text-purple-700 dark:text-purple-400 dark:hover:text-purple-300">
<span x-html="$icon('edit', 'w-4 h-4 mr-1')"></span>
Edit
</button>
<span x-html="$icon('external-link', 'w-4 h-4 mr-1')"></span>
Manage on Merchant Page
</a>
</div>
<!-- Tier and Status -->
@@ -206,11 +206,11 @@
<p class="text-sm font-medium text-yellow-800 dark:text-yellow-200">No Subscription Found</p>
<p class="text-sm text-yellow-700 dark:text-yellow-300">This store doesn't have a subscription yet.</p>
</div>
<button
@click="createSubscription()"
<a
:href="'/admin/merchants/' + store?.merchant_id"
class="ml-auto px-3 py-1.5 text-sm font-medium text-white bg-yellow-600 rounded-lg hover:bg-yellow-700">
Create Subscription
</button>
Manage on Merchant Page
</a>
</div>
</div>

View File

@@ -0,0 +1,609 @@
# Loyalty Module - User Journeys
## Personas
| # | Persona | Role / Auth | Description |
|---|---------|-------------|-------------|
| 1 | **Platform Admin** | `admin` role | Oversees all merchants' loyalty programs, views platform-wide stats, manages merchant settings |
| 2 | **Merchant Owner** | `store` role + owns merchant | Manages their merchant-wide loyalty program via the store interface. There is **no separate merchant owner UI** - loyalty programs are merchant-scoped but managed through any of the merchant's stores |
| 3 | **Store Staff / Team Member** | `store` role + store membership | Operates the POS terminal - scans cards, adds stamps/points, redeems rewards |
| 4 | **Customer (authenticated)** | Customer login | Views their loyalty card, balance, and transaction history |
| 5 | **Customer (anonymous)** | No auth | Browses program info, self-enrolls, downloads wallet passes |
!!! note "Merchant Owner vs Store Staff"
The loyalty module does **not** have a dedicated merchant owner interface. The merchant owner
accesses loyalty through the **store interface** (`/store/{store_code}/loyalty/...`). Since the
loyalty program is scoped at the merchant level (one program shared by all stores), the owner
can manage it from any of their stores. The difference is only in **permissions** - owners have
full access, team members have role-based access.
---
## Current Dev Database State
### Merchants & Stores
| Merchant | Owner | Stores |
|----------|-------|--------|
| WizaCorp Ltd. (id=1) | john.owner@wizacorp.com | WIZAMART, WIZAGADGETS, WIZAHOME |
| Fashion Group S.A. (id=2) | jane.owner@fashiongroup.com | FASHIONHUB, FASHIONOUTLET |
| BookWorld Publishing (id=3) | bob.owner@bookworld.com | BOOKSTORE, BOOKDIGITAL |
### Users
| Email | Role | Type |
|-------|------|------|
| admin@wizamart.com | admin | Platform admin |
| samir.boulahtit@gmail.com | admin | Platform admin |
| john.owner@wizacorp.com | store | Owner of WizaCorp (merchant 1) |
| jane.owner@fashiongroup.com | store | Owner of Fashion Group (merchant 2) |
| bob.owner@bookworld.com | store | Owner of BookWorld (merchant 3) |
| alice.manager@wizacorp.com | store | Team member (stores 1, 2) |
| charlie.staff@wizacorp.com | store | Team member (store 3) |
| diana.stylist@fashiongroup.com | store | Team member (stores 4, 5) |
| eric.sales@fashiongroup.com | store | Team member (store 5) |
| fiona.editor@bookworld.com | store | Team member (stores 6, 7) |
### Loyalty Data Status
| Table | Rows |
|-------|------|
| loyalty_programs | 0 |
| loyalty_cards | 0 |
| loyalty_transactions | 0 |
| merchant_loyalty_settings | 0 |
| staff_pins | 0 |
| merchant_subscriptions | 0 |
!!! warning "No loyalty programs exist yet"
All loyalty tables are empty. The first step in testing is to create a loyalty program
via the store interface. There are also **no subscriptions** set up, which may gate access
to the loyalty module depending on feature-gating configuration.
---
## Dev URLs (localhost:9999)
The dev server uses path-based platform routing: `http://localhost:9999/platforms/loyalty/...`
### 1. Platform Admin Pages
Login as: `admin@wizamart.com` or `samir.boulahtit@gmail.com`
| Page | Dev URL |
|------|---------|
| Programs Dashboard | `http://localhost:9999/platforms/loyalty/admin/loyalty/programs` |
| Analytics | `http://localhost:9999/platforms/loyalty/admin/loyalty/analytics` |
| WizaCorp Detail | `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/1` |
| WizaCorp Settings | `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/1/settings` |
| Fashion Group Detail | `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/2` |
| Fashion Group Settings | `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/2/settings` |
| BookWorld Detail | `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/3` |
| BookWorld Settings | `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/3/settings` |
### 2. Merchant Owner / Store Pages
Login as the store owner, then navigate to any of their stores.
**WizaCorp (john.owner@wizacorp.com):**
| Page | Dev URL |
|------|---------|
| Terminal | `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/terminal` |
| Cards | `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/cards` |
| Settings | `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/settings` |
| Stats | `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/stats` |
| Enroll Customer | `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/enroll` |
**Fashion Group (jane.owner@fashiongroup.com):**
| Page | Dev URL |
|------|---------|
| Terminal | `http://localhost:9999/platforms/loyalty/store/FASHIONHUB/loyalty/terminal` |
| Cards | `http://localhost:9999/platforms/loyalty/store/FASHIONHUB/loyalty/cards` |
| Settings | `http://localhost:9999/platforms/loyalty/store/FASHIONHUB/loyalty/settings` |
| Stats | `http://localhost:9999/platforms/loyalty/store/FASHIONHUB/loyalty/stats` |
| Enroll Customer | `http://localhost:9999/platforms/loyalty/store/FASHIONHUB/loyalty/enroll` |
**BookWorld (bob.owner@bookworld.com):**
| Page | Dev URL |
|------|---------|
| Terminal | `http://localhost:9999/platforms/loyalty/store/BOOKSTORE/loyalty/terminal` |
| Cards | `http://localhost:9999/platforms/loyalty/store/BOOKSTORE/loyalty/cards` |
| Settings | `http://localhost:9999/platforms/loyalty/store/BOOKSTORE/loyalty/settings` |
| Stats | `http://localhost:9999/platforms/loyalty/store/BOOKSTORE/loyalty/stats` |
| Enroll Customer | `http://localhost:9999/platforms/loyalty/store/BOOKSTORE/loyalty/enroll` |
### 3. Customer Storefront Pages
Login as a customer (e.g., `customer1@wizamart.example.com`).
!!! note "Store domain required"
Storefront pages require a store domain context. Only WIZAMART (`wizamart.shop`)
and FASHIONHUB (`fashionhub.store`) have domains configured. In dev, storefront
routes may need to be accessed through the store's domain or platform path.
| Page | Dev URL |
|------|---------|
| Loyalty Dashboard | `http://localhost:9999/platforms/loyalty/account/loyalty` |
| Transaction History | `http://localhost:9999/platforms/loyalty/account/loyalty/history` |
### 4. Public Pages (No Auth)
| Page | Dev URL |
|------|---------|
| Self-Enrollment | `http://localhost:9999/platforms/loyalty/loyalty/join` |
| Enrollment Success | `http://localhost:9999/platforms/loyalty/loyalty/join/success` |
### 5. API Endpoints
**Admin API** (prefix: `/platforms/loyalty/api/admin/loyalty/`):
| Method | Dev URL |
|--------|---------|
| GET | `http://localhost:9999/platforms/loyalty/api/admin/loyalty/programs` |
| GET | `http://localhost:9999/platforms/loyalty/api/admin/loyalty/stats` |
**Store API** (prefix: `/platforms/loyalty/api/store/loyalty/`):
| Method | Endpoint | Dev URL |
|--------|----------|---------|
| GET | program | `http://localhost:9999/platforms/loyalty/api/store/loyalty/program` |
| POST | program | `http://localhost:9999/platforms/loyalty/api/store/loyalty/program` |
| POST | stamp | `http://localhost:9999/platforms/loyalty/api/store/loyalty/stamp` |
| POST | points | `http://localhost:9999/platforms/loyalty/api/store/loyalty/points` |
| POST | enroll | `http://localhost:9999/platforms/loyalty/api/store/loyalty/cards/enroll` |
| POST | lookup | `http://localhost:9999/platforms/loyalty/api/store/loyalty/cards/lookup` |
**Storefront API** (prefix: `/platforms/loyalty/api/storefront/`):
| Method | Endpoint | Dev URL |
|--------|----------|---------|
| GET | program | `http://localhost:9999/platforms/loyalty/api/storefront/loyalty/program` |
| POST | enroll | `http://localhost:9999/platforms/loyalty/api/storefront/loyalty/enroll` |
| GET | card | `http://localhost:9999/platforms/loyalty/api/storefront/loyalty/card` |
| GET | transactions | `http://localhost:9999/platforms/loyalty/api/storefront/loyalty/transactions` |
**Public API** (prefix: `/platforms/loyalty/api/loyalty/`):
| Method | Endpoint | Dev URL |
|--------|----------|---------|
| GET | program | `http://localhost:9999/platforms/loyalty/api/loyalty/programs/WIZAMART` |
---
## Production URLs (loyalty.lu)
In production, the platform uses **domain-based routing** instead of the `/platforms/loyalty/` path prefix.
Store context is detected via **custom domains** (registered in `store_domains` table)
or **subdomains** of `loyalty.lu` (from `Store.subdomain`).
### URL Routing Summary
| Routing mode | Pattern | Example |
|-------------|---------|---------|
| Platform domain | `loyalty.lu/...` | Admin pages, public API |
| Custom domain | `{custom_domain}/...` | All store pages (store has custom domain) |
| Store subdomain | `{store_code}.loyalty.lu/...` | All store pages (no custom domain) |
### Case 1: Store with custom domain (e.g., `wizamart.shop`)
The store has a verified entry in the `store_domains` table. **All** store URLs
(storefront, store backend, store APIs) are served from the custom domain.
**Storefront (customer-facing):**
| Page | Production URL |
|------|----------------|
| Loyalty Dashboard | `https://wizamart.shop/account/loyalty` |
| Transaction History | `https://wizamart.shop/account/loyalty/history` |
| Self-Enrollment | `https://wizamart.shop/loyalty/join` |
| Enrollment Success | `https://wizamart.shop/loyalty/join/success` |
**Storefront API:**
| Method | Production URL |
|--------|----------------|
| GET card | `https://wizamart.shop/api/storefront/loyalty/card` |
| GET transactions | `https://wizamart.shop/api/storefront/loyalty/transactions` |
| POST enroll | `https://wizamart.shop/api/storefront/loyalty/enroll` |
| GET program | `https://wizamart.shop/api/storefront/loyalty/program` |
**Store backend (staff/owner):**
| Page | Production URL |
|------|----------------|
| Store Login | `https://wizamart.shop/store/WIZAMART/login` |
| Terminal | `https://wizamart.shop/store/WIZAMART/loyalty/terminal` |
| Cards | `https://wizamart.shop/store/WIZAMART/loyalty/cards` |
| Card Detail | `https://wizamart.shop/store/WIZAMART/loyalty/cards/{card_id}` |
| Settings | `https://wizamart.shop/store/WIZAMART/loyalty/settings` |
| Stats | `https://wizamart.shop/store/WIZAMART/loyalty/stats` |
| Enroll Customer | `https://wizamart.shop/store/WIZAMART/loyalty/enroll` |
**Store API:**
| Method | Production URL |
|--------|----------------|
| GET program | `https://wizamart.shop/api/store/loyalty/program` |
| POST program | `https://wizamart.shop/api/store/loyalty/program` |
| POST stamp | `https://wizamart.shop/api/store/loyalty/stamp` |
| POST points | `https://wizamart.shop/api/store/loyalty/points` |
| POST enroll | `https://wizamart.shop/api/store/loyalty/cards/enroll` |
| POST lookup | `https://wizamart.shop/api/store/loyalty/cards/lookup` |
### Case 2: Store without custom domain (uses platform subdomain)
The store has no entry in `store_domains`. **All** store URLs are served via a
subdomain of the platform domain: `{store_code}.loyalty.lu`.
**Storefront (customer-facing):**
| Page | Production URL |
|------|----------------|
| Loyalty Dashboard | `https://bookstore.loyalty.lu/account/loyalty` |
| Transaction History | `https://bookstore.loyalty.lu/account/loyalty/history` |
| Self-Enrollment | `https://bookstore.loyalty.lu/loyalty/join` |
| Enrollment Success | `https://bookstore.loyalty.lu/loyalty/join/success` |
**Storefront API:**
| Method | Production URL |
|--------|----------------|
| GET card | `https://bookstore.loyalty.lu/api/storefront/loyalty/card` |
| GET transactions | `https://bookstore.loyalty.lu/api/storefront/loyalty/transactions` |
| POST enroll | `https://bookstore.loyalty.lu/api/storefront/loyalty/enroll` |
| GET program | `https://bookstore.loyalty.lu/api/storefront/loyalty/program` |
**Store backend (staff/owner):**
| Page | Production URL |
|------|----------------|
| Store Login | `https://bookstore.loyalty.lu/store/BOOKSTORE/login` |
| Terminal | `https://bookstore.loyalty.lu/store/BOOKSTORE/loyalty/terminal` |
| Cards | `https://bookstore.loyalty.lu/store/BOOKSTORE/loyalty/cards` |
| Settings | `https://bookstore.loyalty.lu/store/BOOKSTORE/loyalty/settings` |
| Stats | `https://bookstore.loyalty.lu/store/BOOKSTORE/loyalty/stats` |
**Store API:**
| Method | Production URL |
|--------|----------------|
| GET program | `https://bookstore.loyalty.lu/api/store/loyalty/program` |
| POST stamp | `https://bookstore.loyalty.lu/api/store/loyalty/stamp` |
| POST points | `https://bookstore.loyalty.lu/api/store/loyalty/points` |
| POST enroll | `https://bookstore.loyalty.lu/api/store/loyalty/cards/enroll` |
| POST lookup | `https://bookstore.loyalty.lu/api/store/loyalty/cards/lookup` |
### Platform Admin & Public API (always on platform domain)
| Page / Endpoint | Production URL |
|-----------------|----------------|
| Admin Programs | `https://loyalty.lu/admin/loyalty/programs` |
| Admin Analytics | `https://loyalty.lu/admin/loyalty/analytics` |
| Admin Merchant Detail | `https://loyalty.lu/admin/loyalty/merchants/{id}` |
| Admin Merchant Settings | `https://loyalty.lu/admin/loyalty/merchants/{id}/settings` |
| Admin API - Programs | `GET https://loyalty.lu/api/admin/loyalty/programs` |
| Admin API - Stats | `GET https://loyalty.lu/api/admin/loyalty/stats` |
| Public API - Program | `GET https://loyalty.lu/api/loyalty/programs/WIZAMART` |
| Apple Wallet Pass | `GET https://loyalty.lu/api/loyalty/passes/apple/{serial}.pkpass` |
### Domain configuration per store (current DB state)
| Store | Custom Domain | Production URL |
|-------|---------------|----------------|
| WIZAMART | `wizamart.shop` | `https://wizamart.shop/...` |
| FASHIONHUB | `fashionhub.store` | `https://fashionhub.store/...` |
| WIZAGADGETS | _(none)_ | `https://wizagadgets.loyalty.lu/...` |
| WIZAHOME | _(none)_ | `https://wizahome.loyalty.lu/...` |
| FASHIONOUTLET | _(none)_ | `https://fashionoutlet.loyalty.lu/...` |
| BOOKSTORE | _(none)_ | `https://bookstore.loyalty.lu/...` |
| BOOKDIGITAL | _(none)_ | `https://bookdigital.loyalty.lu/...` |
!!! info "`{store_domain}` in journey URLs"
In the journeys below, `{store_domain}` refers to the store's resolved domain:
- **Custom domain**: `wizamart.shop` (from `store_domains` table)
- **Subdomain fallback**: `wizamart.loyalty.lu` (from `Store.subdomain` + platform domain)
---
## User Journeys
### Journey 1: Merchant Owner - First-Time Setup
**Persona:** Merchant Owner (e.g., john.owner@wizacorp.com)
**Goal:** Set up a loyalty program for their merchant
```mermaid
flowchart TD
A[Login as store owner] --> B[Navigate to store loyalty settings]
B --> C{Program exists?}
C -->|No| D[Create loyalty program]
D --> E[Choose type: stamps / points / hybrid]
E --> F[Configure program settings]
F --> G[Set branding - colors, logo]
G --> H[Configure anti-fraud settings]
H --> I[Create staff PINs]
I --> J[Program is live]
C -->|Yes| K[View/edit existing program]
```
**Steps:**
1. Login as `john.owner@wizacorp.com` at:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/login`
- Prod (custom domain): `https://wizamart.shop/store/WIZAMART/login`
- Prod (subdomain): `https://wizamart.loyalty.lu/store/WIZAMART/login`
2. Navigate to loyalty settings:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/settings`
- Prod (custom domain): `https://wizamart.shop/store/WIZAMART/loyalty/settings`
- Prod (subdomain): `https://wizamart.loyalty.lu/store/WIZAMART/loyalty/settings`
3. Create a new loyalty program:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/program`
- Prod: `POST https://{store_domain}/api/store/loyalty/program`
4. Choose loyalty type (stamps, points, or hybrid)
5. Configure program parameters (stamp target, points-per-euro, rewards)
6. Set branding (card color, logo, hero image)
7. Configure anti-fraud (cooldown, daily limits, PIN requirements)
8. Create staff PINs:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/pins`
- Prod: `POST https://{store_domain}/api/store/loyalty/pins`
9. Verify program is live - check from another store (same merchant):
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAGADGETS/loyalty/settings`
- Prod (subdomain): `https://wizagadgets.loyalty.lu/store/WIZAGADGETS/loyalty/settings`
**Expected blockers in current state:**
- No subscriptions exist - feature gating may prevent program creation
- No loyalty programs exist - this is the first journey to test
---
### Journey 2: Store Staff - Daily Operations (Stamps)
**Persona:** Store Staff (e.g., alice.manager@wizacorp.com)
**Goal:** Process customer loyalty stamp transactions
```mermaid
flowchart TD
A[Open terminal] --> B[Customer presents card/QR]
B --> C[Scan/lookup card]
C --> D[Enter staff PIN]
D --> E[Add stamp]
E --> F{Target reached?}
F -->|Yes| G[Prompt: Redeem reward?]
G -->|Yes| H[Redeem stamps for reward]
G -->|No| I[Save for later]
F -->|No| J[Done - show updated count]
H --> J
I --> J
```
**Steps:**
1. Login as `alice.manager@wizacorp.com` and open the terminal:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/terminal`
- Prod: `https://{store_domain}/store/WIZAMART/loyalty/terminal`
2. Scan customer QR code or enter card number:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/cards/lookup`
- Prod: `POST https://{store_domain}/api/store/loyalty/cards/lookup`
3. Enter staff PIN for verification
4. Add stamp:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/stamp`
- Prod: `POST https://{store_domain}/api/store/loyalty/stamp`
5. If target reached, redeem reward:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/stamp/redeem`
- Prod: `POST https://{store_domain}/api/store/loyalty/stamp/redeem`
6. View updated card:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/cards/{card_id}`
- Prod: `https://{store_domain}/store/WIZAMART/loyalty/cards/{card_id}`
7. Browse all cards:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/cards`
- Prod: `https://{store_domain}/store/WIZAMART/loyalty/cards`
**Anti-fraud scenarios to test:**
- Cooldown rejection (stamp within 15 min)
- Daily limit hit (max 5 stamps/day)
- PIN lockout (5 failed attempts)
---
### Journey 3: Store Staff - Daily Operations (Points)
**Persona:** Store Staff (e.g., alice.manager@wizacorp.com)
**Goal:** Process customer loyalty points from purchase
```mermaid
flowchart TD
A[Open terminal] --> B[Customer presents card]
B --> C[Scan/lookup card]
C --> D[Enter purchase amount]
D --> E[Enter staff PIN]
E --> F[Points calculated & added]
F --> G{Enough for reward?}
G -->|Yes| H[Offer redemption]
G -->|No| I[Done - show balance]
H --> I
```
**Steps:**
1. Open the terminal:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/terminal`
- Prod: `https://{store_domain}/store/WIZAMART/loyalty/terminal`
2. Lookup card:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/cards/lookup`
- Prod: `POST https://{store_domain}/api/store/loyalty/cards/lookup`
3. Enter purchase amount (e.g., 25.00 EUR)
4. Earn points (auto-calculated at 10 pts/EUR = 250 points):
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/points`
- Prod: `POST https://{store_domain}/api/store/loyalty/points`
5. If enough balance, redeem points for reward:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/points/redeem`
- Prod: `POST https://{store_domain}/api/store/loyalty/points/redeem`
6. Check store-level stats:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/stats`
- Prod: `https://{store_domain}/store/WIZAMART/loyalty/stats`
---
### Journey 4: Customer Self-Enrollment
**Persona:** Anonymous Customer
**Goal:** Join a merchant's loyalty program
```mermaid
flowchart TD
A[See QR code at store counter] --> B[Scan QR / visit enrollment page]
B --> C[Fill in details - email, name]
C --> D[Submit enrollment]
D --> E[Receive card number]
E --> F[Optional: Add to Apple/Google Wallet]
F --> G[Start collecting stamps/points]
```
**Steps:**
1. Visit the public enrollment page:
- Dev: `http://localhost:9999/platforms/loyalty/loyalty/join`
- Prod (custom domain): `https://wizamart.shop/loyalty/join`
- Prod (subdomain): `https://bookstore.loyalty.lu/loyalty/join`
2. Fill in enrollment form (email, name)
3. Submit enrollment:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/storefront/loyalty/enroll`
- Prod (custom domain): `POST https://wizamart.shop/api/storefront/loyalty/enroll`
- Prod (subdomain): `POST https://bookstore.loyalty.lu/api/storefront/loyalty/enroll`
4. Redirected to success page:
- Dev: `http://localhost:9999/platforms/loyalty/loyalty/join/success?card=XXXX-XXXX-XXXX`
- Prod (custom domain): `https://wizamart.shop/loyalty/join/success?card=XXXX-XXXX-XXXX`
- Prod (subdomain): `https://bookstore.loyalty.lu/loyalty/join/success?card=XXXX-XXXX-XXXX`
5. Optionally download Apple Wallet pass:
- Dev: `GET http://localhost:9999/platforms/loyalty/api/loyalty/passes/apple/{serial_number}.pkpass`
- Prod: `GET https://loyalty.lu/api/loyalty/passes/apple/{serial_number}.pkpass`
---
### Journey 5: Customer - View Loyalty Status
**Persona:** Authenticated Customer (e.g., `customer1@wizamart.example.com`)
**Goal:** Check loyalty balance and history
**Steps:**
1. Login as customer at the storefront
2. View loyalty dashboard (card balance, available rewards):
- Dev: `http://localhost:9999/platforms/loyalty/account/loyalty`
- Prod (custom domain): `https://wizamart.shop/account/loyalty`
- Prod (subdomain): `https://bookstore.loyalty.lu/account/loyalty`
- API Dev: `GET http://localhost:9999/platforms/loyalty/api/storefront/loyalty/card`
- API Prod: `GET https://wizamart.shop/api/storefront/loyalty/card`
3. View full transaction history:
- Dev: `http://localhost:9999/platforms/loyalty/account/loyalty/history`
- Prod (custom domain): `https://wizamart.shop/account/loyalty/history`
- Prod (subdomain): `https://bookstore.loyalty.lu/account/loyalty/history`
- API Dev: `GET http://localhost:9999/platforms/loyalty/api/storefront/loyalty/transactions`
- API Prod: `GET https://wizamart.shop/api/storefront/loyalty/transactions`
---
### Journey 6: Platform Admin - Oversight
**Persona:** Platform Admin (`admin@wizamart.com` or `samir.boulahtit@gmail.com`)
**Goal:** Monitor all loyalty programs across merchants
**Steps:**
1. Login as admin
2. View all programs:
- Dev: `http://localhost:9999/platforms/loyalty/admin/loyalty/programs`
- Prod: `https://loyalty.lu/admin/loyalty/programs`
3. View platform-wide analytics:
- Dev: `http://localhost:9999/platforms/loyalty/admin/loyalty/analytics`
- Prod: `https://loyalty.lu/admin/loyalty/analytics`
4. Drill into WizaCorp's program:
- Dev: `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/1`
- Prod: `https://loyalty.lu/admin/loyalty/merchants/1`
5. Manage WizaCorp's merchant-level settings:
- Dev: `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/1/settings`
- Prod: `https://loyalty.lu/admin/loyalty/merchants/1/settings`
- API Dev: `PATCH http://localhost:9999/platforms/loyalty/api/admin/loyalty/merchants/1/settings`
- API Prod: `PATCH https://loyalty.lu/api/admin/loyalty/merchants/1/settings`
6. Adjust settings: PIN policy, self-enrollment toggle, void permissions
7. Check other merchants:
- Dev: `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/2`
- Prod: `https://loyalty.lu/admin/loyalty/merchants/2`
---
### Journey 7: Void / Return Flow
**Persona:** Store Staff (e.g., alice.manager@wizacorp.com)
**Goal:** Reverse a loyalty transaction (customer return)
**Steps:**
1. Open terminal and lookup card:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/terminal`
- Prod: `https://{store_domain}/store/WIZAMART/loyalty/terminal`
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/cards/lookup`
- Prod: `POST https://{store_domain}/api/store/loyalty/cards/lookup`
2. View the card's transaction history to find the transaction to void:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/cards/{card_id}`
- Prod: `https://{store_domain}/store/WIZAMART/loyalty/cards/{card_id}`
- API Dev: `GET http://localhost:9999/platforms/loyalty/api/store/loyalty/cards/{card_id}/transactions`
- API Prod: `GET https://{store_domain}/api/store/loyalty/cards/{card_id}/transactions`
3. Void a stamp transaction:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/stamp/void`
- Prod: `POST https://{store_domain}/api/store/loyalty/stamp/void`
4. Or void a points transaction:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/points/void`
- Prod: `POST https://{store_domain}/api/store/loyalty/points/void`
5. Verify: original and void transactions are linked in the audit log
---
### Journey 8: Cross-Store Redemption
**Persona:** Customer + Store Staff at two different stores
**Goal:** Customer earns at Store A, redeems at Store B (same merchant)
**Precondition:** Cross-location redemption must be enabled in merchant settings:
- Dev: `http://localhost:9999/platforms/loyalty/admin/loyalty/merchants/1/settings`
- Prod: `https://loyalty.lu/admin/loyalty/merchants/1/settings`
**Steps:**
1. Staff at WIZAMART adds stamps to customer's card:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAMART/loyalty/terminal`
- Prod: `https://{store_domain}/store/WIZAMART/loyalty/terminal`
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/stamp`
- Prod: `POST https://{store_domain}/api/store/loyalty/stamp`
2. Customer visits WIZAGADGETS
3. Staff at WIZAGADGETS looks up the same card:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAGADGETS/loyalty/terminal`
- Prod: `https://{store_domain}/store/WIZAGADGETS/loyalty/terminal`
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/cards/lookup`
- Prod: `POST https://{store_domain}/api/store/loyalty/cards/lookup`
4. Card is found (same merchant) with accumulated stamps
5. Staff at WIZAGADGETS redeems the reward:
- Dev: `POST http://localhost:9999/platforms/loyalty/api/store/loyalty/stamp/redeem`
- Prod: `POST https://{store_domain}/api/store/loyalty/stamp/redeem`
6. Verify transaction history shows both stores:
- Dev: `http://localhost:9999/platforms/loyalty/store/WIZAGADGETS/loyalty/cards/{card_id}`
- Prod: `https://{store_domain}/store/WIZAGADGETS/loyalty/cards/{card_id}`
---
## Recommended Test Order
1. **Journey 1** - Create a program first (nothing else works without this)
2. **Journey 4** - Enroll a test customer
3. **Journey 2 or 3** - Process stamps/points
4. **Journey 5** - Verify customer can see their data
5. **Journey 7** - Test void/return
6. **Journey 8** - Test cross-store (enroll via WIZAMART, redeem via WIZAGADGETS)
7. **Journey 6** - Admin overview (verify data appears correctly)

View File

@@ -0,0 +1,239 @@
# Session Note: IMPORT-002 Cross-Module Dependency Cleanup
**Date:** 2026-02-09
**Status:** Part A complete, Part B deferred
**Priority:** Medium — architecture cleanup (no runtime crashes, but undeclared dependencies)
**Follows:** SESSION_NOTE_2026-02-03_module-dependency-redesign.md
---
## Context
After fixing all IMPORT-001 violations (core → optional) in previous sessions, the architecture validator now passes with **0 errors and 40 IMPORT-002 warnings**. These are all optional → optional cross-module imports without declared dependencies.
Billing module is fully clean — zero violations.
---
## Current `requires=[]` Declarations
| Module | Currently declares |
|--------|--------------------|
| catalog | `requires=["inventory"]` |
| marketplace | `requires=["inventory"]` |
| orders | `requires=["payments"]` |
| inventory | *(nothing)* |
| cart | `requires=["inventory"]` |
| analytics | *(nothing)* |
---
## Part A: Declare Dependencies (30 warnings — trivial fixes)
These are **hard dependencies** where the source module fundamentally cannot function without the target. Fix is adding to `requires=[]` in each module's `definition.py`.
### A1. marketplace → add `catalog`, `orders`
**Change:** `requires=["inventory"]``requires=["inventory", "catalog", "orders"]`
**File:** `app/modules/marketplace/definition.py`
**Warnings resolved:** 9
| File | Imports from | What |
|------|-------------|------|
| `services/marketplace_product_service.py:862-863` | catalog | `Product` model |
| `services/letzshop_export_service.py:16` | catalog | `Product` model |
| `services/letzshop/order_service.py:26-27` | catalog, orders | `Order`, `OrderItem`, `Product` models |
| `services/letzshop/order_service.py:17` | orders | `order_service` |
| `services/marketplace_product_service.py:1006` | orders | order-related import |
| `routes/api/admin_letzshop.py:23-24` | orders | `OrderHasUnresolvedExceptionsException`, `order_item_exception_service` |
| `routes/api/store_letzshop.py:24-25` | orders | `OrderHasUnresolvedExceptionsException`, `order_item_exception_service` |
**Rationale:** Marketplace syncs products and imports orders from Letzshop. No products or orders = no marketplace.
---
### A2. orders → add `catalog`, `inventory`
**Change:** `requires=["payments"]``requires=["payments", "catalog", "inventory"]`
**File:** `app/modules/orders/definition.py`
**Warnings resolved:** 9
| File | Imports from | What |
|------|-------------|------|
| `services/order_item_exception_service.py:22,26` | catalog | `ProductNotFoundException`, `Product` |
| `services/order_service.py:51` | catalog | `Product` model |
| `services/order_inventory_service.py:17-28` | inventory | `Inventory`, `InventoryTransaction`, `TransactionType`, exceptions, schemas, service (6 imports) |
| `services/order_service.py:29` | inventory | `InsufficientInventoryException` |
**Rationale:** Order line items reference products. Order fulfillment manages stock via `order_inventory_service.py`. Both are fundamental.
---
### A3. inventory → add `catalog`
**Change:** `requires=[]``requires=["catalog"]`
**File:** `app/modules/inventory/definition.py`
**Warnings resolved:** 7
| File | Imports from | What |
|------|-------------|------|
| `services/inventory_service.py:15,33` | catalog | `ProductNotFoundException`, `Product` |
| `services/inventory_transaction_service.py:14,19` | catalog | `ProductNotFoundException`, `Product` |
| `services/inventory_import_service.py:27` | catalog | `Product` |
**Rationale:** Every inventory record tracks stock for a product. No products = no inventory.
---
### A4. cart → add `catalog`
**Change:** `requires=["inventory"]``requires=["inventory", "catalog"]`
**File:** `app/modules/cart/definition.py`
**Warnings resolved:** 2
| File | Imports from | What |
|------|-------------|------|
| `services/cart_service.py:24,27` | catalog | `ProductNotFoundException`, `Product` |
**Rationale:** Cart items are products. Can't add to cart without the Product model.
---
### Part A Subtotal: 4 one-line edits → 27 warnings resolved
*(Note: 3 remaining "declare dependency" warnings are covered by marketplace→analytics which is categorized under Part B as provider pattern instead.)*
---
## Part B: Provider Pattern (13 warnings — moderate refactoring)
These are **optional enrichment** where the source module works without the target. Need actual refactoring to conditionally load/call.
### B1. catalog → marketplace (3 warnings)
| File | What |
|------|------|
| `schemas/product.py:14` | `MarketplaceProductResponse` |
| `schemas/catalog.py:14` | `MarketplaceProductResponse` |
| `services/product_service.py:21` | `MarketplaceProduct` model |
**Why optional:** Products exist independently. Marketplace sync status is display enrichment.
**Fix approach:** Make marketplace fields `Optional` in schemas, populate via provider if marketplace is enabled.
---
### B2. marketplace → analytics (2 warnings)
| File | What |
|------|------|
| `routes/api/admin_marketplace.py:17` | `stats_service` |
| `routes/api/admin_marketplace.py:29` | `ImportStatsResponse` |
**Why optional:** Marketplace import/sync works without analytics. Stats on admin page are dashboard decoration.
**Fix approach:** Conditionally call stats aggregator, return empty stats if analytics disabled.
---
### B3. orders → marketplace (1 warning)
| File | What |
|------|------|
| `services/order_service.py:50` | `MarketplaceProduct`, `MarketplaceProductTranslation` |
**Why optional:** Orders work without marketplace. Enriches order display with Letzshop product info.
**Fix approach:** Conditionally join marketplace data when rendering, skip if module disabled.
---
### B4. inventory → orders (2 warnings)
| File | What |
|------|------|
| `services/inventory_transaction_service.py:15,18` | `OrderNotFoundException`, `Order` |
**Why optional:** Inventory tracks stock independently. Order reference on transactions is an audit back-reference, not functional.
**Fix approach:** Store `order_id` as nullable FK, resolve order details via provider for display.
---
### B5. inventory → marketplace (1 warning)
| File | What |
|------|------|
| `services/inventory_service.py:606` | marketplace import |
**Why optional:** Optional sync enrichment.
**Fix approach:** Conditional import or provider call.
---
### B6. analytics → catalog, orders, inventory, marketplace (4 warnings)
| File | What |
|------|------|
| `services/stats_service.py:23` | `Inventory` model |
| `services/stats_service.py:24` | `MarketplaceImportJob`, `MarketplaceProduct` models |
| `services/stats_service.py:25` | `Order` model |
| `services/stats_service.py:26` | `Product` model |
**Why optional:** Analytics aggregates everything — should report on whatever modules are enabled, not crash if one is disabled. Every module already exposes a `metrics_provider`.
**Fix approach:** Refactor `stats_service` to use module `metrics_provider` pattern instead of direct model imports. Cleanest candidate for provider pattern.
---
### Part B Subtotal: 6 refactors → 13 warnings resolved
---
## Resulting Dependency Tree
After all fixes, the clean module dependency graph:
```
catalog (foundational — products)
├── inventory (requires: catalog)
├── cart (requires: catalog, inventory)
├── orders (requires: catalog, inventory, payments)
├── marketplace (requires: catalog, orders, inventory)
└── analytics (no hard deps — all via providers)
```
---
## Execution Log
### Part A — Completed 2026-02-09
- [x] **A1** — marketplace: declare `catalog`, `orders`
- [x] **A2** — orders: declare `catalog`, `inventory`
- [x] **A3** — inventory: declare `catalog`
- [x] **A4** — cart: declare `catalog`
**Result:** 40 warnings → 13 warnings (27 resolved)
### Part B — Deferred (provider pattern refactors)
Remaining 13 warnings require provider pattern refactoring. To be tackled in a future session.
- [ ] **B6** — analytics: provider pattern (cleanest, biggest impact — 4 warnings)
- [ ] **B1** — catalog: provider pattern for marketplace enrichment (3 warnings)
- [ ] **B2** — marketplace: provider pattern for analytics stats (2 warnings)
- [ ] **B4** — inventory: provider pattern for order back-references (2 warnings)
- [ ] **B3** — orders: provider pattern for marketplace enrichment (1 warning)
- [ ] **B5** — inventory: provider pattern for marketplace sync (1 warning)
---
## Validation Target
Current state:
```
$ python scripts/validate_architecture.py
→ 0 errors, 13 warnings (all IMPORT-002 — provider pattern candidates)
```
After Part B complete:
```
$ python scripts/validate_architecture.py
→ 0 errors, 0 warnings
```

View File

@@ -108,7 +108,10 @@ class PlatformContextManager:
# Method 3: Default platform for localhost without /platforms/ prefix
# This serves the main marketing site
# Store routes require explicit platform via /platforms/{code}/store/...
if host_without_port in ["localhost", "127.0.0.1"]:
if path.startswith(("/store/", "/stores/")):
return None # No platform — handlers will show appropriate error
return {
"path_prefix": DEFAULT_PLATFORM_CODE,
"detection_method": "default",
@@ -138,6 +141,7 @@ class PlatformContextManager:
if context.get("detection_method") == "domain":
domain = context.get("domain")
if domain:
# Try Platform.domain first
platform = (
db.query(Platform)
.filter(Platform.domain == domain)
@@ -150,6 +154,26 @@ class PlatformContextManager:
f"[PLATFORM] Platform found via domain: {domain}{platform.name}"
)
return platform
# Fallback: Check StoreDomain for custom store domains
from app.modules.tenancy.models import StoreDomain
store_domain = (
db.query(StoreDomain)
.filter(StoreDomain.domain == domain, StoreDomain.is_active.is_(True))
.first()
)
if store_domain and store_domain.platform_id:
platform = (
db.query(Platform)
.filter(Platform.id == store_domain.platform_id, Platform.is_active.is_(True))
.first()
)
if platform:
logger.debug(
f"[PLATFORM] Platform found via store domain: {domain}{platform.name}"
)
return platform
logger.debug(f"[PLATFORM] No platform found for domain: {domain}")
# Method 2: Path-prefix lookup
@@ -399,7 +423,10 @@ class PlatformContextMiddleware:
}
# Method 3: Default for localhost - serves main marketing site
# Store routes require explicit platform via /platforms/{code}/store/...
if host_without_port in ["localhost", "127.0.0.1"]:
if path.startswith(("/store/", "/stores/")):
return None # No platform — handlers will show appropriate error
return {
"path_prefix": DEFAULT_PLATFORM_CODE,
"detection_method": "default",

View File

@@ -221,6 +221,8 @@ nav:
- Store Onboarding: features/store-onboarding.md
- Subscription & Billing: features/subscription-billing.md
- Email System: features/email-system.md
- User Journeys:
- Loyalty: features/user-journeys/loyalty.md
# --- User Guides ---
- User Guides:

View File

@@ -40,6 +40,7 @@ from app.modules.tenancy.services.permission_discovery_service import (
from middleware.auth import AuthManager
from app.modules.tenancy.models import AdminSetting, Platform
from app.modules.tenancy.models import User
from app.modules.billing.models.subscription import SubscriptionTier
# Register all models with SQLAlchemy so string-based relationships resolve
for _mod in [
@@ -380,6 +381,72 @@ def create_admin_settings(db: Session) -> int:
return settings_created
def create_subscription_tiers(db: Session, platform: Platform) -> int:
    """Seed the given platform with the default OMS subscription tiers.

    Tiers that already exist (matched by ``code``) are left untouched and
    reported with a warning; newly inserted tiers are flushed immediately
    so they receive primary keys within the current transaction.

    Returns the number of tiers actually created.
    """
    # (code, name, monthly cents, annual cents, public?, display order)
    # Enterprise has no annual price and is hidden from public listings.
    tier_rows = [
        ("essential", "Essential", 2900, 29000, True, 10),
        ("professional", "Professional", 7900, 79000, True, 20),
        ("business", "Business", 14900, 149000, True, 30),
        ("enterprise", "Enterprise", 29900, None, False, 40),
    ]

    created_count = 0
    for code, name, monthly, annual, public, order in tier_rows:
        existing = db.execute(
            select(SubscriptionTier).where(SubscriptionTier.code == code)
        ).scalar_one_or_none()
        if existing:
            print_warning(f"Tier already exists: {existing.name} ({existing.code})")
            continue

        new_tier = SubscriptionTier(
            platform_id=platform.id,
            code=code,
            name=name,
            price_monthly_cents=monthly,
            price_annual_cents=annual,
            is_public=public,
            display_order=order,
            is_active=True,
        )
        db.add(new_tier)
        db.flush()
        created_count += 1
        print_success(f"Created tier: {new_tier.name} ({new_tier.code})")

    return created_count
def verify_rbac_schema(db: Session) -> bool:
"""Verify that RBAC schema is in place."""
@@ -456,6 +523,14 @@ def initialize_production(db: Session, auth_manager: AuthManager):
print_step(5, "Creating admin settings...")
create_admin_settings(db)
# Step 6: Seed subscription tiers
print_step(6, "Seeding subscription tiers...")
oms_platform = next((p for p in platforms if p.code == "oms"), None)
if oms_platform:
create_subscription_tiers(db, oms_platform)
else:
print_warning("OMS platform not found, skipping tier seeding")
# Commit all changes
db.commit()
print_success("All changes committed")
@@ -470,11 +545,13 @@ def print_summary(db: Session):
user_count = db.query(User).filter(User.role == "admin").count()
setting_count = db.query(AdminSetting).count()
platform_count = db.query(Platform).count()
tier_count = db.query(SubscriptionTier).filter(SubscriptionTier.is_active.is_(True)).count()
print("\n📊 Database Status:")
print(f" Admin users: {user_count}")
print(f" Platforms: {platform_count}")
print(f" Admin settings: {setting_count}")
print(f" Sub. tiers: {tier_count}")
print("\n" + "=" * 70)
print("🔐 ADMIN CREDENTIALS")

View File

@@ -9,9 +9,11 @@ Usage:
python scripts/show_urls.py # Show all URLs
python scripts/show_urls.py --dev # Development URLs only
python scripts/show_urls.py --prod # Production URLs only
python scripts/show_urls.py --check # Check dev URLs with curl
"""
import argparse
import subprocess
import sys
from sqlalchemy import text
@@ -60,11 +62,107 @@ def get_store_domains(db):
).fetchall()
def get_store_platform_map(db):
    """
    Build a store-to-platform mapping from the store_platforms junction table.

    Returns dict: store_id -> list of platform codes, with the primary
    platform first (rows are ordered by is_primary DESC).
    """
    query = text(
        "SELECT sp.store_id, p.code AS platform_code "
        "FROM store_platforms sp "
        "JOIN platforms p ON p.id = sp.platform_id "
        "WHERE sp.is_active = true "
        "ORDER BY sp.store_id, sp.is_primary DESC"
    )
    store_to_codes = {}
    for row in db.execute(query).fetchall():
        if row.store_id not in store_to_codes:
            store_to_codes[row.store_id] = []
        store_to_codes[row.store_id].append(row.platform_code)
    return store_to_codes
def status_badge(is_active):
    """Render an activity flag as a short human-readable label."""
    if is_active:
        return "active"
    return "INACTIVE"
def print_dev_urls(platforms, stores, store_domains):
def _store_dev_dashboard_url(store_code, platform_code=None):
    """Return the dev-mode dashboard URL for a store.

    Stores on the "main" platform (or with no platform) live at the bare
    /store/ path; any other platform gets the /platforms/{code}/ prefix.
    """
    if not platform_code or platform_code == "main":
        return f"{DEV_BASE}/store/{store_code}/"
    return f"{DEV_BASE}/platforms/{platform_code}/store/{store_code}/"
def _store_dev_login_url(store_code, platform_code=None):
    """Return the dev-mode login URL for a store.

    Mirrors _store_dev_dashboard_url: the /platforms/{code}/ prefix is
    added only for platforms other than "main".
    """
    if not platform_code or platform_code == "main":
        return f"{DEV_BASE}/store/{store_code}/login"
    return f"{DEV_BASE}/platforms/{platform_code}/store/{store_code}/login"
def collect_dev_urls(platforms, stores, store_domains, store_platform_map):
    """
    Collect all dev URLs with labels and expected status codes.

    Returns list of (label, url, expected_codes) tuples, grouped as:
    admin/core endpoints, platform homes, store logins, storefronts,
    then public store-info API endpoints.
    """
    # Admin / core endpoints first.
    collected = [
        ("Admin Login", f"{DEV_BASE}/admin/login", [200]),
        ("Admin Dashboard", f"{DEV_BASE}/admin/", [200, 302]),
        ("API Docs", f"{DEV_BASE}/docs", [200]),
        ("Health", f"{DEV_BASE}/health", [200]),
    ]

    # Platform home pages ("main" serves the root).
    for platform in platforms:
        if not platform.is_active:
            continue
        if platform.code == "main":
            home = f"{DEV_BASE}/"
        else:
            home = f"{DEV_BASE}/platforms/{platform.code}/"
        collected.append((f"Platform: {platform.name}", home, [200]))

    active_stores = [s for s in stores if s.is_active]

    # Store login pages (dashboards redirect here when unauthenticated);
    # one entry per assigned platform, or a bare URL when unassigned.
    for store in active_stores:
        assigned_codes = store_platform_map.get(store.id, [])
        if assigned_codes:
            for platform_code in assigned_codes:
                collected.append((
                    f"Store Login: {store.name} ({platform_code})",
                    _store_dev_login_url(store.store_code, platform_code),
                    [200],
                ))
        else:
            collected.append((
                f"Store Login: {store.name}",
                _store_dev_login_url(store.store_code),
                [200],
            ))

    # Public storefronts.
    for store in active_stores:
        collected.append((
            f"Storefront: {store.name}",
            f"{DEV_BASE}/stores/{store.store_code}/storefront/",
            [200, 302],
        ))

    # Store info API (public, no auth needed).
    for store in active_stores:
        collected.append((
            f"Store API: {store.name}",
            f"{DEV_BASE}/api/v1/store/info/{store.store_code}",
            [200],
        ))

    return collected
def print_dev_urls(platforms, stores, store_domains, store_platform_map):
"""Print all development URLs."""
print()
print("DEVELOPMENT URLS")
@@ -84,24 +182,16 @@ def print_dev_urls(platforms, stores, store_domains):
print(" PLATFORMS")
for p in platforms:
tag = f" [{status_badge(p.is_active)}]" if not p.is_active else ""
prefix = p.path_prefix or ""
if p.code == "main":
print(f" {p.name}{tag}")
print(f" Home: {DEV_BASE}/")
else:
print(f" {p.name} ({p.code}){tag}")
if prefix:
print(f" Home: {DEV_BASE}/platforms/{p.code}/")
else:
print(f" Home: {DEV_BASE}/platforms/{p.code}/")
print(f" Home: {DEV_BASE}/platforms/{p.code}/")
# Stores
print()
print(" STORE DASHBOARDS")
domains_by_store = {}
for vd in store_domains:
domains_by_store.setdefault(vd.store_id, []).append(vd)
current_merchant = None
for v in stores:
if v.merchant_name != current_merchant:
@@ -110,9 +200,19 @@ def print_dev_urls(platforms, stores, store_domains):
tag = f" [{status_badge(v.is_active)}]" if not v.is_active else ""
code = v.store_code
# Get platform(s) for this store
platform_codes = store_platform_map.get(v.id, [])
print(f" {v.name} ({code}){tag}")
print(f" Dashboard: {DEV_BASE}/store/{code}/")
print(f" API: {DEV_BASE}/api/v1/store/{code}/")
if platform_codes:
for pc in platform_codes:
print(f" Login: {_store_dev_login_url(code, pc)}")
print(f" Dashboard: {_store_dev_dashboard_url(code, pc)}")
else:
print(f" Login: {_store_dev_login_url(code)}")
print(f" Dashboard: {_store_dev_dashboard_url(code)}")
print(f" (!) No platform assigned - use /platforms/{{code}}/store/{code}/ for platform context")
# Storefronts
print()
@@ -208,20 +308,81 @@ def print_prod_urls(platforms, stores, store_domains):
print(f" Custom: https://{vd.domain}/{suffix}")
def check_dev_urls(platforms, stores, store_domains, store_platform_map) -> bool:
    """Curl all dev URLs and report reachability.

    Runs ``curl`` once per URL from collect_dev_urls(), compares the final
    HTTP status against that URL's expected codes, and prints one PASS/FAIL
    line per URL followed by a summary of failures.

    Returns:
        True when every URL responded with one of its expected codes.
    """
    urls = collect_dev_urls(platforms, stores, store_domains, store_platform_map)

    print()
    print("URL HEALTH CHECK")
    print(f"Base: {DEV_BASE}")
    print(SEPARATOR)
    print()

    passed = 0
    failed = 0
    errors = []  # (label, url, observed_status, expected_codes) for the summary

    for label, url, expected_codes in urls:
        try:
            # -L follows redirects (the status checked is the final hop);
            # --max-time bounds each transfer, with the subprocess timeout
            # as a backstop against a hung curl process.
            result = subprocess.run(
                ["curl", "-s", "-o", "/dev/null", "-w", "%{http_code}", "-L", "--max-time", "5", url],
                capture_output=True, text=True, timeout=10,
            )
            status = int(result.stdout.strip())
            if status in expected_codes:
                print(f" PASS {status} {label}")
                print(f" {url}")
                passed += 1
            else:
                expected_str = "/".join(str(c) for c in expected_codes)
                print(f" FAIL {status} {label} (expected {expected_str})")
                print(f" {url}")
                failed += 1
                errors.append((label, url, status, expected_codes))
        except (subprocess.TimeoutExpired, FileNotFoundError, ValueError) as e:
            # TimeoutExpired: request hung past the backstop; FileNotFoundError:
            # curl not installed; ValueError: curl emitted no parseable status.
            print(f" ERR --- {label} ({e})")
            print(f" {url}")
            failed += 1
            # Status 0 marks "no HTTP status observed" in the summary.
            errors.append((label, url, 0, expected_codes))

    # Summary
    print()
    print(SEPARATOR)
    total = passed + failed
    print(f" Results: {passed}/{total} passed", end="")
    if failed:
        print(f", {failed} failed")
    else:
        print()

    if errors:
        print()
        print(" Failed URLs:")
        for label, url, status, expected in errors:
            expected_str = "/".join(str(c) for c in expected)
            # 'status or ERR' renders the 0 sentinel from the except path as ERR.
            print(f" [{status or 'ERR'}] {url} (expected {expected_str})")
    print()

    return failed == 0
def main():
parser = argparse.ArgumentParser(description="Show all platform URLs")
parser.add_argument("--dev", action="store_true", help="Development URLs only")
parser.add_argument("--prod", action="store_true", help="Production URLs only")
parser.add_argument("--check", action="store_true", help="Check dev URLs with curl")
args = parser.parse_args()
show_dev = args.dev or (not args.dev and not args.prod)
show_prod = args.prod or (not args.dev and not args.prod)
show_dev = args.dev or (not args.dev and not args.prod and not args.check)
show_prod = args.prod or (not args.dev and not args.prod and not args.check)
db = SessionLocal()
try:
platforms = get_platforms(db)
stores = get_stores(db)
store_domains = get_store_domains(db)
store_platform_map = get_store_platform_map(db)
except Exception as e:
print(f"Error querying database: {e}", file=sys.stderr)
sys.exit(1)
@@ -234,8 +395,12 @@ def main():
print(f" {len(platforms)} platform(s), {len(stores)} store(s), {len(store_domains)} custom domain(s)")
print("=" * 72)
if args.check:
success = check_dev_urls(platforms, stores, store_domains, store_platform_map)
sys.exit(0 if success else 1)
if show_dev:
print_dev_urls(platforms, stores, store_domains)
print_dev_urls(platforms, stores, store_domains, store_platform_map)
if show_prod:
print_prod_urls(platforms, stores, store_domains)