chore: PostgreSQL migration compatibility and infrastructure improvements

Database & Migrations:
- Update all Alembic migrations for PostgreSQL compatibility
- Remove SQLite-specific syntax (AUTOINCREMENT, etc.)
- Add database utility helpers for PostgreSQL operations
- Fix services to use PostgreSQL-compatible queries

Documentation:
- Add comprehensive Docker deployment guide
- Add production deployment documentation
- Add infrastructure architecture documentation
- Update database setup guide for PostgreSQL-only
- Expand troubleshooting guide

Architecture & Validation:
- Add migration.yaml rules for SQL compatibility checking
- Enhance validate_architecture.py with migration validation
- Update architecture rules to validate Alembic migrations

Development:
- Fix duplicate install-all target in Makefile
- Add Celery/Redis validation to install.py script
- Add docker-compose.test.yml for CI testing
- Add squash_migrations.py utility script
- Update tests for PostgreSQL compatibility
- Improve test fixtures in conftest.py

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-11 17:52:28 +01:00
parent 2792414395
commit 3614d448e4
45 changed files with 3179 additions and 507 deletions

View File

@@ -9,7 +9,7 @@ from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# Removed: from sqlalchemy.dialects import sqlite (using sa.JSON for PostgreSQL)
# revision identifiers, used by Alembic.
revision: str = '204273a59d73'
@@ -34,8 +34,8 @@ def upgrade() -> None:
sa.Column('orders_skipped', sa.Integer(), nullable=True),
sa.Column('products_matched', sa.Integer(), nullable=True),
sa.Column('products_not_found', sa.Integer(), nullable=True),
sa.Column('confirmed_stats', sqlite.JSON(), nullable=True),
sa.Column('declined_stats', sqlite.JSON(), nullable=True),
sa.Column('confirmed_stats', sa.JSON(), nullable=True),
sa.Column('declined_stats', sa.JSON(), nullable=True),
sa.Column('error_message', sa.Text(), nullable=True),
sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),

View File

@@ -10,7 +10,7 @@ from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# Removed: from sqlalchemy.dialects import sqlite (using sa.JSON for PostgreSQL)
# revision identifiers, used by Alembic.
revision: str = '2953ed10d22c'
@@ -36,7 +36,7 @@ def upgrade() -> None:
sa.Column('products_limit', sa.Integer(), nullable=True),
sa.Column('team_members', sa.Integer(), nullable=True),
sa.Column('order_history_months', sa.Integer(), nullable=True),
sa.Column('features', sqlite.JSON(), nullable=True),
sa.Column('features', sa.JSON(), nullable=True),
sa.Column('stripe_product_id', sa.String(length=100), nullable=True),
sa.Column('stripe_price_monthly_id', sa.String(length=100), nullable=True),
sa.Column('stripe_price_annual_id', sa.String(length=100), nullable=True),
@@ -91,7 +91,7 @@ def upgrade() -> None:
sa.Column('invoice_pdf_url', sa.String(length=500), nullable=True),
sa.Column('hosted_invoice_url', sa.String(length=500), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('line_items', sqlite.JSON(), nullable=True),
sa.Column('line_items', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
@@ -182,7 +182,7 @@ def upgrade() -> None:
sa.column('products_limit', sa.Integer),
sa.column('team_members', sa.Integer),
sa.column('order_history_months', sa.Integer),
sa.column('features', sqlite.JSON),
sa.column('features', sa.JSON),
sa.column('display_order', sa.Integer),
sa.column('is_active', sa.Boolean),
sa.column('is_public', sa.Boolean),

View File

@@ -28,7 +28,7 @@ def upgrade() -> None:
sa.Column(
"timestamp",
sa.DateTime(timezone=True),
server_default=sa.text("(datetime('now'))"),
server_default=sa.text("CURRENT_TIMESTAMP"),
nullable=False,
),
sa.Column("total_files", sa.Integer(), nullable=True),
@@ -64,13 +64,13 @@ def upgrade() -> None:
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("(datetime('now'))"),
server_default=sa.text("CURRENT_TIMESTAMP"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("(datetime('now'))"),
server_default=sa.text("CURRENT_TIMESTAMP"),
nullable=False,
),
sa.PrimaryKeyConstraint("id"),
@@ -107,7 +107,7 @@ def upgrade() -> None:
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("(datetime('now'))"),
server_default=sa.text("CURRENT_TIMESTAMP"),
nullable=False,
),
sa.ForeignKeyConstraint(
@@ -170,7 +170,7 @@ def upgrade() -> None:
sa.Column(
"assigned_at",
sa.DateTime(timezone=True),
server_default=sa.text("(datetime('now'))"),
server_default=sa.text("CURRENT_TIMESTAMP"),
nullable=False,
),
sa.Column("assigned_by", sa.Integer(), nullable=True),
@@ -215,7 +215,7 @@ def upgrade() -> None:
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("(datetime('now'))"),
server_default=sa.text("CURRENT_TIMESTAMP"),
nullable=False,
),
sa.ForeignKeyConstraint(

View File

@@ -29,12 +29,13 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Rename product_id to vendor_sku for clarity
op.alter_column(
"products",
"product_id",
new_column_name="vendor_sku",
)
# Use batch mode so the rename is portable (plain ALTER on PostgreSQL; table-rebuild on SQLite)
with op.batch_alter_table("products", schema=None) as batch_op:
# Rename product_id to vendor_sku for clarity
batch_op.alter_column(
"product_id",
new_column_name="vendor_sku",
)
# Add new override fields
op.add_column(
@@ -118,9 +119,10 @@ def downgrade() -> None:
op.drop_column("products", "primary_image_url")
op.drop_column("products", "brand")
# Rename vendor_sku back to product_id
op.alter_column(
"products",
"vendor_sku",
new_column_name="product_id",
)
# Use batch mode so the rename is portable (plain ALTER on PostgreSQL; table-rebuild on SQLite)
with op.batch_alter_table("products", schema=None) as batch_op:
# Rename vendor_sku back to product_id
batch_op.alter_column(
"vendor_sku",
new_column_name="product_id",
)

View File

@@ -29,11 +29,11 @@ def upgrade() -> None:
sa.Column('show_in_legal', sa.Boolean(), nullable=True, default=False)
)
# Set default value for existing rows
op.execute("UPDATE content_pages SET show_in_legal = 0 WHERE show_in_legal IS NULL")
# Set default value for existing rows (PostgreSQL uses true/false for boolean)
op.execute("UPDATE content_pages SET show_in_legal = false WHERE show_in_legal IS NULL")
# Set privacy and terms pages to show in legal by default
op.execute("UPDATE content_pages SET show_in_legal = 1 WHERE slug IN ('privacy', 'terms')")
op.execute("UPDATE content_pages SET show_in_legal = true WHERE slug IN ('privacy', 'terms')")
def downgrade() -> None:

View File

@@ -9,6 +9,7 @@ from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
# revision identifiers, used by Alembic.
@@ -19,59 +20,60 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# Create email_templates table
op.create_table('email_templates',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=100), nullable=False),
sa.Column('language', sa.String(length=5), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('category', sa.String(length=50), nullable=False),
sa.Column('subject', sa.String(length=500), nullable=False),
sa.Column('body_html', sa.Text(), nullable=False),
sa.Column('body_text', sa.Text(), nullable=True),
sa.Column('variables', sa.Text(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sqlite_autoincrement=True
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=100), nullable=False),
sa.Column('language', sa.String(length=5), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('category', sa.String(length=50), nullable=False),
sa.Column('subject', sa.String(length=500), nullable=False),
sa.Column('body_html', sa.Text(), nullable=False),
sa.Column('body_text', sa.Text(), nullable=True),
sa.Column('variables', sa.Text(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(op.f('ix_email_templates_category'), 'email_templates', ['category'], unique=False)
op.create_index(op.f('ix_email_templates_code'), 'email_templates', ['code'], unique=False)
op.create_index(op.f('ix_email_templates_id'), 'email_templates', ['id'], unique=False)
# Create email_logs table
op.create_table('email_logs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('template_code', sa.String(length=100), nullable=True),
sa.Column('template_id', sa.Integer(), nullable=True),
sa.Column('recipient_email', sa.String(length=255), nullable=False),
sa.Column('recipient_name', sa.String(length=255), nullable=True),
sa.Column('subject', sa.String(length=500), nullable=False),
sa.Column('body_html', sa.Text(), nullable=True),
sa.Column('body_text', sa.Text(), nullable=True),
sa.Column('from_email', sa.String(length=255), nullable=False),
sa.Column('from_name', sa.String(length=255), nullable=True),
sa.Column('reply_to', sa.String(length=255), nullable=True),
sa.Column('status', sa.String(length=20), nullable=False),
sa.Column('sent_at', sa.DateTime(), nullable=True),
sa.Column('delivered_at', sa.DateTime(), nullable=True),
sa.Column('opened_at', sa.DateTime(), nullable=True),
sa.Column('clicked_at', sa.DateTime(), nullable=True),
sa.Column('error_message', sa.Text(), nullable=True),
sa.Column('retry_count', sa.Integer(), nullable=False),
sa.Column('provider', sa.String(length=50), nullable=True),
sa.Column('provider_message_id', sa.String(length=255), nullable=True),
sa.Column('vendor_id', sa.Integer(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('related_type', sa.String(length=50), nullable=True),
sa.Column('related_id', sa.Integer(), nullable=True),
sa.Column('extra_data', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['template_id'], ['email_templates.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
sa.PrimaryKeyConstraint('id')
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('template_code', sa.String(length=100), nullable=True),
sa.Column('template_id', sa.Integer(), nullable=True),
sa.Column('recipient_email', sa.String(length=255), nullable=False),
sa.Column('recipient_name', sa.String(length=255), nullable=True),
sa.Column('subject', sa.String(length=500), nullable=False),
sa.Column('body_html', sa.Text(), nullable=True),
sa.Column('body_text', sa.Text(), nullable=True),
sa.Column('from_email', sa.String(length=255), nullable=False),
sa.Column('from_name', sa.String(length=255), nullable=True),
sa.Column('reply_to', sa.String(length=255), nullable=True),
sa.Column('status', sa.String(length=20), nullable=False),
sa.Column('sent_at', sa.DateTime(), nullable=True),
sa.Column('delivered_at', sa.DateTime(), nullable=True),
sa.Column('opened_at', sa.DateTime(), nullable=True),
sa.Column('clicked_at', sa.DateTime(), nullable=True),
sa.Column('error_message', sa.Text(), nullable=True),
sa.Column('retry_count', sa.Integer(), nullable=False),
sa.Column('provider', sa.String(length=50), nullable=True),
sa.Column('provider_message_id', sa.String(length=255), nullable=True),
sa.Column('vendor_id', sa.Integer(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('related_type', sa.String(length=50), nullable=True),
sa.Column('related_id', sa.Integer(), nullable=True),
sa.Column('extra_data', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['template_id'], ['email_templates.id']),
sa.ForeignKeyConstraint(['user_id'], ['users.id']),
sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id']),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_email_logs_id'), 'email_logs', ['id'], unique=False)
op.create_index(op.f('ix_email_logs_provider_message_id'), 'email_logs', ['provider_message_id'], unique=False)
@@ -80,181 +82,242 @@ def upgrade() -> None:
op.create_index(op.f('ix_email_logs_template_code'), 'email_logs', ['template_code'], unique=False)
op.create_index(op.f('ix_email_logs_user_id'), 'email_logs', ['user_id'], unique=False)
op.create_index(op.f('ix_email_logs_vendor_id'), 'email_logs', ['vendor_id'], unique=False)
op.alter_column('application_logs', 'created_at',
existing_type=sa.DATETIME(),
nullable=False)
op.alter_column('application_logs', 'updated_at',
existing_type=sa.DATETIME(),
nullable=False)
op.drop_index(op.f('ix_capacity_snapshots_date'), table_name='capacity_snapshots')
op.create_index('ix_capacity_snapshots_date', 'capacity_snapshots', ['snapshot_date'], unique=False)
op.create_index(op.f('ix_capacity_snapshots_snapshot_date'), 'capacity_snapshots', ['snapshot_date'], unique=True)
op.alter_column('cart_items', 'created_at',
existing_type=sa.DATETIME(),
nullable=False)
op.alter_column('cart_items', 'updated_at',
existing_type=sa.DATETIME(),
nullable=False)
op.drop_index(op.f('ix_customers_addresses_id'), table_name='customer_addresses')
op.create_index(op.f('ix_customer_addresses_id'), 'customer_addresses', ['id'], unique=False)
op.alter_column('inventory', 'warehouse',
existing_type=sa.VARCHAR(),
nullable=False)
op.alter_column('inventory', 'bin_location',
existing_type=sa.VARCHAR(),
nullable=False)
op.alter_column('inventory', 'location',
existing_type=sa.VARCHAR(),
nullable=True)
op.drop_index(op.f('idx_inventory_product_location'), table_name='inventory')
op.drop_constraint(op.f('uq_inventory_product_location'), 'inventory', type_='unique')
op.create_unique_constraint('uq_inventory_product_warehouse_bin', 'inventory', ['product_id', 'warehouse', 'bin_location'])
op.create_index(op.f('ix_marketplace_import_errors_import_job_id'), 'marketplace_import_errors', ['import_job_id'], unique=False)
op.create_index(op.f('ix_marketplace_product_translations_id'), 'marketplace_product_translations', ['id'], unique=False)
op.alter_column('marketplace_products', 'is_digital',
existing_type=sa.BOOLEAN(),
nullable=True,
existing_server_default=sa.text('0'))
op.alter_column('marketplace_products', 'is_active',
existing_type=sa.BOOLEAN(),
nullable=True,
existing_server_default=sa.text('1'))
op.drop_index(op.f('idx_mp_is_active'), table_name='marketplace_products')
op.drop_index(op.f('idx_mp_platform'), table_name='marketplace_products')
op.drop_index(op.f('idx_mp_sku'), table_name='marketplace_products')
op.create_index(op.f('ix_marketplace_products_is_active'), 'marketplace_products', ['is_active'], unique=False)
op.create_index(op.f('ix_marketplace_products_is_digital'), 'marketplace_products', ['is_digital'], unique=False)
op.create_index(op.f('ix_marketplace_products_mpn'), 'marketplace_products', ['mpn'], unique=False)
op.create_index(op.f('ix_marketplace_products_platform'), 'marketplace_products', ['platform'], unique=False)
op.create_index(op.f('ix_marketplace_products_sku'), 'marketplace_products', ['sku'], unique=False)
op.drop_index(op.f('uq_order_item_exception'), table_name='order_item_exceptions')
op.create_index(op.f('ix_order_item_exceptions_original_gtin'), 'order_item_exceptions', ['original_gtin'], unique=False)
op.create_unique_constraint(None, 'order_item_exceptions', ['order_item_id'])
op.alter_column('order_items', 'needs_product_match',
existing_type=sa.BOOLEAN(),
nullable=True,
existing_server_default=sa.text("'0'"))
op.drop_index(op.f('ix_order_items_gtin'), table_name='order_items')
op.drop_index(op.f('ix_order_items_product_id'), table_name='order_items')
op.create_index(op.f('ix_product_translations_id'), 'product_translations', ['id'], unique=False)
op.drop_index(op.f('idx_product_active'), table_name='products')
op.drop_index(op.f('idx_product_featured'), table_name='products')
op.drop_index(op.f('idx_product_gtin'), table_name='products')
op.drop_index(op.f('idx_product_vendor_gtin'), table_name='products')
op.drop_constraint(op.f('uq_product'), 'products', type_='unique')
op.create_index('idx_product_vendor_active', 'products', ['vendor_id', 'is_active'], unique=False)
op.create_index('idx_product_vendor_featured', 'products', ['vendor_id', 'is_featured'], unique=False)
op.create_index(op.f('ix_products_gtin'), 'products', ['gtin'], unique=False)
op.create_index(op.f('ix_products_vendor_sku'), 'products', ['vendor_sku'], unique=False)
op.create_unique_constraint('uq_vendor_marketplace_product', 'products', ['vendor_id', 'marketplace_product_id'])
op.drop_index(op.f('ix_vendors_domains_domain'), table_name='vendor_domains')
op.drop_index(op.f('ix_vendors_domains_id'), table_name='vendor_domains')
op.create_index(op.f('ix_vendor_domains_domain'), 'vendor_domains', ['domain'], unique=True)
op.create_index(op.f('ix_vendor_domains_id'), 'vendor_domains', ['id'], unique=False)
op.alter_column('vendor_subscriptions', 'payment_retry_count',
existing_type=sa.INTEGER(),
nullable=False,
existing_server_default=sa.text('0'))
op.create_foreign_key(None, 'vendor_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
op.drop_index(op.f('ix_vendors_themes_id'), table_name='vendor_themes')
op.create_index(op.f('ix_vendor_themes_id'), 'vendor_themes', ['id'], unique=False)
op.drop_index(op.f('ix_vendors_users_id'), table_name='vendor_users')
op.drop_index(op.f('ix_vendors_users_invitation_token'), table_name='vendor_users')
op.create_index(op.f('ix_vendor_users_id'), 'vendor_users', ['id'], unique=False)
op.create_index(op.f('ix_vendor_users_invitation_token'), 'vendor_users', ['invitation_token'], unique=False)
op.alter_column('vendors', 'company_id',
existing_type=sa.INTEGER(),
nullable=False)
# ### end Alembic commands ###
# application_logs - alter columns
op.alter_column('application_logs', 'created_at', existing_type=sa.DATETIME(), nullable=False)
op.alter_column('application_logs', 'updated_at', existing_type=sa.DATETIME(), nullable=False)
# capacity_snapshots indexes (PostgreSQL IF EXISTS/IF NOT EXISTS)
op.execute(text("DROP INDEX IF EXISTS ix_capacity_snapshots_date"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_capacity_snapshots_date ON capacity_snapshots (snapshot_date)"))
op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_capacity_snapshots_snapshot_date ON capacity_snapshots (snapshot_date)"))
# cart_items - alter columns
op.alter_column('cart_items', 'created_at', existing_type=sa.DATETIME(), nullable=False)
op.alter_column('cart_items', 'updated_at', existing_type=sa.DATETIME(), nullable=False)
# customer_addresses index rename
op.execute(text("DROP INDEX IF EXISTS ix_customers_addresses_id"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_customer_addresses_id ON customer_addresses (id)"))
# inventory - alter columns and constraints
op.alter_column('inventory', 'warehouse', existing_type=sa.VARCHAR(), nullable=False)
op.alter_column('inventory', 'bin_location', existing_type=sa.VARCHAR(), nullable=False)
op.alter_column('inventory', 'location', existing_type=sa.VARCHAR(), nullable=True)
op.execute(text("DROP INDEX IF EXISTS idx_inventory_product_location"))
op.execute(text("ALTER TABLE inventory DROP CONSTRAINT IF EXISTS uq_inventory_product_location"))
op.execute(text("""
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_inventory_product_warehouse_bin') THEN
ALTER TABLE inventory ADD CONSTRAINT uq_inventory_product_warehouse_bin UNIQUE (product_id, warehouse, bin_location);
END IF;
END $$;
"""))
# marketplace_import_errors and translations indexes
op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_import_errors_import_job_id ON marketplace_import_errors (import_job_id)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_product_translations_id ON marketplace_product_translations (id)"))
# marketplace_products - alter columns
op.alter_column('marketplace_products', 'is_digital', existing_type=sa.BOOLEAN(), nullable=True)
op.alter_column('marketplace_products', 'is_active', existing_type=sa.BOOLEAN(), nullable=True)
# marketplace_products indexes
op.execute(text("DROP INDEX IF EXISTS idx_mp_is_active"))
op.execute(text("DROP INDEX IF EXISTS idx_mp_platform"))
op.execute(text("DROP INDEX IF EXISTS idx_mp_sku"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_is_active ON marketplace_products (is_active)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_is_digital ON marketplace_products (is_digital)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_mpn ON marketplace_products (mpn)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_platform ON marketplace_products (platform)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_marketplace_products_sku ON marketplace_products (sku)"))
# order_item_exceptions - constraints and indexes
op.execute(text("DROP INDEX IF EXISTS uq_order_item_exception"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_order_item_exceptions_original_gtin ON order_item_exceptions (original_gtin)"))
op.execute(text("""
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_order_item_exceptions_order_item_id') THEN
ALTER TABLE order_item_exceptions ADD CONSTRAINT uq_order_item_exceptions_order_item_id UNIQUE (order_item_id);
END IF;
END $$;
"""))
# order_items - alter column
op.alter_column('order_items', 'needs_product_match', existing_type=sa.BOOLEAN(), nullable=True)
# order_items indexes
op.execute(text("DROP INDEX IF EXISTS ix_order_items_gtin"))
op.execute(text("DROP INDEX IF EXISTS ix_order_items_product_id"))
# product_translations index
op.execute(text("CREATE INDEX IF NOT EXISTS ix_product_translations_id ON product_translations (id)"))
# products indexes
op.execute(text("DROP INDEX IF EXISTS idx_product_active"))
op.execute(text("DROP INDEX IF EXISTS idx_product_featured"))
op.execute(text("DROP INDEX IF EXISTS idx_product_gtin"))
op.execute(text("DROP INDEX IF EXISTS idx_product_vendor_gtin"))
# products constraint
op.execute(text("ALTER TABLE products DROP CONSTRAINT IF EXISTS uq_product"))
op.execute(text("""
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_vendor_marketplace_product') THEN
ALTER TABLE products ADD CONSTRAINT uq_vendor_marketplace_product UNIQUE (vendor_id, marketplace_product_id);
END IF;
END $$;
"""))
# products new indexes
op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_vendor_active ON products (vendor_id, is_active)"))
op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_vendor_featured ON products (vendor_id, is_featured)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_products_gtin ON products (gtin)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_products_vendor_sku ON products (vendor_sku)"))
# vendor_domains indexes
op.execute(text("DROP INDEX IF EXISTS ix_vendors_domains_domain"))
op.execute(text("DROP INDEX IF EXISTS ix_vendors_domains_id"))
op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_vendor_domains_domain ON vendor_domains (domain)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_domains_id ON vendor_domains (id)"))
# vendor_subscriptions - alter column and FK
op.alter_column('vendor_subscriptions', 'payment_retry_count', existing_type=sa.INTEGER(), nullable=False)
op.execute(text("""
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_vendor_subscriptions_tier_id') THEN
ALTER TABLE vendor_subscriptions ADD CONSTRAINT fk_vendor_subscriptions_tier_id
FOREIGN KEY (tier_id) REFERENCES subscription_tiers(id);
END IF;
END $$;
"""))
# vendor_themes indexes
op.execute(text("DROP INDEX IF EXISTS ix_vendors_themes_id"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_themes_id ON vendor_themes (id)"))
# vendor_users indexes
op.execute(text("DROP INDEX IF EXISTS ix_vendors_users_id"))
op.execute(text("DROP INDEX IF EXISTS ix_vendors_users_invitation_token"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_users_id ON vendor_users (id)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendor_users_invitation_token ON vendor_users (invitation_token)"))
# vendors - alter column
op.alter_column('vendors', 'company_id', existing_type=sa.INTEGER(), nullable=False)
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('vendors', 'company_id',
existing_type=sa.INTEGER(),
nullable=True)
op.drop_index(op.f('ix_vendor_users_invitation_token'), table_name='vendor_users')
op.drop_index(op.f('ix_vendor_users_id'), table_name='vendor_users')
op.create_index(op.f('ix_vendors_users_invitation_token'), 'vendor_users', ['invitation_token'], unique=False)
op.create_index(op.f('ix_vendors_users_id'), 'vendor_users', ['id'], unique=False)
op.drop_index(op.f('ix_vendor_themes_id'), table_name='vendor_themes')
op.create_index(op.f('ix_vendors_themes_id'), 'vendor_themes', ['id'], unique=False)
op.drop_constraint(None, 'vendor_subscriptions', type_='foreignkey')
op.alter_column('vendor_subscriptions', 'payment_retry_count',
existing_type=sa.INTEGER(),
nullable=True,
existing_server_default=sa.text('0'))
op.drop_index(op.f('ix_vendor_domains_id'), table_name='vendor_domains')
op.drop_index(op.f('ix_vendor_domains_domain'), table_name='vendor_domains')
op.create_index(op.f('ix_vendors_domains_id'), 'vendor_domains', ['id'], unique=False)
op.create_index(op.f('ix_vendors_domains_domain'), 'vendor_domains', ['domain'], unique=1)
op.drop_constraint('uq_vendor_marketplace_product', 'products', type_='unique')
op.drop_index(op.f('ix_products_vendor_sku'), table_name='products')
op.drop_index(op.f('ix_products_gtin'), table_name='products')
op.drop_index('idx_product_vendor_featured', table_name='products')
op.drop_index('idx_product_vendor_active', table_name='products')
op.create_unique_constraint(op.f('uq_product'), 'products', ['vendor_id', 'marketplace_product_id'])
op.create_index(op.f('idx_product_vendor_gtin'), 'products', ['vendor_id', 'gtin'], unique=False)
op.create_index(op.f('idx_product_gtin'), 'products', ['gtin'], unique=False)
op.create_index(op.f('idx_product_featured'), 'products', ['vendor_id', 'is_featured'], unique=False)
op.create_index(op.f('idx_product_active'), 'products', ['vendor_id', 'is_active'], unique=False)
op.drop_index(op.f('ix_product_translations_id'), table_name='product_translations')
op.create_index(op.f('ix_order_items_product_id'), 'order_items', ['product_id'], unique=False)
op.create_index(op.f('ix_order_items_gtin'), 'order_items', ['gtin'], unique=False)
op.alter_column('order_items', 'needs_product_match',
existing_type=sa.BOOLEAN(),
nullable=False,
existing_server_default=sa.text("'0'"))
op.drop_constraint(None, 'order_item_exceptions', type_='unique')
op.drop_index(op.f('ix_order_item_exceptions_original_gtin'), table_name='order_item_exceptions')
op.create_index(op.f('uq_order_item_exception'), 'order_item_exceptions', ['order_item_id'], unique=1)
op.drop_index(op.f('ix_marketplace_products_sku'), table_name='marketplace_products')
op.drop_index(op.f('ix_marketplace_products_platform'), table_name='marketplace_products')
op.drop_index(op.f('ix_marketplace_products_mpn'), table_name='marketplace_products')
op.drop_index(op.f('ix_marketplace_products_is_digital'), table_name='marketplace_products')
op.drop_index(op.f('ix_marketplace_products_is_active'), table_name='marketplace_products')
op.create_index(op.f('idx_mp_sku'), 'marketplace_products', ['sku'], unique=False)
op.create_index(op.f('idx_mp_platform'), 'marketplace_products', ['platform'], unique=False)
op.create_index(op.f('idx_mp_is_active'), 'marketplace_products', ['is_active'], unique=False)
op.alter_column('marketplace_products', 'is_active',
existing_type=sa.BOOLEAN(),
nullable=False,
existing_server_default=sa.text('1'))
op.alter_column('marketplace_products', 'is_digital',
existing_type=sa.BOOLEAN(),
nullable=False,
existing_server_default=sa.text('0'))
op.drop_index(op.f('ix_marketplace_product_translations_id'), table_name='marketplace_product_translations')
op.drop_index(op.f('ix_marketplace_import_errors_import_job_id'), table_name='marketplace_import_errors')
op.drop_constraint('uq_inventory_product_warehouse_bin', 'inventory', type_='unique')
op.create_unique_constraint(op.f('uq_inventory_product_location'), 'inventory', ['product_id', 'location'])
op.create_index(op.f('idx_inventory_product_location'), 'inventory', ['product_id', 'location'], unique=False)
op.alter_column('inventory', 'location',
existing_type=sa.VARCHAR(),
nullable=False)
op.alter_column('inventory', 'bin_location',
existing_type=sa.VARCHAR(),
nullable=True)
op.alter_column('inventory', 'warehouse',
existing_type=sa.VARCHAR(),
nullable=True)
op.drop_index(op.f('ix_customer_addresses_id'), table_name='customer_addresses')
op.create_index(op.f('ix_customers_addresses_id'), 'customer_addresses', ['id'], unique=False)
op.alter_column('cart_items', 'updated_at',
existing_type=sa.DATETIME(),
nullable=True)
op.alter_column('cart_items', 'created_at',
existing_type=sa.DATETIME(),
nullable=True)
op.drop_index(op.f('ix_capacity_snapshots_snapshot_date'), table_name='capacity_snapshots')
op.drop_index('ix_capacity_snapshots_date', table_name='capacity_snapshots')
op.create_index(op.f('ix_capacity_snapshots_date'), 'capacity_snapshots', ['snapshot_date'], unique=1)
op.alter_column('application_logs', 'updated_at',
existing_type=sa.DATETIME(),
nullable=True)
op.alter_column('application_logs', 'created_at',
existing_type=sa.DATETIME(),
nullable=True)
# vendors
op.alter_column('vendors', 'company_id', existing_type=sa.INTEGER(), nullable=True)
# vendor_users indexes
op.execute(text("DROP INDEX IF EXISTS ix_vendor_users_invitation_token"))
op.execute(text("DROP INDEX IF EXISTS ix_vendor_users_id"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendors_users_invitation_token ON vendor_users (invitation_token)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendors_users_id ON vendor_users (id)"))
# vendor_themes indexes
op.execute(text("DROP INDEX IF EXISTS ix_vendor_themes_id"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendors_themes_id ON vendor_themes (id)"))
# vendor_subscriptions
op.execute(text("ALTER TABLE vendor_subscriptions DROP CONSTRAINT IF EXISTS fk_vendor_subscriptions_tier_id"))
op.alter_column('vendor_subscriptions', 'payment_retry_count', existing_type=sa.INTEGER(), nullable=True)
# vendor_domains indexes
op.execute(text("DROP INDEX IF EXISTS ix_vendor_domains_id"))
op.execute(text("DROP INDEX IF EXISTS ix_vendor_domains_domain"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_vendors_domains_id ON vendor_domains (id)"))
op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_vendors_domains_domain ON vendor_domains (domain)"))
# products constraint and indexes
op.execute(text("ALTER TABLE products DROP CONSTRAINT IF EXISTS uq_vendor_marketplace_product"))
op.execute(text("""
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_product') THEN
ALTER TABLE products ADD CONSTRAINT uq_product UNIQUE (vendor_id, marketplace_product_id);
END IF;
END $$;
"""))
op.execute(text("DROP INDEX IF EXISTS ix_products_vendor_sku"))
op.execute(text("DROP INDEX IF EXISTS ix_products_gtin"))
op.execute(text("DROP INDEX IF EXISTS idx_product_vendor_featured"))
op.execute(text("DROP INDEX IF EXISTS idx_product_vendor_active"))
op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_vendor_gtin ON products (vendor_id, gtin)"))
op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_gtin ON products (gtin)"))
op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_featured ON products (vendor_id, is_featured)"))
op.execute(text("CREATE INDEX IF NOT EXISTS idx_product_active ON products (vendor_id, is_active)"))
# product_translations
op.execute(text("DROP INDEX IF EXISTS ix_product_translations_id"))
# order_items
op.execute(text("CREATE INDEX IF NOT EXISTS ix_order_items_product_id ON order_items (product_id)"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_order_items_gtin ON order_items (gtin)"))
op.alter_column('order_items', 'needs_product_match', existing_type=sa.BOOLEAN(), nullable=False)
# order_item_exceptions
op.execute(text("ALTER TABLE order_item_exceptions DROP CONSTRAINT IF EXISTS uq_order_item_exceptions_order_item_id"))
op.execute(text("DROP INDEX IF EXISTS ix_order_item_exceptions_original_gtin"))
op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS uq_order_item_exception ON order_item_exceptions (order_item_id)"))
# marketplace_products indexes
op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_sku"))
op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_platform"))
op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_mpn"))
op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_is_digital"))
op.execute(text("DROP INDEX IF EXISTS ix_marketplace_products_is_active"))
op.execute(text("CREATE INDEX IF NOT EXISTS idx_mp_sku ON marketplace_products (sku)"))
op.execute(text("CREATE INDEX IF NOT EXISTS idx_mp_platform ON marketplace_products (platform)"))
op.execute(text("CREATE INDEX IF NOT EXISTS idx_mp_is_active ON marketplace_products (is_active)"))
# marketplace_products columns
op.alter_column('marketplace_products', 'is_active', existing_type=sa.BOOLEAN(), nullable=False)
op.alter_column('marketplace_products', 'is_digital', existing_type=sa.BOOLEAN(), nullable=False)
# marketplace imports
op.execute(text("DROP INDEX IF EXISTS ix_marketplace_product_translations_id"))
op.execute(text("DROP INDEX IF EXISTS ix_marketplace_import_errors_import_job_id"))
# inventory
op.execute(text("ALTER TABLE inventory DROP CONSTRAINT IF EXISTS uq_inventory_product_warehouse_bin"))
op.execute(text("""
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'uq_inventory_product_location') THEN
ALTER TABLE inventory ADD CONSTRAINT uq_inventory_product_location UNIQUE (product_id, location);
END IF;
END $$;
"""))
op.execute(text("CREATE INDEX IF NOT EXISTS idx_inventory_product_location ON inventory (product_id, location)"))
op.alter_column('inventory', 'location', existing_type=sa.VARCHAR(), nullable=False)
op.alter_column('inventory', 'bin_location', existing_type=sa.VARCHAR(), nullable=True)
op.alter_column('inventory', 'warehouse', existing_type=sa.VARCHAR(), nullable=True)
# customer_addresses
op.execute(text("DROP INDEX IF EXISTS ix_customer_addresses_id"))
op.execute(text("CREATE INDEX IF NOT EXISTS ix_customers_addresses_id ON customer_addresses (id)"))
# cart_items
op.alter_column('cart_items', 'updated_at', existing_type=sa.DATETIME(), nullable=True)
op.alter_column('cart_items', 'created_at', existing_type=sa.DATETIME(), nullable=True)
# capacity_snapshots
op.execute(text("DROP INDEX IF EXISTS ix_capacity_snapshots_snapshot_date"))
op.execute(text("DROP INDEX IF EXISTS ix_capacity_snapshots_date"))
op.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_capacity_snapshots_date ON capacity_snapshots (snapshot_date)"))
# application_logs
op.alter_column('application_logs', 'updated_at', existing_type=sa.DATETIME(), nullable=True)
op.alter_column('application_logs', 'created_at', existing_type=sa.DATETIME(), nullable=True)
# Drop email tables
op.drop_index(op.f('ix_email_logs_vendor_id'), table_name='email_logs')
op.drop_index(op.f('ix_email_logs_user_id'), table_name='email_logs')
op.drop_index(op.f('ix_email_logs_template_code'), table_name='email_logs')
@@ -267,4 +330,3 @@ def downgrade() -> None:
op.drop_index(op.f('ix_email_templates_code'), table_name='email_templates')
op.drop_index(op.f('ix_email_templates_category'), table_name='email_templates')
op.drop_table('email_templates')
# ### end Alembic commands ###

View File

@@ -20,8 +20,6 @@ Google Shopping feed value while using 'product_type' for the new enum.
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
from alembic import op
# revision identifiers, used by Alembic.
@@ -56,7 +54,7 @@ def upgrade() -> None:
"is_digital",
sa.Boolean(),
nullable=False,
server_default=sa.text("0"),
server_default=sa.text("false"),
),
)
@@ -113,7 +111,7 @@ def upgrade() -> None:
"is_active",
sa.Boolean(),
nullable=False,
server_default=sa.text("1"),
server_default=sa.text("true"),
),
)

View File

@@ -19,12 +19,29 @@ branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def get_column_names(conn, table_name: str) -> set:
    """Return the set of column names for ``table_name`` (PostgreSQL).

    Queries ``information_schema.columns`` restricted to the ``public``
    schema, using a bound parameter for the table name to stay safe
    against injection and keep the query plan cacheable.

    Args:
        conn: An active SQLAlchemy connection (from ``op.get_bind()``).
        table_name: Name of the table to inspect.

    Returns:
        Set of column name strings; empty if the table does not exist.
    """
    result = conn.execute(text(
        "SELECT column_name FROM information_schema.columns "
        "WHERE table_name = :table AND table_schema = 'public'"
    ), {"table": table_name})
    # Iterate the result directly; fetchall() would build a throwaway list.
    return {row[0] for row in result}
def get_index_names(conn, table_name: str) -> set:
    """Return the set of index names defined on ``table_name`` (PostgreSQL).

    Reads the ``pg_indexes`` system view restricted to the ``public``
    schema, with the table name passed as a bound parameter.

    Args:
        conn: An active SQLAlchemy connection (from ``op.get_bind()``).
        table_name: Name of the table to inspect.

    Returns:
        Set of index name strings; empty if the table has no indexes
        or does not exist.
    """
    result = conn.execute(text(
        "SELECT indexname FROM pg_indexes "
        "WHERE tablename = :table AND schemaname = 'public'"
    ), {"table": table_name})
    # Iterate the result directly; fetchall() would build a throwaway list.
    return {row[0] for row in result}
def upgrade() -> None:
conn = op.get_bind()
# Check if columns already exist (idempotent)
result = conn.execute(text("PRAGMA table_info(inventory)"))
columns = {row[1] for row in result.fetchall()}
columns = get_column_names(conn, "inventory")
if 'warehouse' not in columns:
op.add_column('inventory', sa.Column('warehouse', sa.String(), nullable=False, server_default='strassen'))
@@ -41,8 +58,7 @@ def upgrade() -> None:
"""))
# Create indexes if they don't exist
indexes = conn.execute(text("PRAGMA index_list(inventory)"))
existing_indexes = {row[1] for row in indexes.fetchall()}
existing_indexes = get_index_names(conn, "inventory")
if 'idx_inventory_warehouse_bin' not in existing_indexes:
op.create_index('idx_inventory_warehouse_bin', 'inventory', ['warehouse', 'bin_location'], unique=False)
@@ -56,8 +72,7 @@ def downgrade() -> None:
conn = op.get_bind()
# Check which indexes exist before dropping
indexes = conn.execute(text("PRAGMA index_list(inventory)"))
existing_indexes = {row[1] for row in indexes.fetchall()}
existing_indexes = get_index_names(conn, "inventory")
if 'ix_inventory_warehouse' in existing_indexes:
op.drop_index(op.f('ix_inventory_warehouse'), table_name='inventory')
@@ -67,8 +82,7 @@ def downgrade() -> None:
op.drop_index('idx_inventory_warehouse_bin', table_name='inventory')
# Check if columns exist before dropping
result = conn.execute(text("PRAGMA table_info(inventory)"))
columns = {row[1] for row in result.fetchall()}
columns = get_column_names(conn, "inventory")
if 'bin_location' in columns:
op.drop_column('inventory', 'bin_location')

View File

@@ -62,12 +62,12 @@ def upgrade() -> None:
)
# Update existing records to have proper started_at and completed_at
# This is done via raw SQL for efficiency
# This is done via raw SQL for efficiency (PostgreSQL syntax)
op.execute(
"""
UPDATE architecture_scans
SET started_at = timestamp,
completed_at = datetime(timestamp, '+' || CAST(duration_seconds AS TEXT) || ' seconds')
completed_at = timestamp + (COALESCE(duration_seconds, 0) || ' seconds')::interval
WHERE started_at IS NULL
"""
)

View File

@@ -59,13 +59,11 @@ def upgrade() -> None:
def downgrade() -> None:
# In SQLite batch mode, we must explicitly drop the index before dropping
# the column, otherwise batch mode will try to recreate the index on the
# new table (which won't have the column).
with op.batch_alter_table("vendor_subscriptions", schema=None) as batch_op:
# Drop FK constraint
batch_op.drop_constraint(
"fk_vendor_subscriptions_tier_id",
type_="foreignkey",
)
# Drop index
# First drop the index on tier_id
batch_op.drop_index("ix_vendor_subscriptions_tier_id")
# Drop column
# Then drop the column (FK is automatically removed with the column)
batch_op.drop_column("tier_id")

View File

@@ -26,26 +26,26 @@ def upgrade() -> None:
sa.Column('status', sa.String(length=20), nullable=False, server_default='not_started'),
sa.Column('current_step', sa.String(length=30), nullable=False, server_default='company_profile'),
# Step 1: Company Profile
sa.Column('step_company_profile_completed', sa.Boolean(), nullable=False, server_default=sa.text('0')),
sa.Column('step_company_profile_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')),
sa.Column('step_company_profile_completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('step_company_profile_data', sa.JSON(), nullable=True),
# Step 2: Letzshop API Configuration
sa.Column('step_letzshop_api_completed', sa.Boolean(), nullable=False, server_default=sa.text('0')),
sa.Column('step_letzshop_api_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')),
sa.Column('step_letzshop_api_completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('step_letzshop_api_connection_verified', sa.Boolean(), nullable=False, server_default=sa.text('0')),
sa.Column('step_letzshop_api_connection_verified', sa.Boolean(), nullable=False, server_default=sa.text('false')),
# Step 3: Product Import
sa.Column('step_product_import_completed', sa.Boolean(), nullable=False, server_default=sa.text('0')),
sa.Column('step_product_import_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')),
sa.Column('step_product_import_completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('step_product_import_csv_url_set', sa.Boolean(), nullable=False, server_default=sa.text('0')),
sa.Column('step_product_import_csv_url_set', sa.Boolean(), nullable=False, server_default=sa.text('false')),
# Step 4: Order Sync
sa.Column('step_order_sync_completed', sa.Boolean(), nullable=False, server_default=sa.text('0')),
sa.Column('step_order_sync_completed', sa.Boolean(), nullable=False, server_default=sa.text('false')),
sa.Column('step_order_sync_completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('step_order_sync_job_id', sa.Integer(), nullable=True),
# Completion tracking
sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
# Admin override
sa.Column('skipped_by_admin', sa.Boolean(), nullable=False, server_default=sa.text('0')),
sa.Column('skipped_by_admin', sa.Boolean(), nullable=False, server_default=sa.text('false')),
sa.Column('skipped_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('skipped_reason', sa.Text(), nullable=True),
sa.Column('skipped_by_user_id', sa.Integer(), nullable=True),
@@ -56,7 +56,6 @@ def upgrade() -> None:
sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['skipped_by_user_id'], ['users.id']),
sa.PrimaryKeyConstraint('id'),
sqlite_autoincrement=True
)
op.create_index(op.f('ix_vendor_onboarding_id'), 'vendor_onboarding', ['id'], unique=False)
op.create_index(op.f('ix_vendor_onboarding_vendor_id'), 'vendor_onboarding', ['vendor_id'], unique=True)

View File

@@ -255,7 +255,7 @@ def upgrade() -> None:
INSERT INTO features (code, name, description, category, ui_location, ui_icon, ui_route,
minimum_tier_id, is_active, is_visible, display_order, created_at, updated_at)
VALUES (:code, :name, :description, :category, :ui_location, :ui_icon, :ui_route,
:minimum_tier_id, 1, 1, :display_order, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
:minimum_tier_id, true, true, :display_order, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
"""),
{
"code": code,

View File

@@ -31,7 +31,7 @@ def upgrade() -> None:
op.execute("""
UPDATE order_items
SET shipped_quantity = quantity
WHERE inventory_fulfilled = 1
WHERE inventory_fulfilled = true
""")

View File

@@ -13,6 +13,7 @@ making changes.
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
# revision identifiers, used by Alembic.
@@ -59,9 +60,12 @@ COUNTRY_ISO_MAP = {
def get_column_names(connection, table_name):
"""Get list of column names for a table."""
result = connection.execute(sa.text(f"PRAGMA table_info({table_name})"))
return [row[1] for row in result]
"""Get list of column names for a table (PostgreSQL)."""
result = connection.execute(text(
"SELECT column_name FROM information_schema.columns "
"WHERE table_name = :table AND table_schema = 'public'"
), {"table": table_name})
return [row[0] for row in result]
def upgrade() -> None:
@@ -78,25 +82,25 @@ def upgrade() -> None:
print(" Columns country_name and country_iso already exist, skipping")
return
# If has old 'country' column, rename it and add country_iso
# If has old 'country' column, rename it (PostgreSQL supports direct rename)
if has_country and not has_country_name:
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.alter_column(
"country",
new_column_name="country_name",
)
op.alter_column(
"customer_addresses",
"country",
new_column_name="country_name",
)
# Add country_iso if it doesn't exist
if not has_country_iso:
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.add_column(
sa.Column("country_iso", sa.String(5), nullable=True)
)
op.add_column(
"customer_addresses",
sa.Column("country_iso", sa.String(5), nullable=True)
)
# Backfill country_iso from country_name
for country_name, iso_code in COUNTRY_ISO_MAP.items():
connection.execute(
sa.text(
text(
"UPDATE customer_addresses SET country_iso = :iso "
"WHERE country_name = :name"
),
@@ -105,19 +109,19 @@ def upgrade() -> None:
# Set default for any remaining NULL values
connection.execute(
sa.text(
text(
"UPDATE customer_addresses SET country_iso = 'LU' "
"WHERE country_iso IS NULL"
)
)
# Make country_iso NOT NULL using batch operation
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.alter_column(
"country_iso",
existing_type=sa.String(5),
nullable=False,
)
# Make country_iso NOT NULL (PostgreSQL supports direct alter)
op.alter_column(
"customer_addresses",
"country_iso",
existing_type=sa.String(5),
nullable=False,
)
def downgrade() -> None:
@@ -130,12 +134,11 @@ def downgrade() -> None:
# Only downgrade if in the new state
if has_country_name and not has_country:
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.alter_column(
"country_name",
new_column_name="country",
)
op.alter_column(
"customer_addresses",
"country_name",
new_column_name="country",
)
if has_country_iso:
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.drop_column("country_iso")
op.drop_column("customer_addresses", "country_iso")