chore: PostgreSQL migration compatibility and infrastructure improvements

Database & Migrations:
- Update all Alembic migrations for PostgreSQL compatibility
- Remove SQLite-specific syntax (AUTOINCREMENT, etc.)
- Add database utility helpers for PostgreSQL operations
- Fix services to use PostgreSQL-compatible queries

Documentation:
- Add comprehensive Docker deployment guide
- Add production deployment documentation
- Add infrastructure architecture documentation
- Update database setup guide for PostgreSQL-only
- Expand troubleshooting guide

Architecture & Validation:
- Add migration.yaml rules for SQL compatibility checking
- Enhance validate_architecture.py with migration validation
- Update architecture rules to validate Alembic migrations

Development:
- Fix duplicate install-all target in Makefile
- Add Celery/Redis validation to install.py script
- Add docker-compose.test.yml for CI testing
- Add squash_migrations.py utility script
- Update tests for PostgreSQL compatibility
- Improve test fixtures in conftest.py

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-11 17:52:28 +01:00
parent 2792414395
commit 3614d448e4
45 changed files with 3179 additions and 507 deletions

View File

@@ -13,6 +13,7 @@ making changes.
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
# revision identifiers, used by Alembic.
@@ -59,9 +60,12 @@ COUNTRY_ISO_MAP = {
def get_column_names(connection, table_name):
    """Return the list of column names for *table_name* (PostgreSQL).

    Replaces the old SQLite ``PRAGMA table_info`` lookup with a query
    against ``information_schema.columns``. Using a bind parameter also
    removes the injection risk of the previous f-string PRAGMA call.

    Args:
        connection: Active SQLAlchemy connection (as provided by
            ``op.get_bind()`` inside a migration).
        table_name: Name of the table to inspect; assumed to live in the
            ``public`` schema.

    Returns:
        list[str]: Column names in their defined (ordinal) order —
        ``ORDER BY ordinal_position`` makes the result deterministic,
        matching the ordering the old PRAGMA-based version provided.
    """
    result = connection.execute(text(
        "SELECT column_name FROM information_schema.columns "
        "WHERE table_name = :table AND table_schema = 'public' "
        "ORDER BY ordinal_position"
    ), {"table": table_name})
    return [row[0] for row in result]
def upgrade() -> None:
@@ -78,25 +82,25 @@ def upgrade() -> None:
print(" Columns country_name and country_iso already exist, skipping")
return
# If has old 'country' column, rename it and add country_iso
# If has old 'country' column, rename it (PostgreSQL supports direct rename)
if has_country and not has_country_name:
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.alter_column(
"country",
new_column_name="country_name",
)
op.alter_column(
"customer_addresses",
"country",
new_column_name="country_name",
)
# Add country_iso if it doesn't exist
if not has_country_iso:
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.add_column(
sa.Column("country_iso", sa.String(5), nullable=True)
)
op.add_column(
"customer_addresses",
sa.Column("country_iso", sa.String(5), nullable=True)
)
# Backfill country_iso from country_name
for country_name, iso_code in COUNTRY_ISO_MAP.items():
connection.execute(
sa.text(
text(
"UPDATE customer_addresses SET country_iso = :iso "
"WHERE country_name = :name"
),
@@ -105,19 +109,19 @@ def upgrade() -> None:
# Set default for any remaining NULL values
connection.execute(
sa.text(
text(
"UPDATE customer_addresses SET country_iso = 'LU' "
"WHERE country_iso IS NULL"
)
)
# Make country_iso NOT NULL using batch operation
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.alter_column(
"country_iso",
existing_type=sa.String(5),
nullable=False,
)
# Make country_iso NOT NULL (PostgreSQL supports direct alter)
op.alter_column(
"customer_addresses",
"country_iso",
existing_type=sa.String(5),
nullable=False,
)
def downgrade() -> None:
@@ -130,12 +134,11 @@ def downgrade() -> None:
# Only downgrade if in the new state
if has_country_name and not has_country:
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.alter_column(
"country_name",
new_column_name="country",
)
op.alter_column(
"customer_addresses",
"country_name",
new_column_name="country",
)
if has_country_iso:
with op.batch_alter_table("customer_addresses") as batch_op:
batch_op.drop_column("country_iso")
op.drop_column("customer_addresses", "country_iso")