chore: PostgreSQL migration compatibility and infrastructure improvements

Database & Migrations:
- Update all Alembic migrations for PostgreSQL compatibility
- Remove SQLite-specific syntax (AUTOINCREMENT, etc.)
- Add database utility helpers for PostgreSQL operations
- Fix services to use PostgreSQL-compatible queries

Documentation:
- Add comprehensive Docker deployment guide
- Add production deployment documentation
- Add infrastructure architecture documentation
- Update database setup guide for PostgreSQL-only deployments
- Expand troubleshooting guide

Architecture & Validation:
- Add migration.yaml rules for SQL compatibility checking
- Enhance validate_architecture.py with migration validation
- Update architecture rules to validate Alembic migrations

Development:
- Fix duplicate install-all target in Makefile
- Add Celery/Redis validation to install.py script
- Add docker-compose.test.yml for CI testing
- Add squash_migrations.py utility script
- Update tests for PostgreSQL compatibility
- Improve test fixtures in conftest.py

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-11 17:52:28 +01:00
parent 2792414395
commit 3614d448e4
45 changed files with 3179 additions and 507 deletions

View File

@@ -355,6 +355,52 @@ def validate_configuration(env_vars: dict) -> dict:
"items": ["Set PLATFORM_DOMAIN for your deployment"]
}
# -------------------------------------------------------------------------
# Celery / Redis Task Queue
# -------------------------------------------------------------------------
# NOTE(review): fragment of validate_configuration(env_vars); it fills in
# results["celery"] with the same {"status", "message", "items"} shape used
# by the other sections of this function (see the entry just above this hunk).
redis_url = env_vars.get("REDIS_URL", "")
# USE_CELERY is a string flag; only the literal "true" (case-insensitive)
# enables Celery — anything else counts as disabled.
use_celery = env_vars.get("USE_CELERY", "false").lower() == "true"
flower_url = env_vars.get("FLOWER_URL", "")
flower_password = env_vars.get("FLOWER_PASSWORD", "")
if use_celery:
if redis_url:
# Keep only the part after '@' so embedded credentials in the
# Redis URL are not echoed into the report.
celery_items = [f"Redis: {redis_url.split('@')[-1] if '@' in redis_url else redis_url}"]
if flower_url:
celery_items.append(f"Flower: {flower_url}")
else:
celery_items.append("FLOWER_URL not set (monitoring disabled)")
# "changeme" is presumably the shipped default Flower password —
# TODO confirm against the example .env / docker-compose files.
if flower_password and flower_password != "changeme":
celery_items.append("Flower password configured")
elif flower_password == "changeme":
celery_items.append("WARNING: Change FLOWER_PASSWORD for production!")
results["celery"] = {
"status": "ok",
"message": "Celery enabled with Redis",
"items": celery_items
}
else:
# Celery requested but no broker configured: hard "missing" status
# with concrete remediation items.
results["celery"] = {
"status": "missing",
"message": "Celery enabled but Redis not configured",
"items": [
"Set REDIS_URL (e.g., redis://localhost:6379/0)",
"Or disable Celery: USE_CELERY=false"
]
}
else:
# Celery off is a valid development setup, so this is only a warning.
results["celery"] = {
"status": "warning",
"message": "Celery disabled (using FastAPI BackgroundTasks)",
"items": [
"Set USE_CELERY=true for production",
"Requires Redis: docker-compose up -d redis"
]
}
return results

View File

@@ -0,0 +1,192 @@
#!/usr/bin/env python3
"""
Migration Squash Script
This script squashes all existing migrations into a single initial migration.
Run this after setting up PostgreSQL to simplify the migration history.
Prerequisites:
- PostgreSQL must be running: make docker-up
- DATABASE_URL environment variable must be set to PostgreSQL
Usage:
python scripts/squash_migrations.py
What this script does:
1. Backs up existing migrations to alembic/versions_backup_YYYYMMDD/
2. Creates a fresh initial migration from current models
3. Stamps the database as being at the new migration
After running:
1. Review the new migration in alembic/versions/
2. Test with: make migrate-up (on a fresh database)
3. If satisfied, delete the backup directory
"""
import os
import shutil
import subprocess
import sys
from datetime import datetime
from pathlib import Path
# Add project root to path
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))
VERSIONS_DIR = project_root / "alembic" / "versions"
def check_prerequisites():
    """Verify that DATABASE_URL points at PostgreSQL.

    Exits the process with status 1 when the URL is missing or not a
    PostgreSQL URL; returns True on success so callers may use the result
    in boolean contexts.

    Security: never echoes credentials.  The previous implementation
    printed ``database_url.split('@')[0]`` — i.e. ``scheme://user:password``
    — straight into terminal/CI logs.
    """
    database_url = os.getenv("DATABASE_URL", "")
    if not database_url.startswith("postgresql"):
        print("ERROR: DATABASE_URL must be a PostgreSQL URL")
        # Show only the scheme; a raw prefix could include credentials.
        scheme = database_url.split("://", 1)[0] if database_url else "(empty)"
        print(f"Current scheme: {scheme}")
        print("")
        print("Set DATABASE_URL or start PostgreSQL with: make docker-up")
        sys.exit(1)
    # Print only the host/database portion (after '@'), masking user:password.
    host_part = database_url.split("@")[-1] if "@" in database_url else database_url
    print(f"Database: ...@{host_part}")
    return True
def backup_migrations():
    """Copy all migrations to a timestamped backup directory, then delete
    the originals from alembic/versions/.

    Returns the backup directory Path, or None when there was nothing to
    back up.
    """
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    destination = project_root / "alembic" / f"versions_backup_{stamp}"
    if not VERSIONS_DIR.exists():
        print("No existing migrations to backup")
        return None
    sources = list(VERSIONS_DIR.glob("*.py"))
    if not sources:
        print("No migration files found")
        return None
    print(f"Backing up {len(sources)} migrations to {destination.name}/")
    shutil.copytree(VERSIONS_DIR, destination)
    # Clear versions directory (keep __pycache__ if exists)
    for migration in sources:
        migration.unlink()
    return destination
def create_fresh_migration():
    """Run ``alembic revision --autogenerate`` and return the new file's Path.

    Exits the process when alembic fails; returns None when the expected
    migration file cannot be located afterwards.
    """
    print("Generating fresh initial migration...")
    command = [
        sys.executable, "-m", "alembic", "revision",
        "--autogenerate", "-m", "initial_postgresql_schema",
    ]
    proc = subprocess.run(
        command, cwd=project_root, capture_output=True, text=True
    )
    if proc.returncode != 0:
        print("ERROR: Failed to generate migration")
        print(proc.stderr)
        sys.exit(1)
    print(proc.stdout)
    # Locate the file alembic just wrote; its name embeds the -m slug.
    for candidate in VERSIONS_DIR.glob("*initial_postgresql_schema*.py"):
        print(f"Created: {candidate.name}")
        return candidate
    return None
def clean_migration_file(migration_path: Path):
    """Warn about SQLite-era patterns left in the generated migration.

    Only inspects and reports; never modifies the file.  A falsy path
    (e.g. None from create_fresh_migration) is a no-op.
    """
    if not migration_path:
        return
    text = migration_path.read_text()
    # batch_alter_table is a SQLite workaround; flag it but leave it in
    # place in case the author kept it deliberately.
    if "batch_alter_table" in text:
        print("Note: Migration contains batch_alter_table - this is not needed for PostgreSQL")
        # We don't auto-remove as it might be intentional
    print(f"Review migration at: {migration_path}")
def stamp_database():
    """Mark the database as already at the newest revision via ``alembic stamp head``.

    Failure is non-fatal: the user may simply need to run migrate-up first,
    so we warn and continue instead of exiting.
    """
    print("Stamping database with new migration...")
    proc = subprocess.run(
        [sys.executable, "-m", "alembic", "stamp", "head"],
        cwd=project_root,
        capture_output=True,
        text=True,
    )
    if proc.returncode == 0:
        print("Database stamped at head")
    else:
        print("WARNING: Could not stamp database (may need to run migrate-up first)")
        print(proc.stderr)
def main():
    """Interactive entry point: back up old migrations, regenerate, stamp.

    Exits 0 when the user declines the confirmation prompt; exits 1 via the
    helpers when prerequisites or alembic fail.
    """
    print("=" * 60)
    print("MIGRATION SQUASH SCRIPT")
    print("=" * 60)
    print("")
    # Check prerequisites (exits if DATABASE_URL is not PostgreSQL)
    check_prerequisites()
    print("")
    # Confirm with user before doing anything destructive
    response = input("This will backup and replace all migrations. Continue? [y/N] ")
    if response.lower() != 'y':
        print("Aborted")
        sys.exit(0)
    print("")
    # Backup existing migrations (None when there was nothing to back up)
    backup_dir = backup_migrations()
    print("")
    # Create fresh migration from the current models
    new_migration = create_fresh_migration()
    print("")
    # Clean up the migration file (warns about SQLite-era patterns)
    clean_migration_file(new_migration)
    print("")
    # Stamp the database at the new head.  The module docstring documents
    # this as step 3 of the script, but the previous implementation defined
    # stamp_database() and never called it.
    stamp_database()
    print("")
    # Summary
    print("=" * 60)
    print("SQUASH COMPLETE")
    print("=" * 60)
    print("")
    if backup_dir:
        print(f"Backup location: {backup_dir}")
        print("")
    print("Next steps:")
    print("1. Review the new migration file")
    print("2. On a fresh database, run: make migrate-up")
    print("3. Verify all tables are created correctly")
    print("4. If satisfied, delete the backup directory")
    print("")
    # Only print restore instructions when a backup actually exists; the
    # old code interpolated backup_dir unconditionally and could emit a
    # bogus "cp -r None/* alembic/versions/" command.
    if backup_dir:
        print("To restore from backup:")
        print(" rm -rf alembic/versions/*.py")
        print(f" cp -r {backup_dir}/* alembic/versions/")


if __name__ == "__main__":
    main()

View File

@@ -380,6 +380,12 @@ class ArchitectureValidator:
suggestion="Keep SQLAlchemy models and Pydantic models separate",
)
# Alembic migrations
elif "/alembic/versions/" in file_path_str or "\\alembic\\versions\\" in file_path_str:
print("🔄 Validating as Alembic migration...")
self._check_migration_batch_mode(file_path, content, lines)
self._check_migration_constraint_names(file_path, content, lines)
# Generic Python file - check exception handling
print("⚠️ Validating exception handling...")
for i, line in enumerate(lines, 1):
@@ -3760,6 +3766,126 @@ class ArchitectureValidator:
suggestion="Fix JSON syntax error (check for trailing commas, missing quotes)",
)
def _check_migration_batch_mode(
    self, file_path: Path, content: str, lines: list[str]
):
    """MIG-001: Check that alter_column, drop_constraint, create_foreign_key use batch mode"""
    # Operations SQLite cannot perform as direct ALTERs, mapped to the
    # phrase used in the reported rule name.  Direct op.<name>( calls are
    # violations; batch_op.<name>( never matches the r"\bop\." patterns,
    # so code already inside a batch context passes on its own.  (The old
    # implementation also tracked an in_batch_context flag plus its indent
    # level, but no check ever read either variable — dead code, removed.)
    unsafe_ops = (
        ("alter_column", "column modifications"),
        ("drop_constraint", "constraint modifications"),
        ("create_foreign_key", "foreign key creation"),
    )
    for i, line in enumerate(lines, 1):
        stripped = line.strip()
        # The with-line opening a batch context is exempt from the checks
        # (preserves the original behavior of the context-entry branch).
        if "batch_alter_table(" in line or "with op.batch_alter_table" in line:
            continue
        # Skip comments
        if stripped.startswith("#"):
            continue
        for op_name, phrase in unsafe_ops:
            if re.search(rf"\bop\.{op_name}\(", line):
                self._add_violation(
                    rule_id="MIG-001",
                    rule_name=f"Use batch_alter_table for {phrase}",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message=f"op.{op_name}() not supported by SQLite - use batch mode",
                    context=stripped[:100],
                    suggestion=f"Use: with op.batch_alter_table('table') as batch_op: batch_op.{op_name}(...)",
                )
def _check_migration_constraint_names(
    self, file_path: Path, content: str, lines: list[str]
):
    """MIG-002: Check that constraints have explicit names (not None)"""
    # (pattern, message, suggestion) triples: passing None as the first
    # argument of any of these calls leaves the constraint unnamed.
    anonymous_constraints = (
        (
            r"create_foreign_key\s*\(\s*None\s*,",
            "Foreign key constraint must have an explicit name, not None",
            "Use: create_foreign_key('fk_table_column', ...)",
        ),
        (
            r"create_unique_constraint\s*\(\s*None\s*,",
            "Unique constraint must have an explicit name, not None",
            "Use: create_unique_constraint('uq_table_columns', ...)",
        ),
        (
            r"drop_constraint\s*\(\s*None\s*,",
            "Cannot drop constraint with None name",
            "Specify the constraint name to drop",
        ),
    )
    for line_no, raw_line in enumerate(lines, 1):
        stripped = raw_line.strip()
        # Skip comments
        if stripped.startswith("#"):
            continue
        for pattern, message, suggestion in anonymous_constraints:
            if re.search(pattern, raw_line):
                self._add_violation(
                    rule_id="MIG-002",
                    rule_name="Constraints must have explicit names",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=line_no,
                    message=message,
                    context=stripped[:100],
                    suggestion=suggestion,
                )
def _get_rule(self, rule_id: str) -> dict[str, Any]:
"""Get rule configuration by ID"""
# Look in different rule categories
@@ -3772,6 +3898,7 @@ class ArchitectureValidator:
"template_rules",
"frontend_component_rules",
"language_rules",
"migration_rules",
]:
rules = self.config.get(category, [])
for rule in rules: