Adds SoftDeleteMixin (deleted_at + deleted_by_id) with automatic query
filtering via do_orm_execute event. Soft-deleted records are invisible
by default; bypass with execution_options={"include_deleted": True}.
Models: User, Merchant, Store, StoreUser, Customer, Order, Product,
LoyaltyProgram, LoyaltyCard.
Infrastructure:
- SoftDeleteMixin in models/database/base.py
- Auto query filter registered on SessionLocal and test sessions
- soft_delete(), restore(), soft_delete_cascade() in app/core/soft_delete.py
- Alembic migration adding columns to 9 tables
- Partial unique indexes on users.email/username, stores.store_code/subdomain
Service changes:
- admin_service: delete_user, delete_store → soft_delete/soft_delete_cascade
- merchant_service: delete_merchant → soft_delete_cascade (stores→children)
- store_team_service: remove_team_member → soft_delete (fixes is_active bug)
- product_service: delete_product → soft_delete
- program_service: delete_program → soft_delete_cascade
Admin API:
- include_deleted/only_deleted query params on admin list endpoints
- PUT restore endpoints for users, merchants, stores
Tests: 9 unit tests for soft-delete infrastructure.
Docs: docs/backend/soft-delete.md + follow-up proposals.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
119 lines · 4.3 KiB · Python
"""Add soft delete columns (deleted_at, deleted_by_id) to business-critical tables.

Also converts unique constraints on users.email, users.username,
stores.store_code, stores.subdomain to partial unique indexes
that only apply to non-deleted rows.

Revision ID: softdelete_001
Revises: remove_is_primary_001, customers_002, dev_tools_002, orders_002, tenancy_004
Create Date: 2026-03-28
"""

from alembic import op
import sqlalchemy as sa


revision = "softdelete_001"
# Tuple down_revision: this is a merge revision joining five branch heads.
down_revision = (
    "remove_is_primary_001",
    "customers_002",
    "dev_tools_002",
    "orders_002",
    "tenancy_004",
)
branch_labels = None
depends_on = None

# Tables receiving soft-delete columns
SOFT_DELETE_TABLES = [
    "users",
    "merchants",
    "stores",
    "customers",
    "store_users",
    "orders",
    "products",
    "loyalty_programs",
    "loyalty_cards",
]
|
|
|
|
|
|
def upgrade() -> None:
    """Add soft-delete columns and convert key unique indexes to partial ones.

    Step 1 adds nullable ``deleted_at`` / ``deleted_by_id`` columns (the
    latter a FK to ``users.id`` with ON DELETE SET NULL) plus an index on
    ``deleted_at`` to every table in ``SOFT_DELETE_TABLES``.

    Step 2 replaces the plain unique indexes on users.email, users.username,
    stores.store_code and stores.subdomain with partial unique indexes that
    enforce uniqueness only among non-deleted rows, while keeping a
    non-unique index of the original name for lookups across all rows
    (including soft-deleted ones).
    """
    # ======================================================================
    # Step 1: Add deleted_at and deleted_by_id to all soft-delete tables
    # ======================================================================
    for table in SOFT_DELETE_TABLES:
        op.add_column(table, sa.Column("deleted_at", sa.DateTime(), nullable=True))
        op.add_column(
            table,
            sa.Column(
                "deleted_by_id",
                sa.Integer(),
                # SET NULL keeps the audit row even if the deleting user
                # is ever hard-deleted.
                sa.ForeignKey("users.id", ondelete="SET NULL"),
                nullable=True,
            ),
        )
        op.create_index(f"ix_{table}_deleted_at", table, ["deleted_at"])

    # ======================================================================
    # Step 2: Replace simple unique constraints with partial unique indexes
    # (only enforce uniqueness among non-deleted rows, so a soft-deleted
    # row's value can be reused by a new active row)
    # ======================================================================
    partial_unique_columns = [
        ("users", "email"),
        ("users", "username"),
        ("stores", "store_code"),
        ("stores", "subdomain"),
    ]
    for table, column in partial_unique_columns:
        # Drop the old unique index, create the partial unique index, then
        # keep a non-unique index for lookups on all rows (incl. deleted).
        op.drop_index(f"ix_{table}_{column}", table_name=table)
        op.execute(
            f"CREATE UNIQUE INDEX uq_{table}_{column}_active ON {table} ({column}) "
            "WHERE deleted_at IS NULL"
        )
        op.create_index(f"ix_{table}_{column}", table, [column])
|
|
|
|
|
|
def downgrade() -> None:
    """Revert: restore plain unique indexes and drop the soft-delete columns.

    Mirrors :func:`upgrade` in reverse order: first the four partial unique
    indexes are replaced by simple unique indexes, then the soft-delete
    columns/indexes are dropped from every table.
    """
    # Reverse partial unique indexes back to simple unique indexes
    # (opposite order from upgrade()).
    for table, column in [
        ("stores", "subdomain"),
        ("stores", "store_code"),
        ("users", "username"),
        ("users", "email"),
    ]:
        op.drop_index(f"ix_{table}_{column}", table_name=table)
        # IF EXISTS: tolerate a partially-applied upgrade.
        op.execute(f"DROP INDEX IF EXISTS uq_{table}_{column}_active")
        op.create_index(f"ix_{table}_{column}", table, [column], unique=True)

    # Remove soft-delete columns from all tables (reverse creation order).
    for table in reversed(SOFT_DELETE_TABLES):
        op.drop_index(f"ix_{table}_deleted_at", table_name=table)
        op.drop_column(table, "deleted_by_id")
        op.drop_column(table, "deleted_at")