Files
orion/scripts/validate_architecture.py
Samir Boulahtit 95a8ffc645 docs: add architecture rules and docs for e-commerce components
Architecture rules added:
- FE-008: Use number_stepper macro for quantity inputs
- FE-009: Use product_card macro for product displays
- FE-010: Use product_grid macro for product listings
- FE-011: Use add_to_cart macros for cart interactions
- FE-012: Use mini_cart macro for cart dropdown

Documentation:
- Update ui-components-quick-reference.md with e-commerce section
- Add component-standards.md for standardization guidelines
- Add ecommerce-components-proposal.md with full 20-component roadmap
- Update validate_architecture.py with FE-008 detection

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-07 17:04:28 +01:00

1636 lines
65 KiB
Python
Executable File

#!/usr/bin/env python3
"""
Architecture Validator
======================
Validates code against architectural rules defined in .architecture-rules.yaml
This script checks that the codebase follows key architectural decisions:
- Separation of concerns (routes vs services)
- Proper exception handling (domain exceptions vs HTTPException)
- Correct use of Pydantic vs SQLAlchemy models
- Service layer patterns
- API endpoint patterns
Usage:
python scripts/validate_architecture.py # Check all files in current directory
python scripts/validate_architecture.py -d app/api/ # Check specific directory
python scripts/validate_architecture.py -f app/api/v1/vendors.py # Check single file
python scripts/validate_architecture.py -o company # Check all company-related files
python scripts/validate_architecture.py -o vendor --verbose # Check vendor files with details
python scripts/validate_architecture.py --json # JSON output
Options:
-f, --file PATH Validate a single file (.py, .js, or .html)
-d, --folder PATH Validate all files in a directory (recursive)
-o, --object NAME Validate all files related to an entity (e.g., company, vendor, order)
-c, --config PATH Path to architecture rules config
-v, --verbose Show detailed output including context
--errors-only Only show errors, suppress warnings
--json Output results as JSON
"""
import argparse
import ast
import re
import sys
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import Any
import yaml
class Severity(Enum):
    """Validation severity levels.

    ERROR causes a non-zero exit / failed file; WARNING is reported but a
    file with only warnings still counts as passed (see FileResult.status).
    """
    ERROR = "error"
    WARNING = "warning"
    INFO = "info"
@dataclass
class Violation:
    """Represents an architectural rule violation."""
    rule_id: str        # e.g. "API-001", "FE-008"
    rule_name: str      # human-readable rule title from the YAML config
    severity: Severity
    file_path: Path
    line_number: int    # 1-based line where the violation was detected
    message: str
    context: str = ""      # offending source line (usually truncated)
    suggestion: str = ""   # how to fix, shown in verbose output
@dataclass
class FileResult:
    """Results for a single file validation.

    Aggregates per-file error/warning counts so the report can show a
    one-line status per file.
    """
    file_path: Path
    errors: int = 0
    warnings: int = 0

    @property
    def passed(self) -> bool:
        """A file passes when it has no error-level violations (warnings OK)."""
        return self.errors == 0

    @property
    def status(self) -> str:
        """Text status: FAILED, PASSED* (warnings only), or PASSED."""
        if self.errors > 0:
            return "FAILED"
        elif self.warnings > 0:
            return "PASSED*"
        return "PASSED"

    @property
    def status_icon(self) -> str:
        """Emoji matching `status` for the console report.

        Fixed: both the error branch and the clean branch previously
        returned "" (empty string), so failed and passed files rendered
        with the same blank icon. Restored distinct icons consistent with
        the emoji used elsewhere in this script.
        """
        if self.errors > 0:
            return "❌"
        elif self.warnings > 0:
            return "⚠️"
        return "✅"
@dataclass
class ValidationResult:
    """Aggregated outcome of an architecture validation run."""
    violations: list[Violation] = field(default_factory=list)
    files_checked: int = 0
    rules_applied: int = 0
    file_results: list[FileResult] = field(default_factory=list)

    def has_errors(self) -> bool:
        """Return True when at least one violation is error-level."""
        for violation in self.violations:
            if violation.severity == Severity.ERROR:
                return True
        return False

    def has_warnings(self) -> bool:
        """Return True when at least one violation is warning-level."""
        for violation in self.violations:
            if violation.severity == Severity.WARNING:
                return True
        return False
class ArchitectureValidator:
"""Main validator class"""
    def __init__(self, config_path: Path, verbose: bool = False):
        """Initialize validator with configuration.

        Loads the YAML rules file immediately; note that _load_config
        exits the process when the config file is missing.
        """
        self.config_path = config_path
        self.verbose = verbose
        self.config = self._load_config()
        self.result = ValidationResult()
        # Globs and relative-path output are anchored at the directory the
        # script is invoked from, not the script's own location.
        self.project_root = Path.cwd()
    def _load_config(self) -> dict[str, Any]:
        """Load validation rules from YAML config.

        Returns the parsed rules mapping. Exits the whole process with
        status 1 when the file is absent — acceptable because this class
        is only used from a CLI entry point.
        """
        if not self.config_path.exists():
            print(f"❌ Configuration file not found: {self.config_path}")
            sys.exit(1)
        with open(self.config_path) as f:
            config = yaml.safe_load(f)
        # 'project' is only used for the banner; missing key is harmless.
        print(f"📋 Loaded architecture rules: {config.get('project', 'unknown')}")
        return config
def validate_all(self, target_path: Path = None) -> ValidationResult:
"""Validate all files in a directory"""
print("\n🔍 Starting architecture validation...\n")
target = target_path or self.project_root
# Validate API endpoints
self._validate_api_endpoints(target)
# Validate service layer
self._validate_service_layer(target)
# Validate models
self._validate_models(target)
# Validate exception handling
self._validate_exceptions(target)
# Validate JavaScript
self._validate_javascript(target)
# Validate templates
self._validate_templates(target)
return self.result
    def validate_file(
        self, file_path: Path, quiet: bool = False
    ) -> ValidationResult:
        """Validate a single file.

        Dispatches on suffix (.py / .js / .html), appends any violations to
        the shared result, and records a per-file FileResult with counts.
        Returns the accumulated result; unknown suffixes are reported (when
        not quiet) but still produce a FileResult with zero counts.
        """
        if not file_path.exists():
            if not quiet:
                print(f"❌ File not found: {file_path}")
            return self.result
        if not file_path.is_file():
            if not quiet:
                print(f"❌ Not a file: {file_path}")
            return self.result
        if not quiet:
            print(f"\n🔍 Validating single file: {file_path}\n")
        # Resolve file path to absolute so path-substring checks
        # (e.g. "/app/api/") behave consistently.
        file_path = file_path.resolve()
        file_path_str = str(file_path)
        if self._should_ignore_file(file_path):
            if not quiet:
                print("⏭️ File is in ignore list, skipping")
            return self.result
        self.result.files_checked += 1
        # Snapshot the violation count so this file's violations can be
        # sliced out afterwards for the per-file summary.
        violations_before = len(self.result.violations)
        # NOTE(review): read_text() uses the platform default encoding —
        # presumably UTF-8 sources; confirm on Windows.
        content = file_path.read_text()
        lines = content.split("\n")
        # Determine file type and run appropriate validators
        if file_path.suffix == ".py":
            self._validate_python_file(file_path, content, lines, file_path_str)
        elif file_path.suffix == ".js":
            self._validate_js_file(file_path, content, lines)
        elif file_path.suffix == ".html":
            self._validate_html_file(file_path, content, lines)
        else:
            if not quiet:
                print(f"⚠️ Unsupported file type: {file_path.suffix}")
        # Everything appended since the snapshot belongs to this file.
        file_violations = self.result.violations[violations_before:]
        errors = sum(1 for v in file_violations if v.severity == Severity.ERROR)
        warnings = sum(1 for v in file_violations if v.severity == Severity.WARNING)
        # Track file result
        self.result.file_results.append(
            FileResult(file_path=file_path, errors=errors, warnings=warnings)
        )
        return self.result
    def validate_object(self, object_name: str) -> ValidationResult:
        """Validate all files related to an entity (e.g., company, vendor, order).

        Expands the name into singular/plural variants, globs the known
        project locations for matching files, and validates each one via
        validate_file (quiet mode, with a per-file banner printed here).
        """
        print(f"\n🔍 Searching for '{object_name}'-related files...\n")
        # Generate name variants (singular/plural forms)
        name = object_name.lower()
        variants = {name}
        # Handle common plural patterns
        if name.endswith("ies"):
            # companies -> company
            variants.add(name[:-3] + "y")
        elif name.endswith("s"):
            # vendors -> vendor
            variants.add(name[:-1])
        else:
            # company -> companies, vendor -> vendors
            if name.endswith("y"):
                variants.add(name[:-1] + "ies")
            variants.add(name + "s")
        # Search patterns for different file types — one glob per known
        # project location where entity-named files live.
        patterns = []
        for variant in variants:
            patterns.extend([
                f"app/api/**/*{variant}*.py",
                f"app/services/*{variant}*.py",
                f"app/exceptions/*{variant}*.py",
                f"models/database/*{variant}*.py",
                f"models/schema/*{variant}*.py",
                f"static/admin/js/*{variant}*.js",
                f"app/templates/admin/*{variant}*.html",
            ])
        # Find all matching files; a set dedupes overlap between variants.
        found_files: set[Path] = set()
        for pattern in patterns:
            matches = list(self.project_root.glob(pattern))
            for match in matches:
                if match.is_file() and not self._should_ignore_file(match):
                    found_files.add(match)
        if not found_files:
            print(f"❌ No files found matching '{object_name}'")
            return self.result
        # Sort files by type for better readability
        sorted_files = sorted(found_files, key=lambda p: (p.suffix, str(p)))
        print(f"📁 Found {len(sorted_files)} files:\n")
        for f in sorted_files:
            rel_path = f.relative_to(self.project_root)
            print(f"{rel_path}")
        print("\n" + "-" * 60 + "\n")
        # Validate each file (quiet=True: banners are printed here instead)
        for file_path in sorted_files:
            rel_path = file_path.relative_to(self.project_root)
            print(f"📄 {rel_path}")
            self.validate_file(file_path, quiet=True)
        return self.result
    def _validate_python_file(
        self, file_path: Path, content: str, lines: list[str], file_path_str: str
    ):
        """Validate a single Python file based on its location.

        API / service / model files get their layer-specific checks; the
        bare-except check (EXC-002) at the bottom runs for EVERY Python
        file — it is outside the if/elif chain, so it applies to API,
        service, and model files as well, not only "generic" ones.
        """
        # API endpoints (both POSIX and Windows path separators)
        if "/app/api/" in file_path_str or "\\app\\api\\" in file_path_str:
            print("📡 Validating as API endpoint...")
            self._check_pydantic_usage(file_path, content, lines)
            self._check_no_business_logic_in_endpoints(file_path, content, lines)
            self._check_endpoint_exception_handling(file_path, content, lines)
            self._check_endpoint_authentication(file_path, content, lines)
        # Service layer
        elif "/app/services/" in file_path_str or "\\app\\services\\" in file_path_str:
            print("🔧 Validating as service layer...")
            self._check_no_http_exception_in_services(file_path, content, lines)
            self._check_service_exceptions(file_path, content, lines)
            self._check_db_session_parameter(file_path, content, lines)
            self._check_no_commit_in_services(file_path, content, lines)
        # Models
        elif "/app/models/" in file_path_str or "\\app\\models\\" in file_path_str:
            print("📦 Validating as model...")
            for i, line in enumerate(lines, 1):
                # MDL-002: a class inheriting both SQLAlchemy Base and
                # Pydantic BaseModel mixes persistence with serialization.
                if re.search(r"class.*\(Base.*,.*BaseModel.*\)", line):
                    self._add_violation(
                        rule_id="MDL-002",
                        rule_name="Separate SQLAlchemy and Pydantic models",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message="Model mixes SQLAlchemy Base and Pydantic BaseModel",
                        context=line.strip(),
                        suggestion="Keep SQLAlchemy models and Pydantic models separate",
                    )
        # EXC-002 applies to all Python files regardless of layer.
        print("⚠️ Validating exception handling...")
        for i, line in enumerate(lines, 1):
            if re.match(r"\s*except\s*:", line):
                self._add_violation(
                    rule_id="EXC-002",
                    rule_name="No bare except clauses",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message="Bare except clause catches all exceptions including system exits",
                    context=line.strip(),
                    suggestion="Specify exception type: except ValueError: or except Exception:",
                )
def _validate_js_file(self, file_path: Path, content: str, lines: list[str]):
"""Validate a single JavaScript file"""
print("🟨 Validating JavaScript...")
# JS-001: Check for console usage (must use centralized logger)
# Skip init-*.js files - they run before logger is available
if not file_path.name.startswith("init-"):
for i, line in enumerate(lines, 1):
if re.search(r"console\.(log|warn|error)", line):
if "//" in line or "" in line or "eslint-disable" in line:
continue
self._add_violation(
rule_id="JS-001",
rule_name="Use centralized logger",
severity=Severity.WARNING,
file_path=file_path,
line_number=i,
message="Use centralized logger instead of console",
context=line.strip()[:80],
suggestion="Use window.LogConfig.createLogger('moduleName')",
)
# JS-002: Check for window.apiClient (must use lowercase apiClient)
for i, line in enumerate(lines, 1):
if "window.apiClient" in line:
before_occurrence = line[: line.find("window.apiClient")]
if "//" not in before_occurrence:
self._add_violation(
rule_id="JS-002",
rule_name="Use lowercase apiClient",
severity=Severity.WARNING,
file_path=file_path,
line_number=i,
message="Use apiClient directly instead of window.apiClient",
context=line.strip(),
suggestion="Replace window.apiClient with apiClient",
)
    def _validate_html_file(self, file_path: Path, content: str, lines: list[str]):
        """Validate a single HTML template file.

        Runs the FE macro-usage checks (unless the template is a base,
        partial, macro, or the components showcase page), then the TPL-001
        "must extend admin/base.html" check for admin templates.
        """
        print("📄 Validating template...")
        file_path_str = str(file_path)
        # Skip base template and partials for extends check
        is_base_or_partial = "base.html" in file_path.name or "partials" in file_path_str
        # Skip macros directory for FE rules
        is_macro = "shared/macros/" in file_path_str or "shared\\macros\\" in file_path_str
        # Skip components showcase page
        is_components_page = "components.html" in file_path.name
        if is_base_or_partial:
            print("⏭️ Skipping base/partial template")
        elif is_macro:
            print("⏭️ Skipping macro file")
        else:
            # FE-001: Check for inline pagination (should use macro)
            if not is_components_page:
                self._check_pagination_macro_usage(file_path, content, lines)
            # FE-002: Check for inline SVGs (should use $icon())
            # NOTE(review): `not is_macro` is redundant here and below —
            # this branch is only reached when is_macro is False.
            if not is_components_page and not is_macro:
                self._check_icon_helper_usage(file_path, content, lines)
            # FE-008: Check for raw number inputs (should use number_stepper)
            if not is_components_page and not is_macro:
                self._check_number_stepper_macro_usage(file_path, content, lines)
        # Only check admin templates for extends
        if "/admin/" not in file_path_str and "\\admin\\" not in file_path_str:
            return
        if is_base_or_partial:
            return
        # Check for standalone marker in template (first 5 lines)
        # Supports: {# standalone #}, {# noqa: TPL-001 #}, <!-- standalone -->
        first_lines = "\n".join(lines[:5]).lower()
        if "standalone" in first_lines or "noqa: tpl-001" in first_lines:
            print("⏭️ Template marked as standalone, skipping extends check")
            return
        # Check exclusion patterns for TPL-001
        # These are templates that intentionally don't extend admin/base.html
        tpl_001_exclusions = [
            "login.html",  # Standalone login page
            "errors/",  # Error pages extend errors/base.html
            "test-",  # Test templates
        ]
        for exclusion in tpl_001_exclusions:
            if exclusion in file_path_str:
                print(f"⏭️ Template matches exclusion pattern '{exclusion}', skipping")
                return
        # TPL-001: Check for extends
        has_extends = any(
            "{% extends" in line and "admin/base.html" in line for line in lines
        )
        if not has_extends:
            self._add_violation(
                rule_id="TPL-001",
                rule_name="Templates must extend base",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=1,
                message="Admin template does not extend admin/base.html",
                context=file_path.name,
                suggestion="Add {% extends 'admin/base.html' %} at the top, or add {# standalone #} if intentional",
            )
def _check_pagination_macro_usage(self, file_path: Path, content: str, lines: list[str]):
"""FE-001: Check for inline pagination that should use macro"""
# Check if already using the pagination macro
uses_macro = any("from 'shared/macros/pagination.html'" in line for line in lines)
if uses_macro:
return
# Check for noqa: FE-001 comment
has_noqa = any("noqa: fe-001" in line.lower() for line in lines)
if has_noqa:
return
# Look for signs of inline pagination
pagination_indicators = [
('aria-label="Table navigation"', "Inline table navigation found"),
("previousPage()" , "Inline pagination controls found"),
("nextPage()" , "Inline pagination controls found"),
("goToPage(" , "Inline pagination controls found"),
]
for i, line in enumerate(lines, 1):
for pattern, message in pagination_indicators:
if pattern in line:
# Skip if it's in a comment
stripped = line.strip()
if stripped.startswith("{#") or stripped.startswith("<!--"):
continue
self._add_violation(
rule_id="FE-001",
rule_name="Use pagination macro",
severity=Severity.WARNING,
file_path=file_path,
line_number=i,
message=message + " - use shared macro instead",
context=stripped[:60],
suggestion="{% from 'shared/macros/pagination.html' import pagination %}\n{{ pagination() }}",
)
return # Only report once per file
    def _check_icon_helper_usage(self, file_path: Path, content: str, lines: list[str]):
        """FE-002: Check for inline SVGs that should use $icon() helper.

        Scans the whole file content (not line-by-line) because SVG
        elements usually span multiple lines; reports one violation per
        SVG found, except those inside <pre>/<code> examples.
        """
        # Check for noqa: FE-002 comment
        has_noqa = any("noqa: fe-002" in line.lower() for line in lines)
        if has_noqa:
            return
        # Pattern to find inline SVGs; DOTALL lets it span lines.
        svg_pattern = re.compile(r'<svg[^>]*viewBox[^>]*>.*?</svg>', re.DOTALL | re.IGNORECASE)
        # Find all SVG occurrences
        for match in svg_pattern.finditer(content):
            # Derive the 1-based line number from the match offset.
            line_num = content[:match.start()].count('\n') + 1
            # Skip if this is likely in a code example (inside <pre> or <code>)
            # NOTE(review): only the preceding 200 chars are inspected, and an
            # opening tag there suffices — a heuristic, not real HTML parsing.
            context_before = content[max(0, match.start()-200):match.start()]
            if '<pre' in context_before or '<code' in context_before:
                continue
            self._add_violation(
                rule_id="FE-002",
                rule_name="Use $icon() helper",
                severity=Severity.WARNING,
                file_path=file_path,
                line_number=line_num,
                message="Inline SVG found - use $icon() helper for consistency",
                context="<svg...>",
                suggestion='<span x-html="$icon(\'icon-name\', \'w-4 h-4\')"></span>',
            )
    def _check_alerts_macro_usage(self, file_path: Path, content: str, lines: list[str]):
        """FE-003: Check for inline loading/error states that should use alerts macro.

        Reports at most one violation per file (returns after the first
        loading-state or error-state hit).
        """
        # Check if already using the alerts macro
        uses_macro = any("from 'shared/macros/alerts.html'" in line for line in lines)
        if uses_macro:
            return
        # Check for noqa comment
        has_noqa = any("noqa: fe-003" in line.lower() for line in lines)
        if has_noqa:
            return
        # Look for inline loading states
        for i, line in enumerate(lines, 1):
            # Loading state pattern: text-center py-12 with loading content
            if 'x-show="loading"' in line or "x-show='loading'" in line:
                # Check the current line plus the next 3 for spinner styling
                # (i is 1-based, so lines[i-1:i+3] starts at this line).
                context_lines = "\n".join(lines[i-1:i+3])
                if "text-center" in context_lines and "py-12" in context_lines:
                    self._add_violation(
                        rule_id="FE-003",
                        rule_name="Use alerts macro",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=i,
                        message="Inline loading state found - use loading_state macro",
                        context=line.strip()[:60],
                        suggestion="{% from 'shared/macros/alerts.html' import loading_state %}\n{{ loading_state('Loading...') }}",
                    )
                    return  # Only report once per file
            # Error state pattern: bg-red-100 border-red-400
            if "bg-red-100" in line and "border-red-400" in line:
                self._add_violation(
                    rule_id="FE-003",
                    rule_name="Use alerts macro",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Inline error state found - use error_state macro",
                    context=line.strip()[:60],
                    suggestion="{% from 'shared/macros/alerts.html' import error_state %}\n{{ error_state('Error', 'error') }}",
                )
                return
    def _check_modals_macro_usage(self, file_path: Path, content: str, lines: list[str]):
        """FE-004: Check for inline modals that should use modals macro.

        Heuristic: a full-screen overlay (fixed inset-0 with a high
        z-index) whose nearby lines carry dialog/backdrop markers.
        Reports at most once per file.
        """
        # Check if already using the modals macro
        uses_macro = any("from 'shared/macros/modals.html'" in line for line in lines)
        if uses_macro:
            return
        # Check for noqa comment
        has_noqa = any("noqa: fe-004" in line.lower() for line in lines)
        if has_noqa:
            return
        # Look for modal patterns: fixed inset-0 with role="dialog" or modal backdrop
        for i, line in enumerate(lines, 1):
            if "fixed inset-0" in line and ("z-50" in line or "z-30" in line or "z-40" in line):
                # Inspect one line before through five after for modal markers.
                context_lines = "\n".join(lines[max(0, i-1):min(len(lines), i+5)])
                if 'role="dialog"' in context_lines or "bg-opacity-50" in context_lines or "bg-black/50" in context_lines:
                    self._add_violation(
                        rule_id="FE-004",
                        rule_name="Use modals macro",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=i,
                        message="Inline modal found - use modal macro for consistency",
                        context=line.strip()[:60],
                        suggestion="{% from 'shared/macros/modals.html' import modal %}\n{% call modal('myModal', 'Title', 'isModalOpen') %}...{% endcall %}",
                    )
                    return
    def _check_tables_macro_usage(self, file_path: Path, content: str, lines: list[str]):
        """FE-005: Check for inline table wrappers that should use tables macro.

        Heuristic: the project's standard wrapper classes
        (overflow-hidden rounded-lg shadow-xs) with a <table> within the
        next ~10 lines. Reports at most once per file.
        """
        # Check if already using the tables macro
        uses_macro = any("from 'shared/macros/tables.html'" in line for line in lines)
        if uses_macro:
            return
        # Check for noqa comment
        has_noqa = any("noqa: fe-005" in line.lower() for line in lines)
        if has_noqa:
            return
        # Look for table wrapper pattern: overflow-hidden rounded-lg shadow-xs
        for i, line in enumerate(lines, 1):
            if "overflow-hidden" in line and "rounded-lg" in line and "shadow-xs" in line:
                # Check if there's a table nearby (this line + next 10).
                context_lines = "\n".join(lines[i-1:min(len(lines), i+10)])
                if "<table" in context_lines:
                    self._add_violation(
                        rule_id="FE-005",
                        rule_name="Use tables macro",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=i,
                        message="Inline table wrapper found - use table_wrapper macro",
                        context=line.strip()[:60],
                        suggestion="{% from 'shared/macros/tables.html' import table_wrapper, table_header %}\n{% call table_wrapper() %}...{% endcall %}",
                    )
                    return
    def _check_dropdowns_macro_usage(self, file_path: Path, content: str, lines: list[str]):
        """FE-006: Check for inline dropdowns that should use dropdowns macro.

        Heuristic: an Alpine @click.away/@click.outside handler whose
        surrounding lines show absolutely-positioned, z-indexed menu
        styling — excluding full-screen overlays, which are modals
        (FE-004). Reports at most once per file.
        """
        # Check if already using the dropdowns macro
        uses_macro = any("from 'shared/macros/dropdowns.html'" in line for line in lines)
        if uses_macro:
            return
        # Check for noqa comment
        has_noqa = any("noqa: fe-006" in line.lower() for line in lines)
        if has_noqa:
            return
        # Look for dropdown patterns: @click.outside or @click.away with menu positioning
        for i, line in enumerate(lines, 1):
            # Match @click.away="something = false" or @click.outside="..."
            if "@click.away=" in line or "@click.outside=" in line:
                # Window of 5 lines before through 10 after the handler.
                context_lines = "\n".join(lines[max(0, i-5):min(len(lines), i+10)])
                # Skip if it looks like a modal (fixed inset-0) —
                # modals also use @click.away but are covered by FE-004.
                if "fixed inset-0" in context_lines:
                    continue
                # Look for dropdown menu styling: absolute positioning with z-index
                if "absolute" in context_lines and ("z-10" in context_lines or "z-20" in context_lines or "z-50" in context_lines):
                    # Additional indicators: shadow, border, bg-white/dark, rounded
                    if "shadow" in context_lines or "border" in context_lines:
                        self._add_violation(
                            rule_id="FE-006",
                            rule_name="Use dropdowns macro",
                            severity=Severity.WARNING,
                            file_path=file_path,
                            line_number=i,
                            message="Inline dropdown menu found - use dropdown macro",
                            context=line.strip()[:60],
                            suggestion="{% from 'shared/macros/dropdowns.html' import dropdown, dropdown_item %}\n{% call dropdown('Label', 'isOpen') %}...{% endcall %}",
                        )
                        return
def _check_headers_macro_usage(self, file_path: Path, content: str, lines: list[str]):
"""FE-007: Check for inline page headers that should use headers macro"""
# Check if already using the headers macro
uses_macro = any("from 'shared/macros/headers.html'" in line for line in lines)
if uses_macro:
return
# Check for noqa comment
has_noqa = any("noqa: fe-007" in line.lower() for line in lines)
if has_noqa:
return
# Look for page header pattern: flex with h2 text-2xl font-semibold
for i, line in enumerate(lines, 1):
if '<h2' in line and 'text-2xl' in line and 'font-semibold' in line:
# Check context for typical page header pattern
context_before = "\n".join(lines[max(0, i-3):i])
if "flex" in context_before and ("justify-between" in context_before or "items-center" in context_before):
self._add_violation(
rule_id="FE-007",
rule_name="Use headers macro",
severity=Severity.WARNING,
file_path=file_path,
line_number=i,
message="Inline page header found - use page_header macro",
context=line.strip()[:60],
suggestion="{% from 'shared/macros/headers.html' import page_header %}\n{{ page_header('Title', action_label='Create', action_url='/create') }}",
)
return
    def _check_number_stepper_macro_usage(self, file_path: Path, content: str, lines: list[str]):
        """FE-008: Check for raw number inputs that should use number_stepper macro
        Detects <input type="number"> that should use the number_stepper macro for
        consistent styling and dark mode support.
        Exceptions:
        - ID fields (placeholder contains 'id' or 'ID')
        - Files already importing number_stepper
        - Lines with noqa: FE-008 comment
        """
        # Check if already using the number_stepper macro
        uses_macro = any("number_stepper" in line for line in lines)
        if uses_macro:
            return
        # Check for file-level noqa comment
        has_noqa = any("noqa: fe-008" in line.lower() for line in lines)
        if has_noqa:
            return
        # Look for raw number inputs
        for i, line in enumerate(lines, 1):
            if 'type="number"' in line or "type='number'" in line:
                # Skip if line has noqa comment
                if "noqa" in line.lower():
                    continue
                # Skip if it looks like an ID field (check surrounding lines for context)
                context_lines = "\n".join(lines[max(0, i-3):min(len(lines), i+2)]).lower()
                # NOTE(review): due to precedence this parses as
                # "user id" in ctx OR ("placeholder" in ctx AND "id" in ctx),
                # and the bare "id" substring also matches words like
                # "width"/"hidden" — confirm this matches the intent.
                if "user id" in context_lines or "placeholder" in context_lines and "id" in context_lines:
                    continue
                # Skip if it's in a comment
                stripped = line.strip()
                if stripped.startswith("{#") or stripped.startswith("<!--") or stripped.startswith("//"):
                    continue
                self._add_violation(
                    rule_id="FE-008",
                    rule_name="Use number_stepper macro",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Raw number input found - use number_stepper macro for consistent styling",
                    context=stripped[:70],
                    suggestion="{% from 'shared/macros/inputs.html' import number_stepper %}\n{{ number_stepper(model='fieldName', min=1, max=100) }}\nOr add {# noqa: FE-008 #} if this is intentional (e.g., ID field)",
                )
                return  # Only report once per file
    def _validate_api_endpoints(self, target_path: Path):
        """Validate API endpoint rules (API-001, API-002, API-003, API-004).

        Globs app/api/v1/** recursively under *target_path* and runs the
        four endpoint checks on each file.
        """
        print("📡 Validating API endpoints...")
        api_files = list(target_path.glob("app/api/v1/**/*.py"))
        # NOTE(review): files_checked is incremented before the ignore
        # filter below, so ignored files are counted here — unlike
        # validate_file, which skips them. Confirm which is intended.
        self.result.files_checked += len(api_files)
        for file_path in api_files:
            if self._should_ignore_file(file_path):
                continue
            content = file_path.read_text()
            lines = content.split("\n")
            # API-001: Check for Pydantic model usage
            self._check_pydantic_usage(file_path, content, lines)
            # API-002: Check for business logic in endpoints
            self._check_no_business_logic_in_endpoints(file_path, content, lines)
            # API-003: Check exception handling
            self._check_endpoint_exception_handling(file_path, content, lines)
            # API-004: Check authentication
            self._check_endpoint_authentication(file_path, content, lines)
def _check_pydantic_usage(self, file_path: Path, content: str, lines: list[str]):
"""API-001: Ensure endpoints use Pydantic models"""
rule = self._get_rule("API-001")
if not rule:
return
# Check for response_model in route decorators
route_pattern = r"@router\.(get|post|put|delete|patch)"
dict_return_pattern = r"return\s+\{.*\}"
for i, line in enumerate(lines, 1):
# Check for dict returns in endpoints
if re.search(route_pattern, line):
# Look ahead for function body
func_start = i
indent = len(line) - len(line.lstrip())
# Find function body
for j in range(func_start, min(func_start + 20, len(lines))):
if j >= len(lines):
break
func_line = lines[j]
if re.search(dict_return_pattern, func_line):
self._add_violation(
rule_id="API-001",
rule_name=rule["name"],
severity=Severity.ERROR,
file_path=file_path,
line_number=j + 1,
message="Endpoint returns raw dict instead of Pydantic model",
context=func_line.strip(),
suggestion="Define a Pydantic response model and use response_model parameter",
)
def _check_no_business_logic_in_endpoints(
self, file_path: Path, content: str, lines: list[str]
):
"""API-002: Ensure no business logic in endpoints"""
rule = self._get_rule("API-002")
if not rule:
return
# NOTE: db.commit() is intentionally NOT included here
# Transaction control (commit) is allowed at endpoint level
# Only business logic operations are flagged
anti_patterns = [
(r"db\.add\(", "Creating entities should be in service layer"),
(r"db\.delete\(", "Deleting entities should be in service layer"),
(r"db\.query\(", "Database queries should be in service layer"),
(r"db\.execute\(", "Database operations should be in service layer"),
]
for i, line in enumerate(lines, 1):
# Skip service method calls (allowed)
if "_service." in line or "service." in line:
continue
for pattern, message in anti_patterns:
if re.search(pattern, line):
self._add_violation(
rule_id="API-002",
rule_name=rule["name"],
severity=Severity.ERROR,
file_path=file_path,
line_number=i,
message=message,
context=line.strip(),
suggestion="Move database operations to service layer",
)
    def _check_endpoint_exception_handling(
        self, file_path: Path, content: str, lines: list[str]
    ):
        """API-003: Check that endpoints do NOT raise exceptions directly.
        The architecture uses:
        - Dependencies (deps.py) for authentication/authorization validation
        - Services for business logic validation
        - Global exception handler that catches WizamartException subclasses
        Endpoints should be a thin orchestration layer that trusts dependencies
        and services to handle all validation. They should NOT raise exceptions.
        """
        rule = self._get_rule("API-003")
        if not rule:
            return
        # Skip exception handler file and deps.py - they're allowed to raise exceptions
        file_path_str = str(file_path)
        if "exceptions/handler.py" in file_path_str or file_path_str.endswith("deps.py"):
            return
        # Patterns that indicate endpoints are raising exceptions (BAD).
        # Matched as plain substrings, one message per exception type.
        exception_patterns = [
            ("raise HTTPException", "Endpoint raises HTTPException directly"),
            ("raise InvalidTokenException", "Endpoint raises InvalidTokenException - move to dependency"),
            ("raise InsufficientPermissionsException", "Endpoint raises permission exception - move to dependency"),
            ("raise UnauthorizedVendorAccessException", "Endpoint raises auth exception - move to dependency or service"),
        ]
        # Patterns that indicate redundant validation (BAD) — these checks
        # duplicate guarantees already enforced by the auth dependencies.
        redundant_patterns = [
            (r"if not hasattr\(current_user.*token_vendor", "Redundant token_vendor check - get_current_vendor_api guarantees this"),
            (r"if not hasattr\(current_user.*token_vendor_id", "Redundant token_vendor_id check - dependency guarantees this"),
        ]
        for i, line in enumerate(lines, 1):
            # Skip comments
            stripped = line.strip()
            if stripped.startswith("#"):
                continue
            # Check for direct exception raising
            for pattern, message in exception_patterns:
                if pattern in line:
                    self._add_violation(
                        rule_id="API-003",
                        rule_name=rule["name"],
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message=message,
                        context=stripped[:80],
                        suggestion="Let dependencies or services handle validation and raise exceptions",
                    )
            # Check for redundant validation patterns
            for pattern, message in redundant_patterns:
                if re.search(pattern, line):
                    self._add_violation(
                        rule_id="API-003",
                        rule_name=rule["name"],
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message=message,
                        context=stripped[:80],
                        suggestion="Remove redundant check - auth dependency guarantees this attribute is present",
                    )
    def _check_endpoint_authentication(
        self, file_path: Path, content: str, lines: list[str]
    ):
        """API-004: Check authentication on endpoints
        Automatically skips:
        - Auth endpoint files (*/auth.py) - login/logout are intentionally public
        - Endpoints marked with '# public' comment

        Warning-level heuristic: only mutating verbs (post/put/delete) are
        checked, and auth must appear within 15 lines after the decorator.
        """
        rule = self._get_rule("API-004")
        if not rule:
            return
        # Skip auth endpoint files entirely - they are intentionally public
        file_path_str = str(file_path)
        if file_path_str.endswith("/auth.py") or file_path_str.endswith("\\auth.py"):
            return
        # This is a warning-level check
        # Look for endpoints without proper authentication
        # Valid auth patterns:
        # - Depends(get_current_*) - direct user authentication
        # - Depends(require_vendor_*) - vendor permission dependencies
        # - Depends(require_any_vendor_*) - any permission check
        # - Depends(require_all_vendor*) - all permissions check
        # - Depends(get_user_permissions) - permission fetching
        auth_patterns = [
            "Depends(get_current_",
            "Depends(require_vendor_",
            "Depends(require_any_vendor_",
            "Depends(require_all_vendor",
            "Depends(get_user_permissions",
        ]
        for i, line in enumerate(lines, 1):
            if "@router." in line and (
                "post" in line or "put" in line or "delete" in line
            ):
                # Check next 15 lines for auth or public marker
                # (increased from 5 to handle multi-line decorators and long function signatures)
                has_auth = False
                is_public = False
                context_lines = lines[i - 1 : i + 15]  # Include line before decorator
                for ctx_line in context_lines:
                    # Check for any valid auth pattern
                    if any(pattern in ctx_line for pattern in auth_patterns):
                        has_auth = True
                        break
                    # Check for public endpoint markers
                    if "# public" in ctx_line.lower() or "# noqa: api-004" in ctx_line.lower():
                        is_public = True
                        break
                # Hidden endpoints (include_in_schema=False) are also exempt.
                if not has_auth and not is_public and "include_in_schema=False" not in " ".join(
                    lines[i : i + 15]
                ):
                    # Determine appropriate suggestion based on file path
                    file_path_str = str(file_path)
                    if "/vendor/" in file_path_str:
                        suggestion = "Add Depends(get_current_vendor_api) or permission dependency, or mark as '# public'"
                    elif "/admin/" in file_path_str:
                        suggestion = "Add Depends(get_current_admin_api), or mark as '# public'"
                    elif "/shop/" in file_path_str:
                        suggestion = "Add Depends(get_current_customer_api), or mark as '# public'"
                    else:
                        suggestion = "Add authentication dependency or mark as '# public' if intentionally unauthenticated"
                    self._add_violation(
                        rule_id="API-004",
                        rule_name=rule["name"],
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=i,
                        message="Endpoint may be missing authentication",
                        context=line.strip(),
                        suggestion=suggestion,
                    )
    def _validate_service_layer(self, target_path: Path):
        """Validate service layer rules (SVC-001, SVC-002, SVC-003, SVC-006).

        Globs every Python file under app/services/ and runs the per-file
        service checks on each (skipping ignored files).

        NOTE(review): this docstring previously also listed SVC-004, but no
        SVC-004 check is invoked below — confirm whether it is implemented
        elsewhere or still pending.
        """
        print("🔧 Validating service layer...")
        service_files = list(target_path.glob("app/services/**/*.py"))
        # Count globbed files before filtering, matching the other validators.
        self.result.files_checked += len(service_files)
        for file_path in service_files:
            if self._should_ignore_file(file_path):
                continue
            content = file_path.read_text()
            lines = content.split("\n")
            # SVC-001: No HTTPException in services
            self._check_no_http_exception_in_services(file_path, content, lines)
            # SVC-002: Proper exception handling
            self._check_service_exceptions(file_path, content, lines)
            # SVC-003: DB session as parameter
            self._check_db_session_parameter(file_path, content, lines)
            # SVC-006: No db.commit() in services
            self._check_no_commit_in_services(file_path, content, lines)
def _check_no_http_exception_in_services(
self, file_path: Path, content: str, lines: list[str]
):
"""SVC-001: Services must not raise HTTPException"""
rule = self._get_rule("SVC-001")
if not rule:
return
for i, line in enumerate(lines, 1):
if "raise HTTPException" in line:
self._add_violation(
rule_id="SVC-001",
rule_name=rule["name"],
severity=Severity.ERROR,
file_path=file_path,
line_number=i,
message="Service raises HTTPException - use domain exceptions instead",
context=line.strip(),
suggestion="Create custom exception class (e.g., VendorNotFoundError) and raise that",
)
if (
"from fastapi import HTTPException" in line
or "from fastapi.exceptions import HTTPException" in line
):
self._add_violation(
rule_id="SVC-001",
rule_name=rule["name"],
severity=Severity.ERROR,
file_path=file_path,
line_number=i,
message="Service imports HTTPException - services should not know about HTTP",
context=line.strip(),
suggestion="Remove HTTPException import and use domain exceptions",
)
def _check_service_exceptions(
self, file_path: Path, content: str, lines: list[str]
):
"""SVC-002: Check for proper exception handling"""
rule = self._get_rule("SVC-002")
if not rule:
return
for i, line in enumerate(lines, 1):
# Check for generic Exception raises
if re.match(r"\s*raise Exception\(", line):
self._add_violation(
rule_id="SVC-002",
rule_name=rule["name"],
severity=Severity.WARNING,
file_path=file_path,
line_number=i,
message="Service raises generic Exception - use specific domain exception",
context=line.strip(),
suggestion="Create custom exception class for this error case",
)
def _check_db_session_parameter(
self, file_path: Path, content: str, lines: list[str]
):
"""SVC-003: Service methods should accept db session as parameter"""
rule = self._get_rule("SVC-003")
if not rule:
return
# Check for SessionLocal() creation in service files
for i, line in enumerate(lines, 1):
if "SessionLocal()" in line and "class" not in line:
self._add_violation(
rule_id="SVC-003",
rule_name=rule["name"],
severity=Severity.ERROR,
file_path=file_path,
line_number=i,
message="Service creates database session internally",
context=line.strip(),
suggestion="Accept db: Session as method parameter instead",
)
def _check_no_commit_in_services(
self, file_path: Path, content: str, lines: list[str]
):
"""SVC-006: Services should NOT call db.commit()
Transaction control belongs at the API endpoint level.
Exception: log_service.py may need immediate commits for audit logs.
"""
rule = self._get_rule("SVC-006")
if not rule:
return
# Exception: log_service.py is allowed to commit (audit logs)
if "log_service.py" in str(file_path):
return
for i, line in enumerate(lines, 1):
if "db.commit()" in line:
# Skip if it's a comment
stripped = line.strip()
if stripped.startswith("#"):
continue
self._add_violation(
rule_id="SVC-006",
rule_name=rule["name"],
severity=Severity.WARNING,
file_path=file_path,
line_number=i,
message="Service calls db.commit() - transaction control should be at endpoint level",
context=stripped,
suggestion="Remove db.commit() from service; let endpoint handle transaction",
)
def _validate_models(self, target_path: Path):
"""Validate model rules"""
print("📦 Validating models...")
model_files = list(target_path.glob("app/models/**/*.py"))
self.result.files_checked += len(model_files)
# Basic validation - can be extended
for file_path in model_files:
if self._should_ignore_file(file_path):
continue
content = file_path.read_text()
lines = content.split("\n")
# Check for mixing SQLAlchemy and Pydantic
for i, line in enumerate(lines, 1):
if re.search(r"class.*\(Base.*,.*BaseModel.*\)", line):
self._add_violation(
rule_id="MDL-002",
rule_name="Separate SQLAlchemy and Pydantic models",
severity=Severity.ERROR,
file_path=file_path,
line_number=i,
message="Model mixes SQLAlchemy Base and Pydantic BaseModel",
context=line.strip(),
suggestion="Keep SQLAlchemy models and Pydantic models separate",
)
def _validate_exceptions(self, target_path: Path):
"""Validate exception handling patterns"""
print("⚠️ Validating exception handling...")
py_files = list(target_path.glob("**/*.py"))
for file_path in py_files:
if self._should_ignore_file(file_path):
continue
content = file_path.read_text()
lines = content.split("\n")
# EXC-002: Check for bare except
for i, line in enumerate(lines, 1):
if re.match(r"\s*except\s*:", line):
self._add_violation(
rule_id="EXC-002",
rule_name="No bare except clauses",
severity=Severity.ERROR,
file_path=file_path,
line_number=i,
message="Bare except clause catches all exceptions including system exits",
context=line.strip(),
suggestion="Specify exception type: except ValueError: or except Exception:",
)
def _validate_javascript(self, target_path: Path):
"""Validate JavaScript patterns"""
print("🟨 Validating JavaScript...")
js_files = list(target_path.glob("static/admin/js/**/*.js"))
self.result.files_checked += len(js_files)
for file_path in js_files:
content = file_path.read_text()
lines = content.split("\n")
# JS-001: Check for console usage (must use centralized logger)
# Skip init-*.js files - they run before logger is available
if not file_path.name.startswith("init-"):
for i, line in enumerate(lines, 1):
if re.search(r"console\.(log|warn|error)", line):
# Skip if it's a comment or bootstrap message
if "//" in line or "" in line or "eslint-disable" in line:
continue
self._add_violation(
rule_id="JS-001",
rule_name="Use centralized logger",
severity=Severity.WARNING,
file_path=file_path,
line_number=i,
message="Use centralized logger instead of console",
context=line.strip()[:80],
suggestion="Use window.LogConfig.createLogger('moduleName')",
)
# JS-002: Check for window.apiClient (must use lowercase apiClient)
for i, line in enumerate(lines, 1):
if "window.apiClient" in line:
# Check if it's not in a comment
before_occurrence = line[: line.find("window.apiClient")]
if "//" not in before_occurrence:
self._add_violation(
rule_id="JS-002",
rule_name="Use lowercase apiClient",
severity=Severity.WARNING,
file_path=file_path,
line_number=i,
message="Use apiClient directly instead of window.apiClient",
context=line.strip(),
suggestion="Replace window.apiClient with apiClient",
)
def _validate_templates(self, target_path: Path):
"""Validate template patterns"""
print("📄 Validating templates...")
template_files = list(target_path.glob("app/templates/admin/**/*.html"))
self.result.files_checked += len(template_files)
# TPL-001 exclusion patterns
tpl_001_exclusions = [
"login.html", # Standalone login page
"errors/", # Error pages extend errors/base.html
"test-", # Test templates
]
for file_path in template_files:
file_path_str = str(file_path)
# Skip base template and partials
is_base_or_partial = "base.html" in file_path.name or "partials" in file_path_str
# Skip macros directory for FE rules
is_macro = "shared/macros/" in file_path_str or "shared\\macros\\" in file_path_str
# Skip components showcase page
is_components_page = "components.html" in file_path.name
content = file_path.read_text()
lines = content.split("\n")
# FE-001: Check for inline pagination (should use macro)
if not is_base_or_partial and not is_macro and not is_components_page:
self._check_pagination_macro_usage(file_path, content, lines)
# FE-002: Check for inline SVGs (should use $icon())
if not is_base_or_partial and not is_macro and not is_components_page:
self._check_icon_helper_usage(file_path, content, lines)
# FE-003: Check for inline loading/error states (should use alerts macro)
if not is_base_or_partial and not is_macro and not is_components_page:
self._check_alerts_macro_usage(file_path, content, lines)
# FE-004: Check for inline modals (should use modals macro)
if not is_base_or_partial and not is_macro and not is_components_page:
self._check_modals_macro_usage(file_path, content, lines)
# FE-005: Check for inline table wrappers (should use tables macro)
if not is_base_or_partial and not is_macro and not is_components_page:
self._check_tables_macro_usage(file_path, content, lines)
# FE-006: Check for inline dropdowns (should use dropdowns macro)
if not is_base_or_partial and not is_macro and not is_components_page:
self._check_dropdowns_macro_usage(file_path, content, lines)
# FE-007: Check for inline page headers (should use headers macro)
if not is_base_or_partial and not is_macro and not is_components_page:
self._check_headers_macro_usage(file_path, content, lines)
# FE-008: Check for raw number inputs (should use number_stepper)
if not is_base_or_partial and not is_macro and not is_components_page:
self._check_number_stepper_macro_usage(file_path, content, lines)
# Skip base/partials for TPL-001 check
if is_base_or_partial:
continue
# Check exclusion patterns for TPL-001
skip = False
for exclusion in tpl_001_exclusions:
if exclusion in file_path_str:
skip = True
break
if skip:
continue
# Check for standalone marker in template (first 5 lines)
first_lines = "\n".join(lines[:5]).lower()
if "standalone" in first_lines or "noqa: tpl-001" in first_lines:
continue
# TPL-001: Check for extends
has_extends = any(
"{% extends" in line and "admin/base.html" in line for line in lines
)
if not has_extends:
self._add_violation(
rule_id="TPL-001",
rule_name="Templates must extend base",
severity=Severity.ERROR,
file_path=file_path,
line_number=1,
message="Admin template does not extend admin/base.html",
context=file_path.name,
suggestion="Add {% extends 'admin/base.html' %} at the top, or add {# standalone #} if intentional",
)
def _get_rule(self, rule_id: str) -> dict[str, Any]:
"""Get rule configuration by ID"""
# Look in different rule categories
for category in [
"api_endpoint_rules",
"service_layer_rules",
"model_rules",
"exception_rules",
"javascript_rules",
"template_rules",
"frontend_component_rules",
]:
rules = self.config.get(category, [])
for rule in rules:
if rule.get("id") == rule_id:
return rule
return None
def _should_ignore_file(self, file_path: Path) -> bool:
"""Check if file should be ignored"""
ignore_patterns = self.config.get("ignore", {}).get("files", [])
# Convert to string for easier matching
file_path_str = str(file_path)
for pattern in ignore_patterns:
# Check if any part of the path matches the pattern
if file_path.match(pattern):
return True
# Also check if pattern appears in the path (for .venv, venv, etc.)
if "/.venv/" in file_path_str or file_path_str.startswith(".venv/"):
return True
if "/venv/" in file_path_str or file_path_str.startswith("venv/"):
return True
return False
def _add_violation(
self,
rule_id: str,
rule_name: str,
severity: Severity,
file_path: Path,
line_number: int,
message: str,
context: str = "",
suggestion: str = "",
):
"""Add a violation to results"""
violation = Violation(
rule_id=rule_id,
rule_name=rule_name,
severity=severity,
file_path=file_path,
line_number=line_number,
message=message,
context=context,
suggestion=suggestion,
)
self.result.violations.append(violation)
def print_report(self):
"""Print validation report"""
print("\n" + "=" * 80)
print("📊 ARCHITECTURE VALIDATION REPORT")
print("=" * 80 + "\n")
print(f"Files checked: {self.result.files_checked}")
print(f"Total violations: {len(self.result.violations)}\n")
# Print file summary table if we have file results
if self.result.file_results:
self._print_summary_table()
# Group by severity
errors = [v for v in self.result.violations if v.severity == Severity.ERROR]
warnings = [v for v in self.result.violations if v.severity == Severity.WARNING]
if errors:
print(f"\n❌ ERRORS ({len(errors)}):")
print("-" * 80)
for violation in errors:
self._print_violation(violation)
if warnings:
print(f"\n⚠️ WARNINGS ({len(warnings)}):")
print("-" * 80)
for violation in warnings:
self._print_violation(violation)
# Summary
print("\n" + "=" * 80)
if self.result.has_errors():
print("❌ VALIDATION FAILED - Fix errors before committing")
print("=" * 80)
return 1
if self.result.has_warnings():
print("⚠️ VALIDATION PASSED WITH WARNINGS")
print("=" * 80)
return 0
print("✅ VALIDATION PASSED - No violations found")
print("=" * 80)
return 0
    def _print_summary_table(self):
        """Print a summary table of file results.

        One row per FileResult with pass/fail status and error/warning
        counts, followed by a totals line. Caller guarantees
        self.result.file_results is non-empty (max() below would raise on
        an empty sequence).
        """
        print("📋 FILE SUMMARY:")
        print("-" * 80)
        # Calculate column widths: the widest relative path sets the
        # "File" column width, capped so the table fits in 80 columns.
        max_path_len = max(
            len(
                str(
                    fr.file_path.relative_to(self.project_root)
                    if self.project_root in fr.file_path.parents
                    else fr.file_path
                )
            )
            for fr in self.result.file_results
        )
        max_path_len = min(max_path_len, 55)  # Cap at 55 chars
        # Print header
        print(f" {'File':<{max_path_len}} {'Status':<8} {'Errors':<7} {'Warnings':<8}")
        print(f" {'-' * max_path_len} {'-' * 8} {'-' * 7} {'-' * 8}")
        # Print each file
        for fr in self.result.file_results:
            rel_path = (
                fr.file_path.relative_to(self.project_root)
                if self.project_root in fr.file_path.parents
                else fr.file_path
            )
            path_str = str(rel_path)
            if len(path_str) > max_path_len:
                # Truncate from the left so the filename stays visible.
                path_str = "..." + path_str[-(max_path_len - 3) :]
            print(
                f" {path_str:<{max_path_len}} "
                f"{fr.status_icon} {fr.status:<5} "
                f"{fr.errors:<7} "
                f"{fr.warnings:<8}"
            )
        print("-" * 80)
        # Print totals
        total_errors = sum(fr.errors for fr in self.result.file_results)
        total_warnings = sum(fr.warnings for fr in self.result.file_results)
        passed = sum(1 for fr in self.result.file_results if fr.passed)
        failed = len(self.result.file_results) - passed
        print(f"\n Total: {len(self.result.file_results)} files | "
              f"{passed} passed | ❌ {failed} failed | "
              f"{total_errors} errors | {total_warnings} warnings\n")
def print_json(self) -> int:
"""Print validation results as JSON"""
import json
violations_json = []
for v in self.result.violations:
rel_path = (
str(v.file_path.relative_to(self.project_root))
if self.project_root in v.file_path.parents
else str(v.file_path)
)
violations_json.append(
{
"rule_id": v.rule_id,
"rule_name": v.rule_name,
"severity": v.severity.value,
"file_path": rel_path,
"line_number": v.line_number,
"message": v.message,
"context": v.context or "",
"suggestion": v.suggestion or "",
}
)
output = {
"files_checked": self.result.files_checked,
"total_violations": len(self.result.violations),
"errors": len(
[v for v in self.result.violations if v.severity == Severity.ERROR]
),
"warnings": len(
[v for v in self.result.violations if v.severity == Severity.WARNING]
),
"violations": violations_json,
}
print(json.dumps(output, indent=2))
return 1 if self.result.has_errors() else 0
def _print_violation(self, v: Violation):
"""Print a single violation"""
rel_path = (
v.file_path.relative_to(self.project_root)
if self.project_root in v.file_path.parents
else v.file_path
)
print(f"\n [{v.rule_id}] {v.rule_name}")
print(f" File: {rel_path}:{v.line_number}")
print(f" Issue: {v.message}")
if v.context and self.verbose:
print(f" Context: {v.context}")
if v.suggestion:
print(f" 💡 Suggestion: {v.suggestion}")
def main():
    """Main entry point.

    Parses CLI options, runs the validator in one of four modes
    (single file, folder, entity name, or the whole current directory),
    prints the report (text or JSON), and exits with the validator's
    exit code (1 on errors, 0 otherwise).
    """
    parser = argparse.ArgumentParser(
        description="Validate architecture patterns in codebase",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )
    # Target options (mutually exclusive)
    target_group = parser.add_mutually_exclusive_group()
    target_group.add_argument(
        "-f",
        "--file",
        type=Path,
        metavar="PATH",
        help="Validate a single file (.py, .js, or .html)",
    )
    target_group.add_argument(
        "-d",
        "--folder",
        type=Path,
        metavar="PATH",
        help="Validate all files in a directory (recursive)",
    )
    target_group.add_argument(
        "-o",
        "--object",
        type=str,
        metavar="NAME",
        help="Validate all files related to an entity (e.g., company, vendor, order)",
    )
    parser.add_argument(
        "-c",
        "--config",
        type=Path,
        default=Path.cwd() / ".architecture-rules.yaml",
        help="Path to architecture rules config (default: .architecture-rules.yaml)",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Show detailed output including context",
    )
    parser.add_argument(
        "--errors-only", action="store_true", help="Only show errors, suppress warnings"
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output results as JSON (for programmatic use)",
    )
    args = parser.parse_args()
    # Create validator
    validator = ArchitectureValidator(args.config, verbose=args.verbose)
    # Determine validation mode
    if args.file:
        # Validate single file
        result = validator.validate_file(args.file)
    elif args.folder:
        # Validate directory (fail fast when the path is not a directory)
        if not args.folder.is_dir():
            print(f"❌ Not a directory: {args.folder}")
            sys.exit(1)
        result = validator.validate_all(args.folder)
    elif args.object:
        # Validate all files related to an entity
        result = validator.validate_object(args.object)
    else:
        # Default: validate current directory
        result = validator.validate_all(Path.cwd())
    # Output results in the requested format; both printers return the
    # process exit code (1 on errors, 0 otherwise).
    if args.json:
        exit_code = validator.print_json()
    else:
        exit_code = validator.print_report()
    sys.exit(exit_code)
# Allow running directly: `python scripts/validate_architecture.py ...`.
if __name__ == "__main__":
    main()