- Update .architecture-rules.yaml to ignore venv/.venv directories - Improve _should_ignore_file() method to handle venv path exclusions - Add explicit checks for .venv/ and venv/ in file paths This prevents the architecture validator from scanning thousands of files in virtual environment directories, reducing validation time from scanning all dependency files to just project files. Before: Scanned venv files (thousands of violations in dependencies) After: Only scans project files (123 files checked) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
757 lines
27 KiB
Python
Executable File
757 lines
27 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
Architecture Validator
|
|
======================
|
|
Validates code against architectural rules defined in .architecture-rules.yaml
|
|
|
|
This script checks that the codebase follows key architectural decisions:
|
|
- Separation of concerns (routes vs services)
|
|
- Proper exception handling (domain exceptions vs HTTPException)
|
|
- Correct use of Pydantic vs SQLAlchemy models
|
|
- Service layer patterns
|
|
- API endpoint patterns
|
|
|
|
Usage:
|
|
python scripts/validate_architecture.py # Check all files
|
|
python scripts/validate_architecture.py --fix # Auto-fix where possible
|
|
python scripts/validate_architecture.py --verbose # Detailed output
|
|
python scripts/validate_architecture.py app/api/ # Check specific directory
|
|
"""
|
|
|
|
import argparse
|
|
import ast
|
|
import re
|
|
import sys
|
|
from dataclasses import dataclass, field
|
|
from enum import Enum
|
|
from pathlib import Path
|
|
from typing import Any
|
|
|
|
import yaml
|
|
|
|
|
|
class Severity(Enum):
    """Validation severity levels.

    ERROR violations make the run fail (non-zero exit code from the
    reporters); WARNING and INFO are reported but do not fail the run.
    """

    ERROR = "error"
    WARNING = "warning"
    INFO = "info"
|
|
|
|
|
|
@dataclass
class Violation:
    """A single architectural rule violation found at one source location.

    Instances are created by ``ArchitectureValidator._add_violation`` and
    rendered by the text (`print_report`) and JSON (`print_json`) reporters.
    """

    rule_id: str  # rule identifier, e.g. "API-001" (key in .architecture-rules.yaml)
    rule_name: str  # human-readable rule title from the config
    severity: Severity  # ERROR fails the run; WARNING/INFO do not
    file_path: Path  # file in which the violation was found
    line_number: int  # 1-based line number within file_path
    message: str  # description of what is wrong
    context: str = ""  # offending source line (shown only in verbose mode)
    suggestion: str = ""  # hint on how to fix the violation
|
|
|
|
|
|
@dataclass
class ValidationResult:
    """Aggregated outcome of an architecture validation run."""

    violations: list[Violation] = field(default_factory=list)  # findings, in discovery order
    files_checked: int = 0  # number of files scanned across all rule families
    rules_applied: int = 0  # reserved counter; not currently incremented

    def has_errors(self) -> bool:
        """Return True when at least one error-level violation was recorded."""
        return any(v.severity == Severity.ERROR for v in self.violations)

    def has_warnings(self) -> bool:
        """Return True when at least one warning-level violation was recorded."""
        return any(v.severity == Severity.WARNING for v in self.violations)
|
|
|
|
|
|
class ArchitectureValidator:
|
|
"""Main validator class"""
|
|
|
|
    def __init__(self, config_path: Path, verbose: bool = False):
        """Initialize validator with configuration.

        Args:
            config_path: Path to the .architecture-rules.yaml rules file.
            verbose: When True, reports include the offending source context.
        """
        self.config_path = config_path
        self.verbose = verbose
        # Load eagerly: _load_config exits the process if the file is missing.
        self.config = self._load_config()
        self.result = ValidationResult()
        # Reports show paths relative to the invocation directory.
        self.project_root = Path.cwd()
|
|
|
|
def _load_config(self) -> dict[str, Any]:
|
|
"""Load validation rules from YAML config"""
|
|
if not self.config_path.exists():
|
|
print(f"❌ Configuration file not found: {self.config_path}")
|
|
sys.exit(1)
|
|
|
|
with open(self.config_path) as f:
|
|
config = yaml.safe_load(f)
|
|
|
|
print(f"📋 Loaded architecture rules: {config.get('project', 'unknown')}")
|
|
return config
|
|
|
|
def validate_all(self, target_path: Path = None) -> ValidationResult:
|
|
"""Validate all files or specific path"""
|
|
print("\n🔍 Starting architecture validation...\n")
|
|
|
|
target = target_path or self.project_root
|
|
|
|
# Validate API endpoints
|
|
self._validate_api_endpoints(target)
|
|
|
|
# Validate service layer
|
|
self._validate_service_layer(target)
|
|
|
|
# Validate models
|
|
self._validate_models(target)
|
|
|
|
# Validate exception handling
|
|
self._validate_exceptions(target)
|
|
|
|
# Validate JavaScript
|
|
self._validate_javascript(target)
|
|
|
|
# Validate templates
|
|
self._validate_templates(target)
|
|
|
|
return self.result
|
|
|
|
def _validate_api_endpoints(self, target_path: Path):
|
|
"""Validate API endpoint rules (API-001, API-002, API-003, API-004)"""
|
|
print("📡 Validating API endpoints...")
|
|
|
|
api_files = list(target_path.glob("app/api/v1/**/*.py"))
|
|
self.result.files_checked += len(api_files)
|
|
|
|
for file_path in api_files:
|
|
if self._should_ignore_file(file_path):
|
|
continue
|
|
|
|
content = file_path.read_text()
|
|
lines = content.split("\n")
|
|
|
|
# API-001: Check for Pydantic model usage
|
|
self._check_pydantic_usage(file_path, content, lines)
|
|
|
|
# API-002: Check for business logic in endpoints
|
|
self._check_no_business_logic_in_endpoints(file_path, content, lines)
|
|
|
|
# API-003: Check exception handling
|
|
self._check_endpoint_exception_handling(file_path, content, lines)
|
|
|
|
# API-004: Check authentication
|
|
self._check_endpoint_authentication(file_path, content, lines)
|
|
|
|
def _check_pydantic_usage(self, file_path: Path, content: str, lines: list[str]):
|
|
"""API-001: Ensure endpoints use Pydantic models"""
|
|
rule = self._get_rule("API-001")
|
|
if not rule:
|
|
return
|
|
|
|
# Check for response_model in route decorators
|
|
route_pattern = r"@router\.(get|post|put|delete|patch)"
|
|
dict_return_pattern = r"return\s+\{.*\}"
|
|
|
|
for i, line in enumerate(lines, 1):
|
|
# Check for dict returns in endpoints
|
|
if re.search(route_pattern, line):
|
|
# Look ahead for function body
|
|
func_start = i
|
|
indent = len(line) - len(line.lstrip())
|
|
|
|
# Find function body
|
|
for j in range(func_start, min(func_start + 20, len(lines))):
|
|
if j >= len(lines):
|
|
break
|
|
|
|
func_line = lines[j]
|
|
if re.search(dict_return_pattern, func_line):
|
|
self._add_violation(
|
|
rule_id="API-001",
|
|
rule_name=rule["name"],
|
|
severity=Severity.ERROR,
|
|
file_path=file_path,
|
|
line_number=j + 1,
|
|
message="Endpoint returns raw dict instead of Pydantic model",
|
|
context=func_line.strip(),
|
|
suggestion="Define a Pydantic response model and use response_model parameter",
|
|
)
|
|
|
|
def _check_no_business_logic_in_endpoints(
|
|
self, file_path: Path, content: str, lines: list[str]
|
|
):
|
|
"""API-002: Ensure no business logic in endpoints"""
|
|
rule = self._get_rule("API-002")
|
|
if not rule:
|
|
return
|
|
|
|
anti_patterns = [
|
|
(r"db\.add\(", "Database operations should be in service layer"),
|
|
(r"db\.commit\(\)", "Database commits should be in service layer"),
|
|
(r"db\.query\(", "Database queries should be in service layer"),
|
|
(r"db\.execute\(", "Database operations should be in service layer"),
|
|
]
|
|
|
|
for i, line in enumerate(lines, 1):
|
|
# Skip service method calls (allowed)
|
|
if "_service." in line or "service." in line:
|
|
continue
|
|
|
|
for pattern, message in anti_patterns:
|
|
if re.search(pattern, line):
|
|
self._add_violation(
|
|
rule_id="API-002",
|
|
rule_name=rule["name"],
|
|
severity=Severity.ERROR,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message=message,
|
|
context=line.strip(),
|
|
suggestion="Move database operations to service layer",
|
|
)
|
|
|
|
def _check_endpoint_exception_handling(
|
|
self, file_path: Path, content: str, lines: list[str]
|
|
):
|
|
"""API-003: Check proper exception handling in endpoints"""
|
|
rule = self._get_rule("API-003")
|
|
if not rule:
|
|
return
|
|
|
|
# Parse file to check for try/except in route handlers
|
|
try:
|
|
tree = ast.parse(content)
|
|
except SyntaxError:
|
|
return
|
|
|
|
for node in ast.walk(tree):
|
|
if isinstance(node, ast.FunctionDef):
|
|
# Check if it's a route handler
|
|
has_router_decorator = any(
|
|
isinstance(d, ast.Call)
|
|
and isinstance(d.func, ast.Attribute)
|
|
and getattr(d.func.value, "id", None) == "router"
|
|
for d in node.decorator_list
|
|
)
|
|
|
|
if has_router_decorator:
|
|
# Check if function body has try/except
|
|
has_try_except = any(
|
|
isinstance(child, ast.Try) for child in ast.walk(node)
|
|
)
|
|
|
|
# Check if function calls service methods
|
|
has_service_call = any(
|
|
isinstance(child, ast.Call)
|
|
and isinstance(child.func, ast.Attribute)
|
|
and "service" in getattr(child.func.value, "id", "").lower()
|
|
for child in ast.walk(node)
|
|
)
|
|
|
|
if has_service_call and not has_try_except:
|
|
self._add_violation(
|
|
rule_id="API-003",
|
|
rule_name=rule["name"],
|
|
severity=Severity.WARNING,
|
|
file_path=file_path,
|
|
line_number=node.lineno,
|
|
message=f"Endpoint '{node.name}' calls service but lacks exception handling",
|
|
context=f"def {node.name}(...)",
|
|
suggestion="Wrap service calls in try/except and convert to HTTPException",
|
|
)
|
|
|
|
def _check_endpoint_authentication(
|
|
self, file_path: Path, content: str, lines: list[str]
|
|
):
|
|
"""API-004: Check authentication on endpoints"""
|
|
rule = self._get_rule("API-004")
|
|
if not rule:
|
|
return
|
|
|
|
# This is a warning-level check
|
|
# Look for endpoints without Depends(get_current_*)
|
|
for i, line in enumerate(lines, 1):
|
|
if "@router." in line and (
|
|
"post" in line or "put" in line or "delete" in line
|
|
):
|
|
# Check next 5 lines for auth
|
|
has_auth = False
|
|
for j in range(i, min(i + 5, len(lines))):
|
|
if "Depends(get_current_" in lines[j]:
|
|
has_auth = True
|
|
break
|
|
|
|
if not has_auth and "include_in_schema=False" not in " ".join(
|
|
lines[i : i + 5]
|
|
):
|
|
self._add_violation(
|
|
rule_id="API-004",
|
|
rule_name=rule["name"],
|
|
severity=Severity.WARNING,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message="Endpoint may be missing authentication",
|
|
context=line.strip(),
|
|
suggestion="Add Depends(get_current_user) or similar if endpoint should be protected",
|
|
)
|
|
|
|
def _validate_service_layer(self, target_path: Path):
|
|
"""Validate service layer rules (SVC-001, SVC-002, SVC-003, SVC-004)"""
|
|
print("🔧 Validating service layer...")
|
|
|
|
service_files = list(target_path.glob("app/services/**/*.py"))
|
|
self.result.files_checked += len(service_files)
|
|
|
|
for file_path in service_files:
|
|
if self._should_ignore_file(file_path):
|
|
continue
|
|
|
|
content = file_path.read_text()
|
|
lines = content.split("\n")
|
|
|
|
# SVC-001: No HTTPException in services
|
|
self._check_no_http_exception_in_services(file_path, content, lines)
|
|
|
|
# SVC-002: Proper exception handling
|
|
self._check_service_exceptions(file_path, content, lines)
|
|
|
|
# SVC-003: DB session as parameter
|
|
self._check_db_session_parameter(file_path, content, lines)
|
|
|
|
def _check_no_http_exception_in_services(
|
|
self, file_path: Path, content: str, lines: list[str]
|
|
):
|
|
"""SVC-001: Services must not raise HTTPException"""
|
|
rule = self._get_rule("SVC-001")
|
|
if not rule:
|
|
return
|
|
|
|
for i, line in enumerate(lines, 1):
|
|
if "raise HTTPException" in line:
|
|
self._add_violation(
|
|
rule_id="SVC-001",
|
|
rule_name=rule["name"],
|
|
severity=Severity.ERROR,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message="Service raises HTTPException - use domain exceptions instead",
|
|
context=line.strip(),
|
|
suggestion="Create custom exception class (e.g., VendorNotFoundError) and raise that",
|
|
)
|
|
|
|
if (
|
|
"from fastapi import HTTPException" in line
|
|
or "from fastapi.exceptions import HTTPException" in line
|
|
):
|
|
self._add_violation(
|
|
rule_id="SVC-001",
|
|
rule_name=rule["name"],
|
|
severity=Severity.ERROR,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message="Service imports HTTPException - services should not know about HTTP",
|
|
context=line.strip(),
|
|
suggestion="Remove HTTPException import and use domain exceptions",
|
|
)
|
|
|
|
def _check_service_exceptions(
|
|
self, file_path: Path, content: str, lines: list[str]
|
|
):
|
|
"""SVC-002: Check for proper exception handling"""
|
|
rule = self._get_rule("SVC-002")
|
|
if not rule:
|
|
return
|
|
|
|
for i, line in enumerate(lines, 1):
|
|
# Check for generic Exception raises
|
|
if re.match(r"\s*raise Exception\(", line):
|
|
self._add_violation(
|
|
rule_id="SVC-002",
|
|
rule_name=rule["name"],
|
|
severity=Severity.WARNING,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message="Service raises generic Exception - use specific domain exception",
|
|
context=line.strip(),
|
|
suggestion="Create custom exception class for this error case",
|
|
)
|
|
|
|
def _check_db_session_parameter(
|
|
self, file_path: Path, content: str, lines: list[str]
|
|
):
|
|
"""SVC-003: Service methods should accept db session as parameter"""
|
|
rule = self._get_rule("SVC-003")
|
|
if not rule:
|
|
return
|
|
|
|
# Check for SessionLocal() creation in service files
|
|
for i, line in enumerate(lines, 1):
|
|
if "SessionLocal()" in line and "class" not in line:
|
|
self._add_violation(
|
|
rule_id="SVC-003",
|
|
rule_name=rule["name"],
|
|
severity=Severity.ERROR,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message="Service creates database session internally",
|
|
context=line.strip(),
|
|
suggestion="Accept db: Session as method parameter instead",
|
|
)
|
|
|
|
def _validate_models(self, target_path: Path):
|
|
"""Validate model rules"""
|
|
print("📦 Validating models...")
|
|
|
|
model_files = list(target_path.glob("app/models/**/*.py"))
|
|
self.result.files_checked += len(model_files)
|
|
|
|
# Basic validation - can be extended
|
|
for file_path in model_files:
|
|
if self._should_ignore_file(file_path):
|
|
continue
|
|
|
|
content = file_path.read_text()
|
|
lines = content.split("\n")
|
|
|
|
# Check for mixing SQLAlchemy and Pydantic
|
|
for i, line in enumerate(lines, 1):
|
|
if re.search(r"class.*\(Base.*,.*BaseModel.*\)", line):
|
|
self._add_violation(
|
|
rule_id="MDL-002",
|
|
rule_name="Separate SQLAlchemy and Pydantic models",
|
|
severity=Severity.ERROR,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message="Model mixes SQLAlchemy Base and Pydantic BaseModel",
|
|
context=line.strip(),
|
|
suggestion="Keep SQLAlchemy models and Pydantic models separate",
|
|
)
|
|
|
|
def _validate_exceptions(self, target_path: Path):
|
|
"""Validate exception handling patterns"""
|
|
print("⚠️ Validating exception handling...")
|
|
|
|
py_files = list(target_path.glob("**/*.py"))
|
|
|
|
for file_path in py_files:
|
|
if self._should_ignore_file(file_path):
|
|
continue
|
|
|
|
content = file_path.read_text()
|
|
lines = content.split("\n")
|
|
|
|
# EXC-002: Check for bare except
|
|
for i, line in enumerate(lines, 1):
|
|
if re.match(r"\s*except\s*:", line):
|
|
self._add_violation(
|
|
rule_id="EXC-002",
|
|
rule_name="No bare except clauses",
|
|
severity=Severity.ERROR,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message="Bare except clause catches all exceptions including system exits",
|
|
context=line.strip(),
|
|
suggestion="Specify exception type: except ValueError: or except Exception:",
|
|
)
|
|
|
|
def _validate_javascript(self, target_path: Path):
|
|
"""Validate JavaScript patterns"""
|
|
print("🟨 Validating JavaScript...")
|
|
|
|
js_files = list(target_path.glob("static/admin/js/**/*.js"))
|
|
self.result.files_checked += len(js_files)
|
|
|
|
for file_path in js_files:
|
|
content = file_path.read_text()
|
|
lines = content.split("\n")
|
|
|
|
# JS-001: Check for window.apiClient
|
|
for i, line in enumerate(lines, 1):
|
|
if (
|
|
"window.apiClient" in line
|
|
and "//" not in line[: line.find("window.apiClient")]
|
|
if "window.apiClient" in line
|
|
else True
|
|
):
|
|
self._add_violation(
|
|
rule_id="JS-001",
|
|
rule_name="Use apiClient directly",
|
|
severity=Severity.WARNING,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message="Use apiClient directly instead of window.apiClient",
|
|
context=line.strip(),
|
|
suggestion="Replace window.apiClient with apiClient",
|
|
)
|
|
|
|
# JS-002: Check for console usage
|
|
for i, line in enumerate(lines, 1):
|
|
if re.search(r"console\.(log|warn|error)", line):
|
|
# Skip if it's a comment or bootstrap message
|
|
if "//" in line or "✅" in line or "eslint-disable" in line:
|
|
continue
|
|
|
|
self._add_violation(
|
|
rule_id="JS-002",
|
|
rule_name="Use centralized logger",
|
|
severity=Severity.WARNING,
|
|
file_path=file_path,
|
|
line_number=i,
|
|
message="Use centralized logger instead of console",
|
|
context=line.strip()[:80],
|
|
suggestion="Use window.LogConfig.createLogger('moduleName')",
|
|
)
|
|
|
|
def _validate_templates(self, target_path: Path):
|
|
"""Validate template patterns"""
|
|
print("📄 Validating templates...")
|
|
|
|
template_files = list(target_path.glob("app/templates/admin/**/*.html"))
|
|
self.result.files_checked += len(template_files)
|
|
|
|
for file_path in template_files:
|
|
# Skip base template and partials
|
|
if "base.html" in file_path.name or "partials" in str(file_path):
|
|
continue
|
|
|
|
content = file_path.read_text()
|
|
lines = content.split("\n")
|
|
|
|
# TPL-001: Check for extends
|
|
has_extends = any(
|
|
"{% extends" in line and "admin/base.html" in line for line in lines
|
|
)
|
|
|
|
if not has_extends:
|
|
self._add_violation(
|
|
rule_id="TPL-001",
|
|
rule_name="Templates must extend base",
|
|
severity=Severity.ERROR,
|
|
file_path=file_path,
|
|
line_number=1,
|
|
message="Admin template does not extend admin/base.html",
|
|
context=file_path.name,
|
|
suggestion="Add {% extends 'admin/base.html' %} at the top",
|
|
)
|
|
|
|
def _get_rule(self, rule_id: str) -> dict[str, Any]:
|
|
"""Get rule configuration by ID"""
|
|
# Look in different rule categories
|
|
for category in [
|
|
"api_endpoint_rules",
|
|
"service_layer_rules",
|
|
"model_rules",
|
|
"exception_rules",
|
|
"javascript_rules",
|
|
"template_rules",
|
|
]:
|
|
rules = self.config.get(category, [])
|
|
for rule in rules:
|
|
if rule.get("id") == rule_id:
|
|
return rule
|
|
return None
|
|
|
|
def _should_ignore_file(self, file_path: Path) -> bool:
|
|
"""Check if file should be ignored"""
|
|
ignore_patterns = self.config.get("ignore", {}).get("files", [])
|
|
|
|
# Convert to string for easier matching
|
|
file_path_str = str(file_path)
|
|
|
|
for pattern in ignore_patterns:
|
|
# Check if any part of the path matches the pattern
|
|
if file_path.match(pattern):
|
|
return True
|
|
# Also check if pattern appears in the path (for .venv, venv, etc.)
|
|
if "/.venv/" in file_path_str or file_path_str.startswith(".venv/"):
|
|
return True
|
|
if "/venv/" in file_path_str or file_path_str.startswith("venv/"):
|
|
return True
|
|
|
|
return False
|
|
|
|
def _add_violation(
|
|
self,
|
|
rule_id: str,
|
|
rule_name: str,
|
|
severity: Severity,
|
|
file_path: Path,
|
|
line_number: int,
|
|
message: str,
|
|
context: str = "",
|
|
suggestion: str = "",
|
|
):
|
|
"""Add a violation to results"""
|
|
violation = Violation(
|
|
rule_id=rule_id,
|
|
rule_name=rule_name,
|
|
severity=severity,
|
|
file_path=file_path,
|
|
line_number=line_number,
|
|
message=message,
|
|
context=context,
|
|
suggestion=suggestion,
|
|
)
|
|
self.result.violations.append(violation)
|
|
|
|
def print_report(self):
|
|
"""Print validation report"""
|
|
print("\n" + "=" * 80)
|
|
print("📊 ARCHITECTURE VALIDATION REPORT")
|
|
print("=" * 80 + "\n")
|
|
|
|
print(f"Files checked: {self.result.files_checked}")
|
|
print(f"Total violations: {len(self.result.violations)}\n")
|
|
|
|
# Group by severity
|
|
errors = [v for v in self.result.violations if v.severity == Severity.ERROR]
|
|
warnings = [v for v in self.result.violations if v.severity == Severity.WARNING]
|
|
|
|
if errors:
|
|
print(f"\n❌ ERRORS ({len(errors)}):")
|
|
print("-" * 80)
|
|
for violation in errors:
|
|
self._print_violation(violation)
|
|
|
|
if warnings:
|
|
print(f"\n⚠️ WARNINGS ({len(warnings)}):")
|
|
print("-" * 80)
|
|
for violation in warnings:
|
|
self._print_violation(violation)
|
|
|
|
# Summary
|
|
print("\n" + "=" * 80)
|
|
if self.result.has_errors():
|
|
print("❌ VALIDATION FAILED - Fix errors before committing")
|
|
print("=" * 80)
|
|
return 1
|
|
if self.result.has_warnings():
|
|
print("⚠️ VALIDATION PASSED WITH WARNINGS")
|
|
print("=" * 80)
|
|
return 0
|
|
print("✅ VALIDATION PASSED - No violations found")
|
|
print("=" * 80)
|
|
return 0
|
|
|
|
def print_json(self) -> int:
|
|
"""Print validation results as JSON"""
|
|
import json
|
|
|
|
violations_json = []
|
|
for v in self.result.violations:
|
|
rel_path = (
|
|
str(v.file_path.relative_to(self.project_root))
|
|
if self.project_root in v.file_path.parents
|
|
else str(v.file_path)
|
|
)
|
|
violations_json.append(
|
|
{
|
|
"rule_id": v.rule_id,
|
|
"rule_name": v.rule_name,
|
|
"severity": v.severity.value,
|
|
"file_path": rel_path,
|
|
"line_number": v.line_number,
|
|
"message": v.message,
|
|
"context": v.context or "",
|
|
"suggestion": v.suggestion or "",
|
|
}
|
|
)
|
|
|
|
output = {
|
|
"files_checked": self.result.files_checked,
|
|
"total_violations": len(self.result.violations),
|
|
"errors": len(
|
|
[v for v in self.result.violations if v.severity == Severity.ERROR]
|
|
),
|
|
"warnings": len(
|
|
[v for v in self.result.violations if v.severity == Severity.WARNING]
|
|
),
|
|
"violations": violations_json,
|
|
}
|
|
|
|
print(json.dumps(output, indent=2))
|
|
|
|
return 1 if self.result.has_errors() else 0
|
|
|
|
def _print_violation(self, v: Violation):
|
|
"""Print a single violation"""
|
|
rel_path = (
|
|
v.file_path.relative_to(self.project_root)
|
|
if self.project_root in v.file_path.parents
|
|
else v.file_path
|
|
)
|
|
|
|
print(f"\n [{v.rule_id}] {v.rule_name}")
|
|
print(f" File: {rel_path}:{v.line_number}")
|
|
print(f" Issue: {v.message}")
|
|
|
|
if v.context and self.verbose:
|
|
print(f" Context: {v.context}")
|
|
|
|
if v.suggestion:
|
|
print(f" 💡 Suggestion: {v.suggestion}")
|
|
|
|
|
|
def main():
    """CLI entry point: parse arguments, run validation, report, and exit.

    Exit code is 1 when error-level violations are found, 0 otherwise.
    """
    parser = argparse.ArgumentParser(
        description="Validate architecture patterns in codebase",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )
    parser.add_argument(
        "path",
        nargs="?",
        type=Path,
        default=Path.cwd(),
        help="Path to validate (default: current directory)",
    )
    parser.add_argument(
        "-c",
        "--config",
        type=Path,
        default=Path.cwd() / ".architecture-rules.yaml",
        help="Path to architecture rules config (default: .architecture-rules.yaml)",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Show detailed output including context",
    )
    parser.add_argument(
        "--errors-only", action="store_true", help="Only show errors, suppress warnings"
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output results as JSON (for programmatic use)",
    )

    args = parser.parse_args()

    validator = ArchitectureValidator(args.config, verbose=args.verbose)
    result = validator.validate_all(args.path)

    # FIX: --errors-only was parsed but never honored.  Drop non-error
    # violations before reporting so both output formats respect it.
    if args.errors_only:
        result.violations = [
            v for v in result.violations if v.severity == Severity.ERROR
        ]

    exit_code = validator.print_json() if args.json else validator.print_report()
    sys.exit(exit_code)


if __name__ == "__main__":
    main()
|