Vendor API endpoints use JWT authentication, not URL path parameters. The vendorCode should only be used for page URLs (navigation), not API calls. Fixed API paths in 10 vendor JS files: - analytics.js, customers.js, inventory.js, notifications.js - order-detail.js, orders.js, products.js, profile.js - settings.js, team.js Added architecture rule JS-014 to prevent this pattern from recurring. Added validation check _check_vendor_api_paths to validate_architecture.py. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
3915 lines
162 KiB
Python
Executable File
3915 lines
162 KiB
Python
Executable File
#!/usr/bin/env python3
|
||
"""
|
||
Architecture Validator
|
||
======================
|
||
Validates code against architectural rules defined in .architecture-rules.yaml
|
||
|
||
This script checks that the codebase follows key architectural decisions:
|
||
- Separation of concerns (routes vs services)
|
||
- Proper exception handling (domain exceptions vs HTTPException)
|
||
- Correct use of Pydantic vs SQLAlchemy models
|
||
- Service layer patterns
|
||
- API endpoint patterns
|
||
|
||
Usage:
|
||
python scripts/validate_architecture.py # Check all files in current directory
|
||
python scripts/validate_architecture.py -d app/api/ # Check specific directory
|
||
python scripts/validate_architecture.py -f app/api/v1/vendors.py # Check single file
|
||
python scripts/validate_architecture.py -o company # Check all company-related files
|
||
python scripts/validate_architecture.py -o vendor --verbose # Check vendor files with details
|
||
python scripts/validate_architecture.py --json # JSON output
|
||
|
||
Options:
|
||
-f, --file PATH Validate a single file (.py, .js, or .html)
|
||
-d, --folder PATH Validate all files in a directory (recursive)
|
||
-o, --object NAME Validate all files related to an entity (e.g., company, vendor, order)
|
||
-c, --config PATH Path to architecture rules config
|
||
-v, --verbose Show detailed output including context
|
||
--errors-only Only show errors, suppress warnings
|
||
--json Output results as JSON
|
||
"""
|
||
|
||
import argparse
|
||
import re
|
||
import sys
|
||
from dataclasses import dataclass, field
|
||
from enum import Enum
|
||
from pathlib import Path
|
||
from typing import Any
|
||
|
||
import yaml
|
||
|
||
|
||
class Severity(Enum):
    """Validation severity levels"""

    # ERROR fails validation; WARNING still passes (shown as "PASSED*");
    # INFO is purely advisory. Values are the lowercase wire/JSON names.
    ERROR = "error"
    WARNING = "warning"
    INFO = "info"
|
||
|
||
|
||
@dataclass
class Violation:
    """Represents an architectural rule violation"""

    # Rule identity, e.g. "JS-001", and its human-readable title.
    rule_id: str
    rule_name: str
    # Severity drives pass/fail semantics (see Severity enum).
    severity: Severity
    # Location of the finding (1-based line number).
    file_path: Path
    line_number: int
    # User-facing explanation, plus optional offending source snippet
    # and a suggested fix.
    message: str
    context: str = ""
    suggestion: str = ""
|
||
|
||
|
||
@dataclass
class FileResult:
    """Per-file validation outcome: violation tallies plus display helpers."""

    file_path: Path
    errors: int = 0
    warnings: int = 0

    @property
    def passed(self) -> bool:
        """A file passes when it produced no error-level violations."""
        return not self.errors

    @property
    def status(self) -> str:
        """Console label: FAILED on errors, PASSED* on warnings only, else PASSED."""
        if self.errors:
            return "FAILED"
        return "PASSED*" if self.warnings else "PASSED"

    @property
    def status_icon(self) -> str:
        """Emoji mirroring ``status`` for summary output."""
        if self.errors:
            return "❌"
        return "⚠️" if self.warnings else "✅"
|
||
|
||
|
||
@dataclass
class ValidationResult:
    """Aggregate outcome of a validation run across one or more files."""

    violations: list[Violation] = field(default_factory=list)
    files_checked: int = 0
    rules_applied: int = 0
    file_results: list[FileResult] = field(default_factory=list)

    def has_errors(self) -> bool:
        """Check if there are any error-level violations"""
        for violation in self.violations:
            if violation.severity == Severity.ERROR:
                return True
        return False

    def has_warnings(self) -> bool:
        """Check if there are any warning-level violations"""
        for violation in self.violations:
            if violation.severity == Severity.WARNING:
                return True
        return False
|
||
|
||
|
||
class ArchitectureValidator:
|
||
"""Main validator class"""
|
||
|
||
def __init__(self, config_path: Path, verbose: bool = False):
    """Initialize validator with configuration.

    Args:
        config_path: Path to .architecture-rules.yaml. A sibling
            ``.architecture-rules/`` directory takes precedence if present
            (see _load_config).
        verbose: When True, callers may print extra per-violation context.
    """
    self.config_path = config_path
    self.verbose = verbose
    # NOTE: _load_config may sys.exit(1) if neither config form exists.
    self.config = self._load_config()
    self.result = ValidationResult()
    # Globbing and relative-path display are anchored at the CWD.
    self.project_root = Path.cwd()
|
||
|
||
def _load_config(self) -> dict[str, Any]:
    """
    Load validation rules from YAML config.

    Supports two modes:
    1. Split directory mode: .architecture-rules/ directory with multiple YAML files
    2. Single file mode: .architecture-rules.yaml (legacy)

    The split directory mode takes precedence if it exists.

    Exits the process with status 1 if neither form is found.
    """
    # Check for split directory mode first (directory sits next to the
    # configured single-file path).
    rules_dir = self.config_path.parent / ".architecture-rules"
    if rules_dir.is_dir():
        return self._load_config_from_directory(rules_dir)

    # Fall back to single file mode
    if not self.config_path.exists():
        print(f"❌ Configuration file not found: {self.config_path}")
        print(f" (Also checked for directory: {rules_dir})")
        sys.exit(1)

    with open(self.config_path) as f:
        config = yaml.safe_load(f)

    print(f"📋 Loaded architecture rules: {config.get('project', 'unknown')}")
    return config
|
||
|
||
def _load_config_from_directory(self, rules_dir: Path) -> dict[str, Any]:
    """
    Load and merge configuration from split YAML files in a directory.

    Reads _main.yaml first for base config, then merges all other YAML files.
    Keys ending in ``_rules`` whose values are lists are concatenated across
    files; any other key is only added if not already present, so earlier
    files (and _main.yaml in particular) win on conflicts.
    """
    config: dict[str, Any] = {}

    # Load _main.yaml first (contains project info, principles, ignore patterns)
    main_file = rules_dir / "_main.yaml"
    if main_file.exists():
        with open(main_file) as f:
            config = yaml.safe_load(f) or {}

    # Load all other YAML files (sorted for deterministic merge order)
    # and merge their contents.
    yaml_files = sorted(rules_dir.glob("*.yaml"))
    for yaml_file in yaml_files:
        if yaml_file.name == "_main.yaml":
            continue  # Already loaded

        with open(yaml_file) as f:
            file_config = yaml.safe_load(f) or {}

        # Merge rule sections from this file into main config
        for key, value in file_config.items():
            if key.endswith("_rules") and isinstance(value, list):
                # Merge rule lists
                if key not in config:
                    config[key] = []
                config[key].extend(value)
            elif key not in config:
                # Add new top-level keys
                config[key] = value

    print(f"📋 Loaded architecture rules: {config.get('project', 'unknown')}")
    print(f" (from {len(yaml_files)} files in {rules_dir.name}/)")
    return config
|
||
|
||
def validate_all(self, target_path: Path | None = None) -> ValidationResult:
    """Run every validator category over a directory tree.

    Args:
        target_path: Root directory to scan. Defaults to the project root
            (the CWD captured at construction time).
            FIX: annotation was the invalid implicit-Optional ``Path = None``
            (PEP 484); it is now an explicit ``Path | None``.

    Returns:
        The shared ValidationResult accumulating all violations.
    """
    print("\n🔍 Starting architecture validation...\n")

    target = target_path or self.project_root

    # Each category helper appends its findings to self.result.
    self._validate_api_endpoints(target)
    self._validate_service_layer(target)
    self._validate_models(target)
    self._validate_exceptions(target)
    self._validate_naming_conventions(target)
    self._validate_auth_patterns(target)
    self._validate_middleware(target)
    self._validate_javascript(target)
    self._validate_templates(target)
    self._validate_language_rules(target)

    return self.result
|
||
|
||
def validate_file(self, file_path: Path, quiet: bool = False) -> ValidationResult:
    """Validate a single file and record a per-file result.

    Args:
        file_path: File to check (.py, .js, or .html).
        quiet: Suppress informational output (used by validate_object).

    Returns:
        The shared ValidationResult (violations accumulate across calls).
    """
    if not file_path.exists():
        if not quiet:
            print(f"❌ File not found: {file_path}")
        return self.result

    if not file_path.is_file():
        if not quiet:
            print(f"❌ Not a file: {file_path}")
        return self.result

    if not quiet:
        print(f"\n🔍 Validating single file: {file_path}\n")

    # Resolve to an absolute path so substring checks on the path are stable.
    file_path = file_path.resolve()
    file_path_str = str(file_path)

    if self._should_ignore_file(file_path):
        if not quiet:
            print("⏭️ File is in ignore list, skipping")
        return self.result

    self.result.files_checked += 1

    # Snapshot the violation count so new ones can be attributed to this file.
    violations_before = len(self.result.violations)

    # FIX: explicit UTF-8 — the platform default (e.g. cp1252 on Windows)
    # would raise UnicodeDecodeError on sources containing emoji markers,
    # which this codebase uses.
    content = file_path.read_text(encoding="utf-8")
    lines = content.split("\n")

    # Dispatch on extension to the matching validator family.
    if file_path.suffix == ".py":
        self._validate_python_file(file_path, content, lines, file_path_str)
    elif file_path.suffix == ".js":
        self._validate_js_file(file_path, content, lines)
    elif file_path.suffix == ".html":
        self._validate_html_file(file_path, content, lines)
    else:
        if not quiet:
            print(f"⚠️ Unsupported file type: {file_path.suffix}")

    # Tally only the violations added while validating this file.
    file_violations = self.result.violations[violations_before:]
    errors = sum(1 for v in file_violations if v.severity == Severity.ERROR)
    warnings = sum(1 for v in file_violations if v.severity == Severity.WARNING)

    self.result.file_results.append(
        FileResult(file_path=file_path, errors=errors, warnings=warnings)
    )

    return self.result
|
||
|
||
def validate_object(self, object_name: str) -> ValidationResult:
    """Validate all files related to an entity (e.g., company, vendor, order)"""
    print(f"\n🔍 Searching for '{object_name}'-related files...\n")

    # Generate name variants (singular/plural forms) so either form matches.
    name = object_name.lower()
    variants = {name}

    # Handle common plural patterns
    if name.endswith("ies"):
        # companies -> company
        variants.add(name[:-3] + "y")
    elif name.endswith("s"):
        # vendors -> vendor
        variants.add(name[:-1])
    else:
        # company -> companies, vendor -> vendors
        if name.endswith("y"):
            variants.add(name[:-1] + "ies")
        variants.add(name + "s")

    # Glob patterns covering every layer where entity-named files live.
    patterns = []
    for variant in variants:
        patterns.extend(
            [
                f"app/api/**/*{variant}*.py",
                f"app/services/*{variant}*.py",
                f"app/exceptions/*{variant}*.py",
                f"models/database/*{variant}*.py",
                f"models/schema/*{variant}*.py",
                f"static/admin/js/*{variant}*.js",
                f"app/templates/admin/*{variant}*.html",
            ]
        )

    # Find all matching files (a set de-duplicates overlapping patterns).
    found_files: set[Path] = set()
    for pattern in patterns:
        matches = list(self.project_root.glob(pattern))
        for match in matches:
            if match.is_file() and not self._should_ignore_file(match):
                found_files.add(match)

    if not found_files:
        print(f"❌ No files found matching '{object_name}'")
        return self.result

    # Sort files by type (extension first) for better readability
    sorted_files = sorted(found_files, key=lambda p: (p.suffix, str(p)))

    print(f"📁 Found {len(sorted_files)} files:\n")
    for f in sorted_files:
        rel_path = f.relative_to(self.project_root)
        print(f" • {rel_path}")

    print("\n" + "-" * 60 + "\n")

    # Validate each file quietly; per-file results accumulate in self.result.
    for file_path in sorted_files:
        rel_path = file_path.relative_to(self.project_root)
        print(f"📄 {rel_path}")
        self.validate_file(file_path, quiet=True)

    return self.result
|
||
|
||
def _validate_python_file(
    self, file_path: Path, content: str, lines: list[str], file_path_str: str
):
    """Validate a single Python file based on its location"""
    # API endpoints (both POSIX and Windows separators are matched).
    if "/app/api/" in file_path_str or "\\app\\api\\" in file_path_str:
        print("📡 Validating as API endpoint...")
        self._check_pydantic_usage(file_path, content, lines)
        self._check_no_business_logic_in_endpoints(file_path, content, lines)
        self._check_endpoint_exception_handling(file_path, content, lines)
        self._check_endpoint_authentication(file_path, content, lines)

    # Service layer
    elif "/app/services/" in file_path_str or "\\app\\services\\" in file_path_str:
        print("🔧 Validating as service layer...")
        self._check_no_http_exception_in_services(file_path, content, lines)
        self._check_service_exceptions(file_path, content, lines)
        self._check_db_session_parameter(file_path, content, lines)
        self._check_no_commit_in_services(file_path, content, lines)

    # Models: MDL-002 forbids mixing SQLAlchemy Base with Pydantic BaseModel
    # in one class definition.
    elif "/app/models/" in file_path_str or "\\app\\models\\" in file_path_str:
        print("📦 Validating as model...")
        for i, line in enumerate(lines, 1):
            if re.search(r"class.*\(Base.*,.*BaseModel.*\)", line):
                self._add_violation(
                    rule_id="MDL-002",
                    rule_name="Separate SQLAlchemy and Pydantic models",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message="Model mixes SQLAlchemy Base and Pydantic BaseModel",
                    context=line.strip(),
                    suggestion="Keep SQLAlchemy models and Pydantic models separate",
                )

    # Generic Python file - check exception handling.
    # NOTE(review): this section runs for EVERY Python file, not only the
    # "generic" ones (it is not in an else branch) — confirm this is intended.
    print("⚠️ Validating exception handling...")
    for i, line in enumerate(lines, 1):
        # EXC-002: a bare "except:" also swallows SystemExit/KeyboardInterrupt.
        if re.match(r"\s*except\s*:", line):
            self._add_violation(
                rule_id="EXC-002",
                rule_name="No bare except clauses",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=i,
                message="Bare except clause catches all exceptions including system exits",
                context=line.strip(),
                suggestion="Specify exception type: except ValueError: or except Exception:",
            )
|
||
|
||
def _validate_js_file(self, file_path: Path, content: str, lines: list[str]):
    """Validate a single JavaScript file"""
    print("🟨 Validating JavaScript...")

    # JS-001: Check for console usage (must use centralized logger)
    # Skip init-*.js files - they run before logger is available
    if not file_path.name.startswith("init-"):
        for i, line in enumerate(lines, 1):
            if re.search(r"console\.(log|warn|error)", line):
                # Lines with a comment marker, a ✅ marker, or an
                # eslint-disable pragma are treated as intentional.
                if "//" in line or "✅" in line or "eslint-disable" in line:
                    continue
                self._add_violation(
                    rule_id="JS-001",
                    rule_name="Use centralized logger",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Use centralized logger instead of console",
                    context=line.strip()[:80],
                    suggestion="Use window.LogConfig.createLogger('moduleName')",
                )

    # JS-002: Check for window.apiClient (must use lowercase apiClient)
    for i, line in enumerate(lines, 1):
        if "window.apiClient" in line:
            # Only flag occurrences not preceded by a // comment marker.
            before_occurrence = line[: line.find("window.apiClient")]
            if "//" not in before_occurrence:
                self._add_violation(
                    rule_id="JS-002",
                    rule_name="Use lowercase apiClient",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Use apiClient directly instead of window.apiClient",
                    context=line.strip(),
                    suggestion="Replace window.apiClient with apiClient",
                )

    # JS-003: Check Alpine components spread ...data() for base layout inheritance
    # Only check files that define Alpine component functions (function adminXxx() { return {...} })
    self._check_alpine_data_spread(file_path, content, lines)

    # JS-004: Check Alpine components set currentPage
    self._check_alpine_current_page(file_path, content, lines)

    # JS-005: Check initialization guard
    self._check_init_guard(file_path, content, lines)

    # JS-006: Check async error handling
    self._check_async_error_handling(file_path, content, lines)

    # JS-007: Check loading state management
    self._check_loading_state(file_path, content, lines)

    # JS-008: Check for raw fetch() calls instead of apiClient
    self._check_fetch_vs_api_client(file_path, content, lines)

    # JS-009: Check for alert() or window.showToast instead of Utils.showToast()
    self._check_toast_usage(file_path, content, lines)
|
||
|
||
def _check_toast_usage(self, file_path: Path, content: str, lines: list[str]):
    """JS-009: flag alert() and window.showToast in favour of Utils.showToast()."""
    # utils.js defines showToast itself, so it is exempt.
    if "utils.js" in file_path.name:
        return

    # A file-level suppression comment disables the whole check.
    if "noqa: js-009" in content.lower():
        return

    alert_call = re.compile(r"\balert\s*\(")
    for lineno, raw in enumerate(lines, 1):
        code = raw.strip()

        # Ignore comment lines and lines carrying an inline suppression.
        if code.startswith(("//", "/*")) or "noqa: js-009" in raw.lower():
            continue

        # Browser alert() dialogs break the app's toast-based UX.
        if alert_call.search(raw):
            self._add_violation(
                rule_id="JS-009",
                rule_name="Use Utils.showToast() for notifications",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=lineno,
                message="Browser alert() dialog - use Utils.showToast() for consistent UX",
                context=code[:80],
                suggestion="Replace alert('message') with Utils.showToast('message', 'success'|'error'|'warning'|'info')",
            )

        # window.showToast does not exist; only Utils.showToast does.
        if "window.showToast" in raw:
            self._add_violation(
                rule_id="JS-009",
                rule_name="Use Utils.showToast() for notifications",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=lineno,
                message="window.showToast is undefined - use Utils.showToast()",
                context=code[:80],
                suggestion="Replace window.showToast('msg', 'type') with Utils.showToast('msg', 'type')",
            )
|
||
|
||
def _check_alpine_data_spread(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-003: Check that Alpine components inherit base layout data using ...data()"""
    # Skip utility/init files that aren't page components
    skip_patterns = ["init-", "api-client", "log-config", "utils", "helpers"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # Check for file-level noqa suppression comment
    if "noqa: js-003" in content.lower():
        return

    # Look for Alpine component function pattern: function adminXxx(...) { return { ... } }
    # These are page-level components that should inherit from data()
    # Allow optional parameters in the function signature
    component_pattern = re.compile(
        r"function\s+(admin\w+|vendor\w+|shop\w+)\s*\([^)]*\)\s*\{", re.IGNORECASE
    )

    for match in component_pattern.finditer(content):
        func_name = match.group(1)
        func_start = match.start()
        # Convert the character offset into a 1-based line number.
        line_num = content[:func_start].count("\n") + 1

        # Find the return statement with object literal
        # Look for "return {" within reasonable distance (first 500 chars)
        search_region = content[func_start : func_start + 500]

        if "return {" in search_region:
            # Check if ...data() is present in the return object
            # (only the first ~200 chars of the object literal are inspected,
            # so the spread is expected near the top — by convention first)
            return_match = re.search(
                r"return\s*\{([^}]{0,200})", search_region, re.DOTALL
            )
            if return_match:
                return_content = return_match.group(1)
                if "...data()" not in return_content:
                    self._add_violation(
                        rule_id="JS-003",
                        rule_name="Alpine components must spread ...data()",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=line_num,
                        message=f"Alpine component '{func_name}' does not inherit base layout data",
                        context=f"function {func_name}() {{ return {{ ... }}",
                        suggestion="Add '...data(),' as first property in return object to inherit dark mode, sidebar state, etc.",
                    )
|
||
|
||
def _check_fetch_vs_api_client(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-008: flag raw fetch() calls to API endpoints that bypass apiClient."""
    # The API client itself and bootstrap scripts legitimately use fetch().
    if "init-" in file_path.name or "api-client" in file_path.name:
        return

    # File-level suppression comment.
    if "noqa: js-008" in content.lower():
        return

    fetch_call = re.compile(r"\bfetch\s*\(")
    for lineno, raw in enumerate(lines, 1):
        if not fetch_call.search(raw):
            continue

        code = raw.strip()
        if code.startswith(("//", "/*")):
            continue

        # A fetch() of an /api/ URL without apiClient on the same line
        # skips the client's automatic auth header handling.
        if "/api/" in raw and "apiClient" not in raw:
            self._add_violation(
                rule_id="JS-008",
                rule_name="Use apiClient for API calls",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=lineno,
                message="Raw fetch() call to API endpoint - use apiClient for automatic auth",
                context=code[:80],
                suggestion="Replace fetch('/api/...') with apiClient.get('/endpoint') or apiClient.post('/endpoint', data)",
            )
|
||
|
||
def _check_alpine_current_page(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-004: Check that Alpine components set currentPage identifier.

    Warns (once per component) when a page-level component factory
    (adminXxx/vendorXxx/shopXxx) returns an object without a ``currentPage``
    property, which the sidebar uses for highlighting.
    """
    # Skip utility/init files - they are not page components.
    skip_patterns = ["init-", "api-client", "log-config", "utils", "helpers"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # File-level suppression comment.
    if "noqa: js-004" in content.lower():
        return

    # Look for Alpine component function pattern.
    # FIX: allow optional parameters in the signature ([^)]*), matching the
    # JS-003 pattern — previously only zero-argument components were checked,
    # so parameterized page components silently escaped this rule.
    component_pattern = re.compile(
        r"function\s+(admin\w+|vendor\w+|shop\w+)\s*\([^)]*\)\s*\{", re.IGNORECASE
    )

    for match in component_pattern.finditer(content):
        func_name = match.group(1)
        func_start = match.start()
        line_num = content[:func_start].count("\n") + 1

        # Inspect the start of the returned object literal for a currentPage
        # property (with or without a space before the colon).
        search_region = content[func_start : func_start + 800]
        if "return {" in search_region:
            return_match = re.search(
                r"return\s*\{([^}]{0,500})", search_region, re.DOTALL
            )
            if return_match:
                return_content = return_match.group(1)
                if (
                    "currentPage:" not in return_content
                    and "currentPage :" not in return_content
                ):
                    self._add_violation(
                        rule_id="JS-004",
                        rule_name="Alpine components must set currentPage",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=line_num,
                        message=f"Alpine component '{func_name}' does not set currentPage identifier",
                        context=f"function {func_name}()",
                        suggestion="Add 'currentPage: \"page-name\",' in return object for sidebar highlighting",
                    )
|
||
|
||
def _check_init_guard(self, file_path: Path, content: str, lines: list[str]):
    """JS-005: Check that init methods have duplicate initialization guards"""
    # Skip utility/init files
    skip_patterns = ["init-", "api-client", "log-config", "utils", "helpers"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # File-level suppression comment
    if "noqa: js-005" in content.lower():
        return

    # Look for init() methods (plain or async) in Alpine components
    init_pattern = re.compile(r"async\s+init\s*\(\s*\)\s*\{|init\s*\(\s*\)\s*\{")

    for match in init_pattern.finditer(content):
        init_start = match.start()
        line_num = content[:init_start].count("\n") + 1

        # Check the next 300 chars for any known guard pattern
        search_region = content[init_start : init_start + 300]
        guard_patterns = [
            "window._",
            "if (this._initialized)",
            "if (window.",
            "_initialized",
        ]

        has_guard = any(pattern in search_region for pattern in guard_patterns)
        if not has_guard:
            self._add_violation(
                rule_id="JS-005",
                rule_name="Initialization methods must include guard",
                severity=Severity.WARNING,
                file_path=file_path,
                line_number=line_num,
                message="init() method lacks duplicate initialization guard",
                context="init() { ... }",
                suggestion="Add guard: if (window._pageInitialized) return; window._pageInitialized = true;",
            )
            return  # Only report once per file
|
||
|
||
def _check_async_error_handling(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-006: Check that async operations have try/catch error handling"""
    # Skip utility/init files
    skip_patterns = ["init-", "api-client", "log-config"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # File-level suppression comment
    if "noqa: js-006" in content.lower():
        return

    # Look for async functions/methods
    async_pattern = re.compile(r"async\s+\w+\s*\([^)]*\)\s*\{")

    for match in async_pattern.finditer(content):
        func_start = match.start()
        line_num = content[:func_start].count("\n") + 1

        # Find the function body (look for matching braces)
        # Simplified heuristic: check the next 800 chars for try/catch
        search_region = content[func_start : func_start + 800]

        # Check if there's await without try/catch
        if (
            "await " in search_region
            and "try {" not in search_region
            and "try{" not in search_region
        ):
            # Promise-style .catch() also counts as error handling
            if ".catch(" not in search_region:
                self._add_violation(
                    rule_id="JS-006",
                    rule_name="Async operations must have error handling",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=line_num,
                    message="Async function with await lacks try/catch error handling",
                    context=match.group(0)[:50],
                    suggestion="Wrap await calls in try/catch with proper error logging",
                )
                return  # Only report once per file
|
||
|
||
def _check_loading_state(self, file_path: Path, content: str, lines: list[str]):
    """JS-007: Check that async operations manage loading state.

    Reports (at INFO level, once per file) Alpine components that make API
    calls but neither declare a ``loading`` property nor assign one at
    runtime.
    """
    # Skip utility/init files - they are not page components.
    skip_patterns = ["init-", "api-client", "log-config", "utils"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # File-level suppression comment.
    if "noqa: js-007" in content.lower():
        return

    # Look for Alpine component functions that have async methods with API calls
    component_pattern = re.compile(
        r"function\s+(admin\w+|vendor\w+|shop\w+)\s*\(\s*\)\s*\{", re.IGNORECASE
    )

    for match in component_pattern.finditer(content):
        func_start = match.start()
        # Get the component body (rough extraction, first 5000 chars)
        component_region = content[func_start : func_start + 5000]

        has_api_calls = (
            "apiClient." in component_region or "await " in component_region
        )
        has_loading_state = (
            "loading:" in component_region or "loading :" in component_region
        )
        has_loading_assignment = (
            "this.loading = " in component_region
            or "loading = true" in component_region
        )

        # FIX: has_loading_assignment was previously computed but never used,
        # so components that only assign this.loading at runtime were still
        # flagged. Either a declared property or an assignment now counts.
        if has_api_calls and not (has_loading_state or has_loading_assignment):
            line_num = content[:func_start].count("\n") + 1
            self._add_violation(
                rule_id="JS-007",
                rule_name="Set loading state for async operations",
                severity=Severity.INFO,
                file_path=file_path,
                line_number=line_num,
                message="Component has API calls but no loading state property",
                context=match.group(1),
                suggestion="Add 'loading: false,' to component state and set it before/after API calls",
            )
            return  # Only report once per file
|
||
|
||
def _validate_html_file(self, file_path: Path, content: str, lines: list[str]):
    """Validate a single HTML template file"""
    print("📄 Validating template...")

    file_path_str = str(file_path)

    # Skip base template and partials for extends check
    is_base_or_partial = (
        "base.html" in file_path.name or "partials" in file_path_str
    )

    # Skip macros directory for FE rules
    is_macro = (
        "shared/macros/" in file_path_str or "shared\\macros\\" in file_path_str
    )

    # Skip components showcase page
    is_components_page = "components.html" in file_path.name

    # Determine template type from the path segment (admin / vendor / shop)
    is_admin = "/admin/" in file_path_str or "\\admin\\" in file_path_str
    is_vendor = "/vendor/" in file_path_str or "\\vendor\\" in file_path_str
    is_shop = "/shop/" in file_path_str or "\\shop\\" in file_path_str

    if is_base_or_partial:
        print("⏭️ Skipping base/partial template")
    elif is_macro:
        print("⏭️ Skipping macro file")
    else:
        # FE-001: Check for inline pagination (should use macro)
        if not is_components_page:
            self._check_pagination_macro_usage(file_path, content, lines)

        # FE-002: Check for inline SVGs (should use $icon())
        if not is_components_page and not is_macro:
            self._check_icon_helper_usage(file_path, content, lines)

        # FE-008: Check for raw number inputs (should use number_stepper)
        if not is_components_page and not is_macro:
            self._check_number_stepper_macro_usage(file_path, content, lines)

        # TPL-004: Check x-text usage for dynamic content
        self._check_xtext_usage(file_path, content, lines)

        # TPL-005: Check x-html safety
        self._check_xhtml_safety(file_path, content, lines)

        # TPL-006: Check loading state implementation
        self._check_template_loading_state(file_path, content, lines)

        # TPL-007: Check empty state implementation
        self._check_template_empty_state(file_path, content, lines)

        # TPL-008: Check for call table_header() pattern (should be table_header_custom)
        self._check_table_header_call_pattern(file_path, content, lines)

        # TPL-009: Check for invalid block names (admin and vendor use same blocks)
        if is_admin or is_vendor:
            self._check_valid_block_names(file_path, content, lines)

    # Base/partial templates never need an {% extends %} themselves.
    if is_base_or_partial:
        return

    # Check for standalone marker in template (first 5 lines)
    first_lines = "\n".join(lines[:5]).lower()
    if "standalone" in first_lines or "noqa: tpl-00" in first_lines:
        print("⏭️ Template marked as standalone, skipping extends check")
        return

    # TPL-001: Admin templates extends check
    if is_admin:
        self._check_template_extends(
            file_path,
            lines,
            "admin/base.html",
            "TPL-001",
            ["login.html", "errors/", "test-"],
        )

    # TPL-002: Vendor templates extends check
    if is_vendor:
        self._check_template_extends(
            file_path,
            lines,
            "vendor/base.html",
            "TPL-002",
            ["login.html", "errors/", "test-"],
        )

    # TPL-003: Shop templates extends check
    if is_shop:
        self._check_template_extends(
            file_path, lines, "shop/base.html", "TPL-003", ["errors/", "test-"]
        )
|
||
|
||
def _check_template_extends(
    self,
    file_path: Path,
    lines: list[str],
    base_template: str,
    rule_id: str,
    exclusions: list[str],
):
    """Verify that a template {% extends %} the expected base template."""
    path_text = str(file_path)

    # Some templates (login pages, error pages, test fixtures) are
    # deliberately standalone; exclusion substrings skip them entirely.
    for exclusion in exclusions:
        if exclusion in path_text:
            print(f"⏭️ Template matches exclusion pattern '{exclusion}', skipping")
            return

    # Scan for an extends tag that names the expected base template.
    extends_found = False
    for line in lines:
        if "{% extends" in line and base_template in line:
            extends_found = True
            break

    if extends_found:
        return

    # Derive "admin"/"vendor"/"shop" from the base template path for messaging.
    template_type = base_template.split("/")[0]
    self._add_violation(
        rule_id=rule_id,
        rule_name=f"{template_type.title()} templates must extend base",
        severity=Severity.ERROR,
        file_path=file_path,
        line_number=1,
        message=f"{template_type.title()} template does not extend {base_template}",
        context=file_path.name,
        suggestion=f"Add {{% extends '{base_template}' %}} at the top, or add {{# standalone #}} if intentional",
    )
|
||
|
||
    def _check_xtext_usage(self, file_path: Path, content: str, lines: list[str]):
        """TPL-004: Check that x-text is used for dynamic text content.

        Heuristic: flags ``{{ obj.name }}``-style interpolation of common
        user-data fields (name/title/description) on lines that carry no
        x-text binding. Reports at most one INFO violation per file.
        """
        # File-level opt-out.
        if "noqa: tpl-004" in content.lower():
            return

        # Look for potentially unsafe patterns where user data might be interpolated
        # This is a heuristic check - not perfect
        unsafe_patterns = [
            (r"\{\{\s*\w+\.name\s*\}\}", "User data interpolation without x-text"),
            (r"\{\{\s*\w+\.title\s*\}\}", "User data interpolation without x-text"),
            (
                r"\{\{\s*\w+\.description\s*\}\}",
                "User data interpolation without x-text",
            ),
        ]

        for i, line in enumerate(lines, 1):
            # Skip Jinja2 template syntax (server-side safe)
            if "{%" in line:
                continue

            for pattern, message in unsafe_patterns:
                if re.search(pattern, line):
                    # Check if it's in an x-text context already
                    if 'x-text="' not in line and "x-text='" not in line:
                        self._add_violation(
                            rule_id="TPL-004",
                            rule_name="Use x-text for dynamic text content",
                            severity=Severity.INFO,
                            file_path=file_path,
                            line_number=i,
                            message=message,
                            context=line.strip()[:60],
                            suggestion='Use x-text="item.name" instead of {{ item.name }} for XSS safety',
                        )
                        return  # Only report once per file
|
||
|
||
def _check_xhtml_safety(self, file_path: Path, content: str, lines: list[str]):
|
||
"""TPL-005: Check that x-html is only used for safe content"""
|
||
if "noqa: tpl-005" in content.lower():
|
||
return
|
||
|
||
safe_xhtml_patterns = [
|
||
r'x-html="\$icon\(', # Icon helper is safe
|
||
r"x-html='\$icon\(",
|
||
r'x-html="`\$\{.*icon', # Template literal with icon
|
||
]
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
if "x-html=" in line:
|
||
# Check if it matches safe patterns
|
||
is_safe = any(
|
||
re.search(pattern, line) for pattern in safe_xhtml_patterns
|
||
)
|
||
|
||
if not is_safe:
|
||
# Check for potentially unsafe user content
|
||
unsafe_indicators = [
|
||
".description",
|
||
".content",
|
||
".body",
|
||
".text",
|
||
".message",
|
||
".comment",
|
||
".review",
|
||
".html",
|
||
]
|
||
for indicator in unsafe_indicators:
|
||
if indicator in line:
|
||
self._add_violation(
|
||
rule_id="TPL-005",
|
||
rule_name="Use x-html ONLY for safe content",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="x-html used with potentially unsafe user content",
|
||
context=line.strip()[:60],
|
||
suggestion="Sanitize HTML content or use x-text for plain text",
|
||
)
|
||
return
|
||
|
||
def _check_template_loading_state(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-006: Check that templates with data loads show loading state"""
|
||
if "noqa: tpl-006" in content.lower():
|
||
return
|
||
|
||
# Check if template has data loading (Alpine init, fetch, apiClient)
|
||
has_data_loading = any(
|
||
[
|
||
"x-init=" in content,
|
||
"@load=" in content,
|
||
"apiClient" in content,
|
||
"loadData" in content,
|
||
"fetchData" in content,
|
||
]
|
||
)
|
||
|
||
if not has_data_loading:
|
||
return
|
||
|
||
# Check for loading state display
|
||
has_loading_state = any(
|
||
[
|
||
'x-show="loading"' in content,
|
||
"x-show='loading'" in content,
|
||
'x-if="loading"' in content,
|
||
"x-if='loading'" in content,
|
||
"loading_state" in content,
|
||
"Loading..." in content,
|
||
"spinner" in content.lower(),
|
||
]
|
||
)
|
||
|
||
if not has_loading_state:
|
||
self._add_violation(
|
||
rule_id="TPL-006",
|
||
rule_name="Implement loading state for data loads",
|
||
severity=Severity.INFO,
|
||
file_path=file_path,
|
||
line_number=1,
|
||
message="Template loads data but has no visible loading state",
|
||
context=file_path.name,
|
||
suggestion='Add <div x-show="loading">Loading...</div> or use loading_state macro',
|
||
)
|
||
|
||
    def _check_template_empty_state(
        self, file_path: Path, content: str, lines: list[str]
    ):
        """TPL-007: Check that templates with lists show empty state.

        If the template iterates a collection (Alpine x-for or Jinja2 for)
        it must contain some empty-state handling; otherwise a single INFO
        violation is reported at line 1.
        """
        # File-level opt-out.
        if "noqa: tpl-007" in content.lower():
            return

        # Check if template has list iteration
        has_list = any(
            [
                "x-for=" in content,
                "{% for " in content,
            ]
        )

        if not has_list:
            return

        # Check for empty state handling (length guards, shared macros, or
        # common empty-state copy).
        has_empty_state = any(
            [
                ".length === 0" in content,
                ".length == 0" in content,
                "items.length" in content,
                "empty_state" in content,
                "No items" in content,
                "No results" in content,
                "No data" in content,
                "table_empty_state" in content,
            ]
        )

        if not has_empty_state:
            self._add_violation(
                rule_id="TPL-007",
                rule_name="Implement empty state when no data",
                severity=Severity.INFO,
                file_path=file_path,
                line_number=1,
                message="Template has list but no empty state handling",
                context=file_path.name,
                suggestion='Add <template x-if="items.length === 0">No items found</template>',
            )
|
||
|
||
def _check_table_header_call_pattern(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-008: Check that {% call table_header() %} is not used.
|
||
|
||
The table_header() macro doesn't support caller() - it takes a columns list.
|
||
Using {% call table_header() %} causes a Jinja2 error:
|
||
"macro 'table_header' was invoked with two values for the special caller argument"
|
||
|
||
Use table_header_custom() instead for custom headers with caller().
|
||
"""
|
||
if "noqa: tpl-008" in content.lower():
|
||
return
|
||
|
||
import re
|
||
pattern = re.compile(r"{%\s*call\s+table_header\s*\(\s*\)\s*%}")
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
if pattern.search(line):
|
||
self._add_violation(
|
||
rule_id="TPL-008",
|
||
rule_name="Use table_header_custom for custom headers",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="{% call table_header() %} causes Jinja2 error - table_header doesn't support caller()",
|
||
context=line.strip()[:80],
|
||
suggestion="Use {% call table_header_custom() %} for custom headers with th_sortable or custom <th> elements",
|
||
)
|
||
|
||
def _check_valid_block_names(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-009: Check that templates use valid block names from base template"""
|
||
if "noqa: tpl-009" in content.lower():
|
||
return
|
||
|
||
# Skip base templates
|
||
if file_path.name == "base.html":
|
||
return
|
||
|
||
# Valid admin template blocks
|
||
valid_blocks = {"title", "extra_head", "alpine_data", "content", "extra_scripts"}
|
||
|
||
# Common invalid block names that developers might mistakenly use
|
||
invalid_blocks = {
|
||
"page_scripts": "extra_scripts",
|
||
"scripts": "extra_scripts",
|
||
"js": "extra_scripts",
|
||
"footer_scripts": "extra_scripts",
|
||
"head": "extra_head",
|
||
"body": "content",
|
||
"main": "content",
|
||
}
|
||
|
||
import re
|
||
block_pattern = re.compile(r"{%\s*block\s+(\w+)\s*%}")
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
match = block_pattern.search(line)
|
||
if match:
|
||
block_name = match.group(1)
|
||
if block_name in invalid_blocks:
|
||
self._add_violation(
|
||
rule_id="TPL-009",
|
||
rule_name="Use valid block names from base templates",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=f"Invalid block name '{block_name}' - this block doesn't exist in admin/base.html",
|
||
context=line.strip(),
|
||
suggestion=f"Use '{{% block {invalid_blocks[block_name]} %}}' instead",
|
||
)
|
||
|
||
def _check_deprecated_macros(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-011: Check for usage of deprecated macros"""
|
||
if "noqa: tpl-011" in content.lower():
|
||
return
|
||
|
||
# Deprecated macros and their replacements
|
||
deprecated_macros = {
|
||
"pagination_full": {
|
||
"replacement": "pagination",
|
||
"reason": "pagination_full expects flat variables (total, skip, page, limit) but components use nested pagination object",
|
||
},
|
||
}
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
for macro_name, info in deprecated_macros.items():
|
||
if macro_name in line and "# deprecated" not in line.lower():
|
||
# Check if it's an import or usage
|
||
if f"import {macro_name}" in line or f"{{ {macro_name}" in line or f"{{{macro_name}" in line:
|
||
self._add_violation(
|
||
rule_id="TPL-011",
|
||
rule_name="Avoid deprecated macros",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=f"Deprecated macro '{macro_name}' used - {info['reason']}",
|
||
context=line.strip()[:80],
|
||
suggestion=f"Use '{info['replacement']}' macro instead",
|
||
)
|
||
|
||
def _check_escaped_quotes_in_alpine(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-012: Check for problematic quotes in Alpine copyCode template literals.
|
||
|
||
When using copyCode() with a multi-line template literal, double quotes
|
||
inside the template will prematurely end the outer @click="..." attribute.
|
||
|
||
Bad: @click="copyCode(`{{ func("arg") }}`)" # inner " ends the attribute
|
||
Good: @click='copyCode(`{{ func("arg") }}`)' # single quotes for outer
|
||
"""
|
||
if "noqa: tpl-012" in content.lower():
|
||
return
|
||
|
||
import re
|
||
|
||
# Track multi-line copyCode template literals with double-quoted outer attribute
|
||
in_copycode_template = False
|
||
copycode_start_line = 0
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
if "noqa: tpl-012" in line.lower():
|
||
continue
|
||
|
||
# Check for start of copyCode with double-quoted attribute and template literal
|
||
# Pattern: @click="copyCode(` where the backtick doesn't close on same line
|
||
if '@click="copyCode(`' in line and '`)' not in line:
|
||
in_copycode_template = True
|
||
copycode_start_line = i
|
||
continue
|
||
|
||
# Check for end of copyCode template (backtick followed by )" or )')
|
||
if in_copycode_template:
|
||
if '`)"' in line or "`)'" in line:
|
||
in_copycode_template = False
|
||
continue
|
||
|
||
# Check for double quotes that would break the outer attribute
|
||
# These appear in Jinja template code like {{ func("arg") }}
|
||
if re.search(r'\(\s*"[^"]*"\s*[,)]', line):
|
||
self._add_violation(
|
||
rule_id="TPL-012",
|
||
rule_name="Double quotes in copyCode template literal",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Double quotes inside multi-line copyCode template literal will break HTML attribute parsing",
|
||
context=line.strip()[:80],
|
||
suggestion="Change outer attribute to single quotes: @click='copyCode(`...`)'",
|
||
)
|
||
|
||
    def _check_alpine_template_vars(
        self, file_path: Path, content: str, lines: list[str], js_content: str
    ):
        """TPL-010: Check that Alpine variables used in templates are defined in JS.

        Collects variable names the template implicitly requires (from
        error_state / action_dropdown macro usage) and verifies each appears
        as a ``name:`` key somewhere in *js_content* (the paired component).
        Reports one ERROR per missing variable, at its first usage line.
        """
        if "noqa: tpl-010" in content.lower():
            return

        import re

        # Common Alpine variable patterns that MUST be defined in the component
        # These are variables that are directly referenced in templates
        required_vars = set()

        # Check for error_state macro usage (requires 'error' var unless a
        # custom error_var= override is supplied)
        if "error_state(" in content and "error_var=" not in content:
            required_vars.add("error")

        # Check for action_dropdown macro with custom open_var=/loading_var=
        dropdown_pattern = re.compile(
            r"action_dropdown\([^)]*open_var=['\"](\w+)['\"]"
        )
        for match in dropdown_pattern.finditer(content):
            required_vars.add(match.group(1))

        dropdown_pattern2 = re.compile(
            r"action_dropdown\([^)]*loading_var=['\"](\w+)['\"]"
        )
        for match in dropdown_pattern2.finditer(content):
            required_vars.add(match.group(1))

        # Check if variables are defined in JS
        for var in required_vars:
            # Check if variable is defined in JS (look for "varname:" pattern)
            var_pattern = re.compile(rf"\b{var}\s*:")
            if not var_pattern.search(js_content):
                # Find the line where it's used in template (first macro line
                # mentioning the variable); break so each var reports once
                for i, line in enumerate(lines, 1):
                    if var in line and ("error_state" in line or "action_dropdown" in line):
                        self._add_violation(
                            rule_id="TPL-010",
                            rule_name="Alpine variables must be defined in JS component",
                            severity=Severity.ERROR,
                            file_path=file_path,
                            line_number=i,
                            message=f"Template uses Alpine variable '{var}' but it's not defined in the JS component",
                            context=line.strip()[:80],
                            suggestion=f"Add '{var}: null,' or '{var}: false,' to the Alpine component's return object",
                        )
                        break
|
||
|
||
def _check_pagination_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-001: Check for inline pagination that should use macro"""
|
||
# Check if already using the pagination macro
|
||
uses_macro = any(
|
||
"from 'shared/macros/pagination.html'" in line for line in lines
|
||
)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for noqa: FE-001 comment
|
||
has_noqa = any("noqa: fe-001" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for signs of inline pagination
|
||
pagination_indicators = [
|
||
('aria-label="Table navigation"', "Inline table navigation found"),
|
||
("previousPage()", "Inline pagination controls found"),
|
||
("nextPage()", "Inline pagination controls found"),
|
||
("goToPage(", "Inline pagination controls found"),
|
||
]
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
for pattern, message in pagination_indicators:
|
||
if pattern in line:
|
||
# Skip if it's in a comment
|
||
stripped = line.strip()
|
||
if stripped.startswith("{#") or stripped.startswith("<!--"):
|
||
continue
|
||
|
||
self._add_violation(
|
||
rule_id="FE-001",
|
||
rule_name="Use pagination macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=message + " - use shared macro instead",
|
||
context=stripped[:60],
|
||
suggestion="{% from 'shared/macros/pagination.html' import pagination %}\n{{ pagination() }}",
|
||
)
|
||
return # Only report once per file
|
||
|
||
def _check_icon_helper_usage(self, file_path: Path, content: str, lines: list[str]):
|
||
"""FE-002: Check for inline SVGs that should use $icon() helper"""
|
||
# Check for noqa: FE-002 comment
|
||
has_noqa = any("noqa: fe-002" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Pattern to find inline SVGs
|
||
svg_pattern = re.compile(
|
||
r"<svg[^>]*viewBox[^>]*>.*?</svg>", re.DOTALL | re.IGNORECASE
|
||
)
|
||
|
||
# Find all SVG occurrences
|
||
for match in svg_pattern.finditer(content):
|
||
# Find line number
|
||
line_num = content[: match.start()].count("\n") + 1
|
||
|
||
# Skip if this is likely in a code example (inside <pre> or <code>)
|
||
context_before = content[max(0, match.start() - 200) : match.start()]
|
||
if "<pre" in context_before or "<code" in context_before:
|
||
continue
|
||
|
||
self._add_violation(
|
||
rule_id="FE-002",
|
||
rule_name="Use $icon() helper",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=line_num,
|
||
message="Inline SVG found - use $icon() helper for consistency",
|
||
context="<svg...>",
|
||
suggestion="<span x-html=\"$icon('icon-name', 'w-4 h-4')\"></span>",
|
||
)
|
||
|
||
    def _check_alerts_macro_usage(
        self, file_path: Path, content: str, lines: list[str]
    ):
        """FE-003: Check for inline loading/error states that should use alerts macro.

        Two heuristics: an x-show="loading" block styled as a centered
        spinner, or an inline red error banner. Reports at most one WARNING
        per file (whichever pattern is hit first).
        """
        # Check if already using the alerts macro
        uses_macro = any("from 'shared/macros/alerts.html'" in line for line in lines)
        if uses_macro:
            return

        # Check for noqa comment (file-level opt-out)
        has_noqa = any("noqa: fe-003" in line.lower() for line in lines)
        if has_noqa:
            return

        # Look for inline loading states
        for i, line in enumerate(lines, 1):
            # Loading state pattern: text-center py-12 with loading content
            if 'x-show="loading"' in line or "x-show='loading'" in line:
                # Check if next few lines have spinner pattern
                # (window covers the matched line plus the 3 following lines)
                context_lines = "\n".join(lines[i - 1 : i + 3])
                if "text-center" in context_lines and "py-12" in context_lines:
                    self._add_violation(
                        rule_id="FE-003",
                        rule_name="Use alerts macro",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=i,
                        message="Inline loading state found - use loading_state macro",
                        context=line.strip()[:60],
                        suggestion="{% from 'shared/macros/alerts.html' import loading_state %}\n{{ loading_state('Loading...') }}",
                    )
                    return  # Only report once per file

            # Error state pattern: bg-red-100 border-red-400
            if "bg-red-100" in line and "border-red-400" in line:
                self._add_violation(
                    rule_id="FE-003",
                    rule_name="Use alerts macro",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Inline error state found - use error_state macro",
                    context=line.strip()[:60],
                    suggestion="{% from 'shared/macros/alerts.html' import error_state %}\n{{ error_state('Error', 'error') }}",
                )
                return
|
||
|
||
    def _check_modals_macro_usage(
        self, file_path: Path, content: str, lines: list[str]
    ):
        """FE-004: Check for inline modals that should use modals macro.

        Heuristic: a full-screen overlay (fixed inset-0 with high z-index)
        whose nearby lines carry modal indicators (role="dialog" or a dimmed
        backdrop). Reports at most one WARNING per file.
        """
        # Check if already using the modals macro
        uses_macro = any("from 'shared/macros/modals.html'" in line for line in lines)
        if uses_macro:
            return

        # Check for noqa comment (file-level opt-out)
        has_noqa = any("noqa: fe-004" in line.lower() for line in lines)
        if has_noqa:
            return

        # Look for modal patterns: fixed inset-0 with role="dialog" or modal backdrop
        for i, line in enumerate(lines, 1):
            if "fixed inset-0" in line and (
                "z-50" in line or "z-30" in line or "z-40" in line
            ):
                # Check context for modal indicators
                # (window spans one line before through five after the match)
                context_lines = "\n".join(lines[max(0, i - 1) : min(len(lines), i + 5)])
                if (
                    'role="dialog"' in context_lines
                    or "bg-opacity-50" in context_lines
                    or "bg-black/50" in context_lines
                ):
                    self._add_violation(
                        rule_id="FE-004",
                        rule_name="Use modals macro",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=i,
                        message="Inline modal found - use modal macro for consistency",
                        context=line.strip()[:60],
                        suggestion="{% from 'shared/macros/modals.html' import modal %}\n{% call modal('myModal', 'Title', 'isModalOpen') %}...{% endcall %}",
                    )
                    return
|
||
|
||
    def _check_tables_macro_usage(
        self, file_path: Path, content: str, lines: list[str]
    ):
        """FE-005: Check for inline table wrappers that should use tables macro.

        Heuristic: a div carrying the canonical wrapper classes
        (overflow-hidden rounded-lg shadow-xs) with a <table> within the
        next ten lines. Reports at most one WARNING per file.
        """
        # Check if already using the tables macro
        uses_macro = any("from 'shared/macros/tables.html'" in line for line in lines)
        if uses_macro:
            return

        # Check for noqa comment (file-level opt-out)
        has_noqa = any("noqa: fe-005" in line.lower() for line in lines)
        if has_noqa:
            return

        # Look for table wrapper pattern: overflow-hidden rounded-lg shadow-xs
        for i, line in enumerate(lines, 1):
            if (
                "overflow-hidden" in line
                and "rounded-lg" in line
                and "shadow-xs" in line
            ):
                # Check if there's a table nearby (matched line + next 10)
                context_lines = "\n".join(lines[i - 1 : min(len(lines), i + 10)])
                if "<table" in context_lines:
                    self._add_violation(
                        rule_id="FE-005",
                        rule_name="Use tables macro",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=i,
                        message="Inline table wrapper found - use table_wrapper macro",
                        context=line.strip()[:60],
                        suggestion="{% from 'shared/macros/tables.html' import table_wrapper, table_header %}\n{% call table_wrapper() %}...{% endcall %}",
                    )
                    return
|
||
|
||
    def _check_dropdowns_macro_usage(
        self, file_path: Path, content: str, lines: list[str]
    ):
        """FE-006: Check for inline dropdowns that should use dropdowns macro.

        Heuristic: an @click.away/@click.outside handler near absolutely
        positioned, z-indexed, shadowed/bordered markup - but not a modal
        (modals use fixed inset-0 and are covered by FE-004). Reports at
        most one WARNING per file.
        """
        # Check if already using the dropdowns macro
        uses_macro = any(
            "from 'shared/macros/dropdowns.html'" in line for line in lines
        )
        if uses_macro:
            return

        # Check for noqa comment (file-level opt-out)
        has_noqa = any("noqa: fe-006" in line.lower() for line in lines)
        if has_noqa:
            return

        # Look for dropdown patterns: @click.outside or @click.away with menu positioning
        for i, line in enumerate(lines, 1):
            # Match @click.away="something = false" or @click.outside="..."
            if "@click.away=" in line or "@click.outside=" in line:
                # Skip if this is a modal (modals also use @click.away but have different structure)
                # Window: 5 lines before through 10 after the match.
                context_lines = "\n".join(
                    lines[max(0, i - 5) : min(len(lines), i + 10)]
                )

                # Skip if it looks like a modal (fixed inset-0)
                if "fixed inset-0" in context_lines:
                    continue

                # Look for dropdown menu styling: absolute positioning with z-index
                if "absolute" in context_lines and (
                    "z-10" in context_lines
                    or "z-20" in context_lines
                    or "z-50" in context_lines
                ):
                    # Additional indicators: shadow, border, bg-white/dark, rounded
                    if "shadow" in context_lines or "border" in context_lines:
                        self._add_violation(
                            rule_id="FE-006",
                            rule_name="Use dropdowns macro",
                            severity=Severity.WARNING,
                            file_path=file_path,
                            line_number=i,
                            message="Inline dropdown menu found - use dropdown macro",
                            context=line.strip()[:60],
                            suggestion="{% from 'shared/macros/dropdowns.html' import dropdown, dropdown_item %}\n{% call dropdown('Label', 'isOpen') %}...{% endcall %}",
                        )
                        return
|
||
|
||
def _check_headers_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-007: Check for inline page headers that should use headers macro"""
|
||
# Check if already using the headers macro
|
||
uses_macro = any("from 'shared/macros/headers.html'" in line for line in lines)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for noqa comment
|
||
has_noqa = any("noqa: fe-007" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for page header pattern: flex with h2 text-2xl font-semibold
|
||
for i, line in enumerate(lines, 1):
|
||
if "<h2" in line and "text-2xl" in line and "font-semibold" in line:
|
||
# Check context for typical page header pattern
|
||
context_before = "\n".join(lines[max(0, i - 3) : i])
|
||
if "flex" in context_before and (
|
||
"justify-between" in context_before
|
||
or "items-center" in context_before
|
||
):
|
||
self._add_violation(
|
||
rule_id="FE-007",
|
||
rule_name="Use headers macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Inline page header found - use page_header macro",
|
||
context=line.strip()[:60],
|
||
suggestion="{% from 'shared/macros/headers.html' import page_header %}\n{{ page_header('Title', action_label='Create', action_url='/create') }}",
|
||
)
|
||
return
|
||
|
||
def _check_number_stepper_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-008: Check for raw number inputs that should use number_stepper macro
|
||
|
||
Detects <input type="number"> that should use the number_stepper macro for
|
||
consistent styling and dark mode support.
|
||
|
||
Exceptions:
|
||
- ID fields (placeholder contains 'id' or 'ID')
|
||
- Files already importing number_stepper
|
||
- Lines with noqa: FE-008 comment
|
||
"""
|
||
# Check if already using the number_stepper macro
|
||
uses_macro = any("number_stepper" in line for line in lines)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for file-level noqa comment
|
||
has_noqa = any("noqa: fe-008" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for raw number inputs
|
||
for i, line in enumerate(lines, 1):
|
||
if 'type="number"' in line or "type='number'" in line:
|
||
# Skip if line has noqa comment
|
||
if "noqa" in line.lower():
|
||
continue
|
||
|
||
# Skip if it looks like an ID field (check surrounding lines for context)
|
||
context_lines = "\n".join(
|
||
lines[max(0, i - 3) : min(len(lines), i + 2)]
|
||
).lower()
|
||
if (
|
||
"user id" in context_lines
|
||
or "placeholder" in context_lines
|
||
and "id" in context_lines
|
||
):
|
||
continue
|
||
|
||
# Skip if it's in a comment
|
||
stripped = line.strip()
|
||
if (
|
||
stripped.startswith("{#")
|
||
or stripped.startswith("<!--")
|
||
or stripped.startswith("//")
|
||
):
|
||
continue
|
||
|
||
self._add_violation(
|
||
rule_id="FE-008",
|
||
rule_name="Use number_stepper macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Raw number input found - use number_stepper macro for consistent styling",
|
||
context=stripped[:70],
|
||
suggestion="{% from 'shared/macros/inputs.html' import number_stepper %}\n{{ number_stepper(model='fieldName', min=1, max=100) }}\nOr add {# noqa: FE-008 #} if this is intentional (e.g., ID field)",
|
||
)
|
||
return # Only report once per file
|
||
|
||
def _validate_api_endpoints(self, target_path: Path):
|
||
"""Validate API endpoint rules (API-001, API-002, API-003, API-004)"""
|
||
print("📡 Validating API endpoints...")
|
||
|
||
api_files = list(target_path.glob("app/api/v1/**/*.py"))
|
||
self.result.files_checked += len(api_files)
|
||
|
||
for file_path in api_files:
|
||
if self._should_ignore_file(file_path):
|
||
continue
|
||
|
||
content = file_path.read_text()
|
||
lines = content.split("\n")
|
||
|
||
# API-001: Check for Pydantic model usage
|
||
self._check_pydantic_usage(file_path, content, lines)
|
||
|
||
# API-002: Check for business logic in endpoints
|
||
self._check_no_business_logic_in_endpoints(file_path, content, lines)
|
||
|
||
# API-003: Check exception handling
|
||
self._check_endpoint_exception_handling(file_path, content, lines)
|
||
|
||
# API-004: Check authentication
|
||
self._check_endpoint_authentication(file_path, content, lines)
|
||
|
||
# API-005: Check vendor_id scoping for vendor/shop endpoints
|
||
self._check_vendor_scoping(file_path, content, lines)
|
||
|
||
def _check_pydantic_usage(self, file_path: Path, content: str, lines: list[str]):
|
||
"""API-001: Ensure endpoints use Pydantic models"""
|
||
rule = self._get_rule("API-001")
|
||
if not rule:
|
||
return
|
||
|
||
# Check for response_model in route decorators
|
||
route_pattern = r"@router\.(get|post|put|delete|patch)"
|
||
dict_return_pattern = r"return\s+\{.*\}"
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
# Check for dict returns in endpoints
|
||
if re.search(route_pattern, line):
|
||
# Look ahead for function body
|
||
func_start = i
|
||
indent = len(line) - len(line.lstrip())
|
||
|
||
# Find function body
|
||
for j in range(func_start, min(func_start + 20, len(lines))):
|
||
if j >= len(lines):
|
||
break
|
||
|
||
func_line = lines[j]
|
||
if re.search(dict_return_pattern, func_line):
|
||
self._add_violation(
|
||
rule_id="API-001",
|
||
rule_name=rule["name"],
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=j + 1,
|
||
message="Endpoint returns raw dict instead of Pydantic model",
|
||
context=func_line.strip(),
|
||
suggestion="Define a Pydantic response model and use response_model parameter",
|
||
)
|
||
|
||
def _check_no_business_logic_in_endpoints(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""API-002: Ensure no business logic in endpoints"""
|
||
rule = self._get_rule("API-002")
|
||
if not rule:
|
||
return
|
||
|
||
# NOTE: db.commit() is intentionally NOT included here
|
||
# Transaction control (commit) is allowed at endpoint level
|
||
# Only business logic operations are flagged
|
||
anti_patterns = [
|
||
(r"db\.add\(", "Creating entities should be in service layer"),
|
||
(r"db\.delete\(", "Deleting entities should be in service layer"),
|
||
(r"db\.query\(", "Database queries should be in service layer"),
|
||
(r"db\.execute\(", "Database operations should be in service layer"),
|
||
]
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
# Skip service method calls (allowed)
|
||
if "_service." in line or "service." in line:
|
||
continue
|
||
|
||
for pattern, message in anti_patterns:
|
||
if re.search(pattern, line):
|
||
self._add_violation(
|
||
rule_id="API-002",
|
||
rule_name=rule["name"],
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=message,
|
||
context=line.strip(),
|
||
suggestion="Move database operations to service layer",
|
||
)
|
||
|
||
    def _check_endpoint_exception_handling(
        self, file_path: Path, content: str, lines: list[str]
    ):
        """API-003: Check that endpoints do NOT raise exceptions directly.

        The architecture uses:
        - Dependencies (deps.py) for authentication/authorization validation
        - Services for business logic validation
        - Global exception handler that catches WizamartException subclasses

        Endpoints should be a thin orchestration layer that trusts dependencies
        and services to handle all validation. They should NOT raise exceptions.
        """
        rule = self._get_rule("API-003")
        if not rule:
            return

        # Skip exception handler file and deps.py - they're allowed to raise exceptions
        file_path_str = str(file_path)
        if "exceptions/handler.py" in file_path_str or file_path_str.endswith(
            "deps.py"
        ):
            return

        # Patterns that indicate endpoints are raising exceptions (BAD).
        # Matched as plain substrings against each non-comment line.
        exception_patterns = [
            ("raise HTTPException", "Endpoint raises HTTPException directly"),
            (
                "raise InvalidTokenException",
                "Endpoint raises InvalidTokenException - move to dependency",
            ),
            (
                "raise InsufficientPermissionsException",
                "Endpoint raises permission exception - move to dependency",
            ),
            (
                "raise UnauthorizedVendorAccessException",
                "Endpoint raises auth exception - move to dependency or service",
            ),
        ]

        # Patterns that indicate redundant validation (BAD) - the auth
        # dependency already guarantees these attributes. Matched as regexes.
        redundant_patterns = [
            (
                r"if not hasattr\(current_user.*token_vendor",
                "Redundant token_vendor check - get_current_vendor_api guarantees this",
            ),
            (
                r"if not hasattr\(current_user.*token_vendor_id",
                "Redundant token_vendor_id check - dependency guarantees this",
            ),
        ]

        for i, line in enumerate(lines, 1):
            # Skip comments
            stripped = line.strip()
            if stripped.startswith("#"):
                continue

            # Check for direct exception raising
            for pattern, message in exception_patterns:
                if pattern in line:
                    self._add_violation(
                        rule_id="API-003",
                        rule_name=rule["name"],
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message=message,
                        context=stripped[:80],
                        suggestion="Let dependencies or services handle validation and raise exceptions",
                    )

            # Check for redundant validation patterns
            for pattern, message in redundant_patterns:
                if re.search(pattern, line):
                    self._add_violation(
                        rule_id="API-003",
                        rule_name=rule["name"],
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message=message,
                        context=stripped[:80],
                        suggestion="Remove redundant check - auth dependency guarantees this attribute is present",
                    )
|
||
|
||
def _check_endpoint_authentication(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""API-004: Check authentication on endpoints
|
||
|
||
Automatically skips:
|
||
- Auth endpoint files (*/auth.py) - login/logout are intentionally public
|
||
- Endpoints marked with '# public' comment
|
||
"""
|
||
rule = self._get_rule("API-004")
|
||
if not rule:
|
||
return
|
||
|
||
# Skip auth endpoint files entirely - they are intentionally public
|
||
file_path_str = str(file_path)
|
||
if file_path_str.endswith("/auth.py") or file_path_str.endswith("\\auth.py"):
|
||
return
|
||
|
||
# This is a warning-level check
|
||
# Look for endpoints without proper authentication
|
||
# Valid auth patterns:
|
||
# - Depends(get_current_*) - direct user authentication
|
||
# - Depends(require_vendor_*) - vendor permission dependencies
|
||
# - Depends(require_any_vendor_*) - any permission check
|
||
# - Depends(require_all_vendor*) - all permissions check
|
||
# - Depends(get_user_permissions) - permission fetching
|
||
auth_patterns = [
|
||
"Depends(get_current_",
|
||
"Depends(require_vendor_",
|
||
"Depends(require_any_vendor_",
|
||
"Depends(require_all_vendor",
|
||
"Depends(get_user_permissions",
|
||
]
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
if "@router." in line and (
|
||
"post" in line or "put" in line or "delete" in line
|
||
):
|
||
# Check previous line and next 15 lines for auth or public marker
|
||
# (increased from 5 to handle multi-line decorators and long function signatures)
|
||
has_auth = False
|
||
is_public = False
|
||
# i is 1-indexed, lines is 0-indexed
|
||
# So lines[i-1] is the decorator line, lines[i-2] is the line before
|
||
start_idx = max(0, i - 2) # Line before decorator (1-2 lines up)
|
||
end_idx = i + 15 # 15 lines after decorator
|
||
context_lines = lines[start_idx:end_idx]
|
||
|
||
for ctx_line in context_lines:
|
||
# Check for any valid auth pattern
|
||
if any(pattern in ctx_line for pattern in auth_patterns):
|
||
has_auth = True
|
||
break
|
||
# Check for public endpoint markers
|
||
if (
|
||
"# public" in ctx_line.lower()
|
||
or "# noqa: api-004" in ctx_line.lower()
|
||
):
|
||
is_public = True
|
||
break
|
||
|
||
if (
|
||
not has_auth
|
||
and not is_public
|
||
and "include_in_schema=False" not in " ".join(lines[i : i + 15])
|
||
):
|
||
# Determine appropriate suggestion based on file path
|
||
file_path_str = str(file_path)
|
||
if "/vendor/" in file_path_str:
|
||
suggestion = "Add Depends(get_current_vendor_api) or permission dependency, or mark as '# public'"
|
||
elif "/admin/" in file_path_str:
|
||
suggestion = (
|
||
"Add Depends(get_current_admin_api), or mark as '# public'"
|
||
)
|
||
elif "/shop/" in file_path_str:
|
||
suggestion = "Add Depends(get_current_customer_api), or mark as '# public'"
|
||
else:
|
||
suggestion = "Add authentication dependency or mark as '# public' if intentionally unauthenticated"
|
||
|
||
self._add_violation(
|
||
rule_id="API-004",
|
||
rule_name=rule["name"],
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Endpoint may be missing authentication",
|
||
context=line.strip(),
|
||
suggestion=suggestion,
|
||
)
|
||
|
||
def _check_vendor_scoping(self, file_path: Path, content: str, lines: list[str]) -> None:
    """API-005: Check that vendor/shop endpoints scope queries to vendor_id.

    Warning-level heuristic: flags the first `.query(...).all()` call in a
    vendor/shop API file that has no 'vendor_id' in the surrounding lines.
    Reports at most one violation per file; `noqa: api-005` anywhere in the
    file opts it out.
    """
    file_path_str = str(file_path)

    # Only check vendor and shop API files
    if "/vendor/" not in file_path_str and "/shop/" not in file_path_str:
        return

    # Skip auth files
    if file_path_str.endswith("auth.py"):
        return

    # Check for file-level noqa opt-out
    if "noqa: api-005" in content.lower():
        return

    # Look for database queries without vendor_id filtering
    # This is a heuristic check - not perfect
    for i, line in enumerate(lines, 1):
        # Look for .query().all() patterns without vendor filtering
        if ".query(" in line and ".all()" in line:
            # Check if vendor_id filter is nearby: i is 1-indexed, so the
            # window lines[i-5 : i+3] covers ~4 lines above and 3 below.
            context_start = max(0, i - 5)
            context_end = min(len(lines), i + 3)
            context_lines = "\n".join(lines[context_start:context_end])

            # NOTE(review): the second test is redundant - 'vendor_id' is a
            # substring of 'token_vendor_id', so the first test already
            # covers it.
            if (
                "vendor_id" not in context_lines
                and "token_vendor_id" not in context_lines
            ):
                self._add_violation(
                    rule_id="API-005",
                    rule_name="Multi-tenant queries must scope to vendor_id",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Query in vendor/shop endpoint may not be scoped to vendor_id",
                    context=line.strip()[:60],
                    suggestion="Add .filter(Model.vendor_id == vendor_id) to ensure tenant isolation",
                )
                return  # Only report once per file
|
||
|
||
def _validate_service_layer(self, target_path: Path) -> None:
    """Run the service-layer rules (SVC-001 to SVC-007) over app/services/."""
    print("🔧 Validating service layer...")

    discovered = list(target_path.glob("app/services/**/*.py"))
    self.result.files_checked += len(discovered)

    # Per-file checks, in rule order: SVC-001, SVC-002, SVC-003, SVC-005, SVC-006.
    checks = (
        self._check_no_http_exception_in_services,
        self._check_service_exceptions,
        self._check_db_session_parameter,
        self._check_service_vendor_scoping,
        self._check_no_commit_in_services,
    )

    for service_file in discovered:
        if self._should_ignore_file(service_file):
            continue

        text = service_file.read_text()
        text_lines = text.split("\n")
        for check in checks:
            check(service_file, text, text_lines)
|
||
|
||
def _check_service_vendor_scoping(
    self, file_path: Path, content: str, lines: list[str]
) -> None:
    """SVC-005: Check that service queries are scoped to vendor_id in multi-tenant context.

    INFO-level heuristic: a `.query(...).all()` call with no 'vendor_id'
    nearby and no vendor_id parameter on the enclosing method is reported.
    Admin services and files with `noqa: svc-005` are exempt; at most one
    violation is reported per file.
    """
    # Skip admin services that may legitimately access all vendors
    file_path_str = str(file_path)
    if "admin" in file_path_str.lower():
        return

    if "noqa: svc-005" in content.lower():
        return

    # Look for patterns that suggest unscoped queries
    for i, line in enumerate(lines, 1):
        # Check for .all() queries that might not be scoped
        if ".query(" in line and ".all()" in line:
            # Check surrounding lines for vendor filtering
            context_start = max(0, i - 5)
            context_end = min(len(lines), i + 3)
            context_lines = "\n".join(lines[context_start:context_end])

            if "vendor_id" not in context_lines:
                # Check if the enclosing method takes vendor_id as parameter.
                # Compare against None explicitly: index 0 (a def on the
                # file's first line) is falsy but still a valid position, so
                # a bare `if method_start:` would wrongly skip it.
                method_start = self._find_method_start(lines, i)
                if method_start is not None:
                    method_sig = lines[method_start]
                    if "vendor_id" not in method_sig:
                        self._add_violation(
                            rule_id="SVC-005",
                            rule_name="Service must scope queries to vendor_id",
                            severity=Severity.INFO,
                            file_path=file_path,
                            line_number=i,
                            message="Query may not be scoped to vendor_id",
                            context=line.strip()[:60],
                            suggestion="Add vendor_id parameter and filter queries by it",
                        )
                        return
|
||
|
||
def _find_method_start(self, lines: list[str], current_line: int) -> int | None:
|
||
"""Find the start line of the method containing current_line"""
|
||
for i in range(current_line - 1, max(0, current_line - 30), -1):
|
||
if "def " in lines[i]:
|
||
return i
|
||
return None
|
||
|
||
def _check_no_http_exception_in_services(
    self, file_path: Path, content: str, lines: list[str]
) -> None:
    """SVC-001: services stay HTTP-agnostic - no HTTPException raise or import."""
    rule = self._get_rule("SVC-001")
    if not rule:
        return

    import_markers = (
        "from fastapi import HTTPException",
        "from fastapi.exceptions import HTTPException",
    )

    for line_no, raw in enumerate(lines, 1):
        # Raising HTTPException couples the service to the transport layer.
        if "raise HTTPException" in raw:
            self._add_violation(
                rule_id="SVC-001",
                rule_name=rule["name"],
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=line_no,
                message="Service raises HTTPException - use domain exceptions instead",
                context=raw.strip(),
                suggestion="Create custom exception class (e.g., VendorNotFoundError) and raise that",
            )

        # Even importing it is a smell - flag the import line too.
        if any(marker in raw for marker in import_markers):
            self._add_violation(
                rule_id="SVC-001",
                rule_name=rule["name"],
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=line_no,
                message="Service imports HTTPException - services should not know about HTTP",
                context=raw.strip(),
                suggestion="Remove HTTPException import and use domain exceptions",
            )
|
||
|
||
def _check_service_exceptions(
    self, file_path: Path, content: str, lines: list[str]
) -> None:
    """SVC-002: flag `raise Exception(...)` - services should raise specific domain exceptions."""
    rule = self._get_rule("SVC-002")
    if not rule:
        return

    generic_raise = re.compile(r"\s*raise Exception\(")
    for line_no, raw in enumerate(lines, 1):
        if generic_raise.match(raw) is None:
            continue
        self._add_violation(
            rule_id="SVC-002",
            rule_name=rule["name"],
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=line_no,
            message="Service raises generic Exception - use specific domain exception",
            context=raw.strip(),
            suggestion="Create custom exception class for this error case",
        )
|
||
|
||
def _check_db_session_parameter(
    self, file_path: Path, content: str, lines: list[str]
) -> None:
    """SVC-003: services must receive the DB session as a parameter, not create one."""
    rule = self._get_rule("SVC-003")
    if not rule:
        return

    # A `SessionLocal()` call inside a service means it builds its own
    # session; the `class` guard avoids matching class-definition lines.
    for line_no, raw in enumerate(lines, 1):
        if "SessionLocal()" not in raw or "class" in raw:
            continue
        self._add_violation(
            rule_id="SVC-003",
            rule_name=rule["name"],
            severity=Severity.ERROR,
            file_path=file_path,
            line_number=line_no,
            message="Service creates database session internally",
            context=raw.strip(),
            suggestion="Accept db: Session as method parameter instead",
        )
|
||
|
||
def _check_no_commit_in_services(
    self, file_path: Path, content: str, lines: list[str]
) -> None:
    """SVC-006: Services should NOT call db.commit().

    Transaction control belongs at the API endpoint level.
    Exception: log_service.py may need immediate commits for audit logs.
    Exception: Background task processing may need incremental commits.
    """
    rule = self._get_rule("SVC-006")
    if not rule:
        return

    # log_service.py is allowed to commit (audit logs).
    if "log_service.py" in str(file_path):
        return

    # File-level opt-out.
    if "noqa: svc-006" in content.lower():
        return

    for line_no, raw in enumerate(lines, 1):
        if "db.commit()" not in raw:
            continue

        text = raw.strip()
        # Ignore commented-out code and line-level opt-outs.
        if text.startswith("#") or "noqa: svc-006" in raw.lower():
            continue

        self._add_violation(
            rule_id="SVC-006",
            rule_name=rule["name"],
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=line_no,
            message="Service calls db.commit() - transaction control should be at endpoint level",
            context=text,
            suggestion="Remove db.commit() from service; let endpoint handle transaction. For background tasks, add # noqa: SVC-006",
        )
|
||
|
||
def _validate_models(self, target_path: Path) -> None:
    """Validate model rules (MDL-001 to MDL-004).

    Walks models/database/ for SQLAlchemy checks (MDL-001, MDL-004) and
    models/schema/ for Pydantic checks (MDL-002 inline, MDL-003).
    """
    print("📦 Validating models...")

    # Validate database models
    db_model_files = list(target_path.glob("models/database/**/*.py"))
    self.result.files_checked += len(db_model_files)

    for file_path in db_model_files:
        if self._should_ignore_file(file_path) or file_path.name == "__init__.py":
            continue

        content = file_path.read_text()
        lines = content.split("\n")

        # MDL-001: Check for SQLAlchemy Base inheritance
        self._check_sqlalchemy_base_usage(file_path, content, lines)

        # MDL-004: Check table naming (plural - the rule enforces plural names)
        self._check_table_naming(file_path, content)

    # Validate schema (Pydantic) models
    schema_model_files = list(target_path.glob("models/schema/**/*.py"))
    self.result.files_checked += len(schema_model_files)

    for file_path in schema_model_files:
        if self._should_ignore_file(file_path) or file_path.name == "__init__.py":
            continue

        content = file_path.read_text()
        lines = content.split("\n")

        # MDL-002: Check for mixing SQLAlchemy Base and Pydantic BaseModel
        # in a single class definition (inline check, no helper)
        for i, line in enumerate(lines, 1):
            if re.search(r"class.*\(Base.*,.*BaseModel.*\)", line):
                self._add_violation(
                    rule_id="MDL-002",
                    rule_name="Separate SQLAlchemy and Pydantic models",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message="Model mixes SQLAlchemy Base and Pydantic BaseModel",
                    context=line.strip(),
                    suggestion="Keep SQLAlchemy models and Pydantic models separate",
                )

        # MDL-003: Check for from_attributes in Pydantic response models
        self._check_pydantic_from_attributes(file_path, content)
|
||
|
||
def _check_sqlalchemy_base_usage(
    self, file_path: Path, content: str, lines: list[str]
) -> None:
    """MDL-001: Check that database models inherit from SQLAlchemy Base.

    Args:
        file_path: Model module being checked.
        content: Full file text (scanned with a class-definition regex).
        lines: Unused; kept for checker signature parity.
    """
    # Look for class definitions that declare an explicit base list
    class_pattern = re.compile(r"class\s+(\w+)\s*\(([^)]+)\):")

    for match in class_pattern.finditer(content):
        class_name = match.group(1)
        base_classes = match.group(2)
        # 1-indexed line number of the class statement
        line_num = content[: match.start()].count("\n") + 1

        # Skip non-model classes (mixins, helpers, etc.)
        if class_name.endswith("Mixin") or class_name.startswith("_"):
            continue

        # Skip Enum classes - they inherit from (str, Enum) or (Enum)
        if "Enum" in base_classes:
            continue

        # Check if it inherits from Base (or a db.Model base)
        if "Base" not in base_classes and "db.Model" not in base_classes:
            # Check if it might be a model - MUST have __tablename__ in its own class body
            class_body_start = match.end()
            # Find the end of this class body (next class definition or EOF)
            next_class_match = class_pattern.search(content, class_body_start)
            if next_class_match:
                class_body = content[class_body_start : next_class_match.start()]
            else:
                class_body = content[class_body_start:]

            # Only flag if this specific class has __tablename__ (definite model indicator)
            if "__tablename__" in class_body:
                self._add_violation(
                    rule_id="MDL-001",
                    rule_name="Database models must use SQLAlchemy Base",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=line_num,
                    message=f"Model '{class_name}' appears to be a database model but doesn't inherit from Base",
                    context=f"class {class_name}({base_classes})",
                    suggestion="Inherit from Base: class MyModel(Base):",
                )
|
||
|
||
def _check_table_naming(self, file_path: Path, content: str) -> None:
    """MDL-004: Check that table names are plural (industry standard)

    Following Rails, Django, Laravel conventions - tables represent collections
    of entities so should use plural names: users, orders, products.
    """
    # Look for __tablename__ definitions
    tablename_pattern = re.compile(r'__tablename__\s*=\s*["\'](\w+)["\']')

    # Common singular names that should be plural. Hoisted out of the match
    # loop - the previous version rebuilt this dict for every table found.
    singular_indicators = {
        "vendor": "vendors",
        "product": "products",
        "order": "orders",
        "user": "users",
        "customer": "customers",
        "category": "categories",
        "payment": "payments",
        "setting": "settings",
        "item": "items",
        "image": "images",
        "role": "roles",
        "company": "companies",
    }

    for match in tablename_pattern.finditer(content):
        table_name = match.group(1)
        # 1-indexed line number of the __tablename__ assignment
        line_num = content[: match.start()].count("\n") + 1

        if table_name in singular_indicators:
            plural = singular_indicators[table_name]
            self._add_violation(
                rule_id="MDL-004",
                rule_name="Database tables use plural names",
                severity=Severity.WARNING,
                file_path=file_path,
                line_number=line_num,
                message=f"Table name '{table_name}' should be plural (industry standard)",
                context=f'__tablename__ = "{table_name}"',
                suggestion=f'Use __tablename__ = "{plural}"',
            )
|
||
|
||
def _check_pydantic_from_attributes(self, file_path: Path, content: str) -> None:
    """MDL-003: Check that Pydantic response models have from_attributes.

    Heuristic: for each class named *Response*/*Out*/*Read* that looks like
    an ORM-entity response (an `id: int` field plus timestamps or several
    typical ORM fields) but carries no from_attributes/orm_mode config,
    emit a WARNING. Stats/aggregate and wrapper/list responses are skipped.
    """
    # Look for response model classes that likely represent ORM entities
    # Skip stats/aggregate models which use plain dicts
    response_pattern = re.compile(
        r"class\s+(\w*Response\w*|\w*Out\w*|\w*Read\w*)\s*\(([^)]+)\):"
    )

    for match in response_pattern.finditer(content):
        class_name = match.group(1)
        base_classes = match.group(2)
        class_start = match.end()
        line_num = content[: match.start()].count("\n") + 1

        # Skip stats/aggregate response models - they use plain dicts, not ORM objects
        stats_patterns = [
            "Stats",
            "Analytics",
            "Dashboard",
            "Summary",
            "Count",
            "Total",
        ]
        if any(p in class_name for p in stats_patterns):
            continue

        # Get class body - find the next class definition to properly scope
        next_class = re.search(r"\nclass\s+\w+", content[class_start:])
        if next_class:
            class_body = content[class_start : class_start + next_class.start()]
        else:
            # No following class: cap the scan at 800 characters
            class_body = content[class_start : class_start + 800]

        # Check for from_attributes in Config or model_config
        # Supports multiple formats:
        # - model_config = {"from_attributes": True}
        # - model_config = ConfigDict(from_attributes=True)
        # - class Config: from_attributes = True
        # - orm_mode = True (Pydantic v1)
        # NOTE(review): "True" only needs to appear somewhere in the body,
        # not adjacent to the key, so "from_attributes = False" plus any
        # other "True" in the body would also pass this check.
        has_from_attributes = any(
            [
                "from_attributes" in class_body and "True" in class_body,
                "orm_mode" in class_body and "True" in class_body,
            ]
        )

        if not has_from_attributes:
            # Only flag if it looks like an ORM entity response:
            # - Has an 'id: int' field as a direct attribute (typical ORM primary key)
            # - AND has at least one other typical ORM field (created_at, updated_at, etc.)
            # Skip wrapper/list responses that just contain other models
            has_id_field = re.search(r"^\s+id\s*:\s*int", class_body, re.MULTILINE)
            has_timestamp = (
                "created_at:" in class_body or "updated_at:" in class_body
            )

            # Skip list/wrapper responses (contain 'list[' or just have message/total fields)
            is_wrapper = any(
                [
                    ": list[" in class_body
                    and "id:" not in class_body.split(": list[")[0],
                    re.search(r"^\s+items\s*:", class_body, re.MULTILINE),
                    re.search(r"^\s+total\s*:", class_body, re.MULTILINE)
                    and not has_id_field,
                ]
            )

            # Only flag if it has id AND timestamps, or id and multiple other ORM-like fields
            looks_like_orm = has_id_field and (
                has_timestamp
                or (
                    # Has multiple fields suggesting ORM entity
                    sum(
                        [
                            "vendor_id:" in class_body,
                            "user_id:" in class_body,
                            "is_active:" in class_body,
                            "email:" in class_body,
                        ]
                    )
                    >= 2
                )
            )

            if "BaseModel" in base_classes and looks_like_orm and not is_wrapper:
                self._add_violation(
                    rule_id="MDL-003",
                    rule_name="Pydantic response models must use from_attributes",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=line_num,
                    message=f"Response model '{class_name}' may need from_attributes for ORM compatibility",
                    context=f"class {class_name}({base_classes})",
                    suggestion="Add 'model_config = ConfigDict(from_attributes=True)' or 'class Config: from_attributes = True'",
                )
|
||
|
||
def _validate_exceptions(self, target_path: Path) -> None:
    """Validate exception handling patterns (EXC-002, EXC-004, EXC-005)."""
    print("⚠️ Validating exception handling...")

    py_files = list(target_path.glob("**/*.py"))

    for file_path in py_files:
        if self._should_ignore_file(file_path):
            continue

        content = file_path.read_text()
        lines = content.split("\n")

        # EXC-002: Check for bare except clauses
        for i, line in enumerate(lines, 1):
            if re.match(r"\s*except\s*:", line):
                self._add_violation(
                    rule_id="EXC-002",
                    rule_name="No bare except clauses",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message="Bare except clause catches all exceptions including system exits",
                    context=line.strip(),
                    suggestion="Specify exception type: except ValueError: or except Exception:",
                )

    # EXC-004: Check exception inheritance in exceptions module
    exception_files = list(target_path.glob("app/exceptions/**/*.py"))
    for file_path in exception_files:
        # Package init and handler modules are exempt from the hierarchy rule
        if "__init__" in file_path.name or "handler" in file_path.name:
            continue
        content = file_path.read_text()
        self._check_exception_inheritance(file_path, content)

    # EXC-005: Check exception handler registration in main.py
    main_py = target_path / "app" / "main.py"
    if main_py.exists():
        content = main_py.read_text()
        if (
            "setup_exception_handlers" not in content
            and "exception_handler" not in content
        ):
            self._add_violation(
                rule_id="EXC-005",
                rule_name="Exception handler must be registered",
                severity=Severity.ERROR,
                file_path=main_py,
                line_number=1,
                message="No exception handler registration found in main.py",
                context="app/main.py",
                suggestion="Add setup_exception_handlers(app) or register exception handlers",
            )
|
||
|
||
def _check_exception_inheritance(self, file_path: Path, content: str) -> None:
    """EXC-004: Check that custom exceptions inherit from WizamartException.

    WARNING-level: flags any class named *Exception or *Error whose base
    list contains none of the approved base-class names.
    """
    # Look for class definitions that look like exceptions
    exception_pattern = re.compile(r"class\s+(\w+Exception|\w+Error)\s*\(([^)]+)\)")

    for match in exception_pattern.finditer(content):
        class_name = match.group(1)
        base_classes = match.group(2)
        # 1-indexed line number of the class statement
        line_num = content[: match.start()].count("\n") + 1

        # Check if it inherits from an appropriate base
        valid_bases = [
            "WizamartException",
            "ResourceNotFoundException",
            "ValidationException",
            "AuthenticationException",
            "AuthorizationException",
            "ConflictException",
            "Exception",  # Allow base Exception for some cases
        ]

        # Substring match: any approved base appearing in the base list counts
        has_valid_base = any(base in base_classes for base in valid_bases)
        if not has_valid_base:
            self._add_violation(
                rule_id="EXC-004",
                rule_name="Domain exceptions must inherit from WizamartException",
                severity=Severity.WARNING,
                file_path=file_path,
                line_number=line_num,
                message=f"Exception {class_name} should inherit from WizamartException hierarchy",
                context=f"class {class_name}({base_classes})",
                suggestion="Inherit from WizamartException or one of its subclasses",
            )
|
||
|
||
def _validate_naming_conventions(self, target_path: Path) -> None:
    """Run the naming-convention rules (NAM-001 to NAM-005)."""
    print("📛 Validating naming conventions...")

    # NAM-001: API route files are plural (vendors.py, orders.py, ...).
    for api_file in target_path.glob("app/api/v1/**/*.py"):
        if api_file.name not in ("__init__.py", "auth.py", "health.py"):
            self._check_api_file_naming(api_file)

    # NAM-002: service files are singular with a _service suffix.
    for service_file in target_path.glob("app/services/**/*.py"):
        if service_file.name != "__init__.py":
            self._check_service_file_naming(service_file)

    # NAM-003: model files are singular.
    for model_file in target_path.glob("models/**/*.py"):
        if model_file.name != "__init__.py":
            self._check_model_file_naming(model_file)

    # NAM-004 / NAM-005: consistent domain terminology across app code.
    for py_file in target_path.glob("app/**/*.py"):
        if self._should_ignore_file(py_file):
            continue
        self._check_terminology_consistency(py_file, py_file.read_text())
|
||
|
||
def _check_api_file_naming(self, file_path: Path) -> None:
    """NAM-001: API files must use plural names (vendors.py, not vendor.py)."""
    # Known singular stems mapped to the plural file name they should use.
    expected_plural = {
        "vendor": "vendors",
        "product": "products",
        "order": "orders",
        "user": "users",
        "category": "categories",
        "import": "imports",
        "export": "exports",
        "customer": "customers",
        "cart": "carts",
        "payment": "payments",
        "setting": "settings",
    }

    stem = file_path.stem
    plural = expected_plural.get(stem)
    if plural is None:
        return

    self._add_violation(
        rule_id="NAM-001",
        rule_name="API files use PLURAL names",
        severity=Severity.WARNING,
        file_path=file_path,
        line_number=1,
        message=f"API file '{stem}.py' should use plural form",
        context=file_path.name,
        suggestion=f"Rename to '{plural}.py'",
    )
|
||
|
||
def _check_service_file_naming(self, file_path: Path) -> None:
    """NAM-002: Check that service files use singular + _service suffix.

    Files may opt out with a `noqa: nam-002` comment anywhere in the file.
    """
    # Check for noqa comment
    try:
        content = file_path.read_text()
        if "noqa: nam-002" in content.lower():
            return
    except Exception:
        # Unreadable file: fall through and judge by the name alone.
        pass

    name = file_path.stem

    # Check for _service suffix
    if not name.endswith("_service"):
        self._add_violation(
            rule_id="NAM-002",
            rule_name="Service files use SINGULAR + 'service' suffix",
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=1,
            message=f"Service file '{name}.py' should end with '_service'",
            context=file_path.name,
            suggestion=f"Rename to '{name}_service.py' or ensure it's a service file",
        )
        return

    # Check if the base name is plural (should be singular).
    # removesuffix strips only the trailing "_service"; str.replace would
    # also rewrite an occurrence earlier in the name. An explicit mapping
    # replaces the old rstrip("s") footgun (rstrip strips ALL trailing
    # 's' characters) and the "categories" special case.
    base_name = name.removesuffix("_service")
    plural_to_singular = {
        "vendors": "vendor",
        "products": "product",
        "orders": "order",
        "users": "user",
        "categories": "category",
        "customers": "customer",
        "payments": "payment",
    }
    singular = plural_to_singular.get(base_name)
    if singular is not None:
        self._add_violation(
            rule_id="NAM-002",
            rule_name="Service files use SINGULAR + 'service' suffix",
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=1,
            message=f"Service file '{name}.py' should use singular form",
            context=file_path.name,
            suggestion=f"Rename to '{singular}_service.py'",
        )
|
||
|
||
def _check_model_file_naming(self, file_path: Path) -> None:
    """NAM-003: model files must use singular names (vendor.py, not vendors.py)."""
    # Plural stems we recognise, mapped to the singular name they should use.
    singular_for = {
        "vendors": "vendor",
        "products": "product",
        "orders": "order",
        "users": "user",
        "categories": "category",
        "customers": "customer",
        "payments": "payment",
        "settings": "setting",
    }

    stem = file_path.stem
    if stem not in singular_for:
        return

    self._add_violation(
        rule_id="NAM-003",
        rule_name="Model files use SINGULAR names",
        severity=Severity.WARNING,
        file_path=file_path,
        line_number=1,
        message=f"Model file '{stem}.py' should use singular form",
        context=file_path.name,
        suggestion=f"Rename to '{singular_for[stem]}.py'",
    )
|
||
|
||
def _check_terminology_consistency(self, file_path: Path, content: str) -> None:
    """NAM-004 & NAM-005: Check for inconsistent terminology.

    NAM-004: prefer 'vendor_id' over 'shop_id' outside shop-specific code.
    NAM-005: prefer 'inventory' over 'stock'.
    Each rule reports at most once per file; a '# noqa' on the line opts out.
    """
    lines = content.split("\n")

    # NAM-004: Check for 'shop_id' (should be vendor_id)
    # Skip shop-specific files where shop_id might be legitimate.
    # Word boundary avoids matching names like 'letzshop_id'.
    shop_id_pattern = re.compile(r"\bshop_id\b")
    if "/shop/" not in str(file_path):
        for i, line in enumerate(lines, 1):
            if shop_id_pattern.search(line) and "# noqa" not in line.lower():
                # Avoid false positives for legitimate shop context
                if "shop_service" not in str(file_path):
                    self._add_violation(
                        rule_id="NAM-004",
                        rule_name="Use 'vendor' not 'shop'",
                        severity=Severity.INFO,
                        file_path=file_path,
                        line_number=i,
                        message="Consider using 'vendor_id' instead of 'shop_id'",
                        context=line.strip()[:60],
                        suggestion="Use vendor_id for multi-tenant context",
                    )
                    # Report once per file, then fall through so the NAM-005
                    # scan still runs (the previous `return` here skipped it
                    # entirely whenever NAM-004 fired).
                    break

    # NAM-005: Check for 'stock' (should be inventory)
    for i, line in enumerate(lines, 1):
        if "stock_service" in line or "stock_id" in line:
            if "# noqa" not in line.lower():
                self._add_violation(
                    rule_id="NAM-005",
                    rule_name="Use 'inventory' not 'stock'",
                    severity=Severity.INFO,
                    file_path=file_path,
                    line_number=i,
                    message="Consider using 'inventory' instead of 'stock'",
                    context=line.strip()[:60],
                    suggestion="Use inventory_service or inventory_id for consistency",
                )
                return
|
||
|
||
def _validate_auth_patterns(self, target_path: Path) -> None:
    """Run the authentication-pattern rules (AUTH-001 to AUTH-004)."""
    print("🔐 Validating auth patterns...")

    # AUTH-003: the auth service must hash passwords before persisting them.
    auth_service = target_path / "app" / "services" / "auth_service.py"
    if auth_service.exists():
        service_text = auth_service.read_text()
        if "bcrypt" not in service_text and "hash" not in service_text.lower():
            self._add_violation(
                rule_id="AUTH-003",
                rule_name="Never store plain passwords",
                severity=Severity.ERROR,
                file_path=auth_service,
                line_number=1,
                message="Auth service may not be hashing passwords",
                context="auth_service.py",
                suggestion="Use bcrypt or similar library to hash passwords before storing",
            )

    # AUTH-004: vendor APIs use token vendor context; shop APIs use the
    # require_vendor_context dependency. Package inits and auth modules
    # are exempt in both areas.
    skip_names = {"__init__.py", "auth.py"}

    for vendor_file in target_path.glob("app/api/v1/vendor/**/*.py"):
        if vendor_file.name in skip_names:
            continue
        self._check_vendor_context_pattern(vendor_file, vendor_file.read_text())

    for shop_file in target_path.glob("app/api/v1/shop/**/*.py"):
        if shop_file.name in skip_names:
            continue
        self._check_shop_context_pattern(shop_file, shop_file.read_text())
|
||
|
||
def _check_vendor_context_pattern(self, file_path: Path, content: str) -> None:
    """AUTH-004: vendor APIs must take vendor identity from the JWT token.

    require_vendor_context() is the shop-side dependency; flag its first
    occurrence in a vendor API file. A file-level `noqa: auth-004` opts out.
    """
    if "noqa: auth-004" in content.lower():
        return

    if "require_vendor_context()" not in content:
        return

    for line_no, raw in enumerate(content.split("\n"), 1):
        if "require_vendor_context()" not in raw:
            continue
        self._add_violation(
            rule_id="AUTH-004",
            rule_name="Use correct vendor context pattern",
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=line_no,
            message="Vendor API should use token_vendor_id, not require_vendor_context()",
            context=raw.strip()[:60],
            suggestion="Use current_user.token_vendor_id from JWT token instead",
        )
        return
|
||
|
||
def _check_shop_context_pattern(self, file_path: Path, content: str):
    """AUTH-004: shop API endpoints should declare a vendor-context strategy.

    A shop module is considered covered when it references
    require_vendor_context, or marks endpoints as '# public' or
    '# authenticated' (customer auth includes vendor context). Otherwise
    each route decorator is inspected and flagged (INFO) when none of
    those markers appear in its vicinity.
    """
    # Per-file opt-out marker.
    if "noqa: auth-004" in content.lower():
        return

    # File-level markers that satisfy the rule for the whole module.
    covered = (
        "require_vendor_context" in content
        or "# public" in content
        or "# authenticated" in content
    )
    if covered or "@router." not in content:
        return

    lines = content.split("\n")
    for line_no, text in enumerate(lines, 1):
        if "@router." not in text:
            continue
        # Look at the decorator line plus the following ~10 lines for a
        # public/authenticated marker or an explicit vendor-context dep.
        nearby = "\n".join(lines[line_no - 1 : line_no + 10])
        if (
            "# public" not in nearby
            and "# authenticated" not in nearby
            and "require_vendor_context" not in nearby
        ):
            self._add_violation(
                rule_id="AUTH-004",
                rule_name="Shop endpoints need vendor context",
                severity=Severity.INFO,
                file_path=file_path,
                line_number=line_no,
                message="Shop endpoint may need vendor context dependency",
                context=text.strip()[:60],
                suggestion="Add Depends(require_vendor_context()) or mark as '# public'",
            )
    return
def _validate_middleware(self, target_path: Path):
    """Validate middleware patterns (MDW-001, MDW-002)."""
    print("🔌 Validating middleware...")

    middleware_dir = target_path / "middleware"
    if not middleware_dir.exists():
        return

    # MDW-001: middleware modules should be named as simple nouns,
    # e.g. 'auth.py' rather than 'auth_middleware.py'.
    for mw_file in middleware_dir.glob("*.py"):
        if mw_file.name == "__init__.py":
            continue
        if "_middleware" in mw_file.stem:
            self._add_violation(
                rule_id="MDW-001",
                rule_name="Middleware uses simple noun naming",
                severity=Severity.WARNING,
                file_path=mw_file,
                line_number=1,
                message="Middleware file should use simple noun naming",
                context=mw_file.name,
                suggestion=f"Rename to '{mw_file.stem.replace('_middleware', '')}.py'",
            )

    # MDW-002: the vendor context middleware must publish vendor state on
    # the request. Setting request.state.vendor (the full object) is
    # preferred since vendor_id is then reachable via request.state.vendor.id.
    vendor_context = middleware_dir / "vendor_context.py"
    if vendor_context.exists():
        if "request.state.vendor" not in vendor_context.read_text():
            self._add_violation(
                rule_id="MDW-002",
                rule_name="Vendor context middleware must set state",
                severity=Severity.ERROR,
                file_path=vendor_context,
                line_number=1,
                message="Vendor context middleware should set request.state.vendor",
                context="vendor_context.py",
                suggestion="Add 'request.state.vendor = vendor' in the middleware",
            )
def _validate_javascript(self, target_path: Path):
    """Run all JS-* rule checks over first-party JavaScript files."""
    print("🟨 Validating JavaScript...")

    # Admin, vendor and shared bundles are all first-party code we lint.
    js_files = (
        list(target_path.glob("static/admin/js/**/*.js"))
        + list(target_path.glob("static/vendor/js/**/*.js"))
        + list(target_path.glob("static/shared/js/**/*.js"))
    )
    self.result.files_checked += len(js_files)

    for file_path in js_files:
        # Skip bundled third-party libraries shipped under a vendor path.
        if "/vendor/" in str(file_path) and file_path.suffix == ".js":
            if any(marker in file_path.name for marker in (".min.js", "chart.", "alpine.")):
                continue

        content = file_path.read_text()
        lines = content.split("\n")

        # JS-001: console.* must go through the centralized logger.
        # init-*.js files run before the logger exists; files may also
        # opt out with a 'noqa: js-001' marker.
        skip_console_check = (
            file_path.name.startswith("init-")
            or "noqa: js-001" in content.lower()
        )
        if not skip_console_check:
            for line_no, text in enumerate(lines, 1):
                if not re.search(r"console\.(log|warn|error)", text):
                    continue
                # Comments, bootstrap banners and eslint-annotated lines are fine.
                if "//" in text or "✅" in text or "eslint-disable" in text:
                    continue
                self._add_violation(
                    rule_id="JS-001",
                    rule_name="Use centralized logger",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=line_no,
                    message="Use centralized logger instead of console",
                    context=text.strip()[:80],
                    suggestion="Use window.LogConfig.createLogger('moduleName')",
                )

        # JS-002: prefer the bare apiClient global over window.apiClient.
        for line_no, text in enumerate(lines, 1):
            if "window.apiClient" not in text:
                continue
            # Ignore occurrences that sit after a // comment marker.
            if "//" in text[: text.find("window.apiClient")]:
                continue
            self._add_violation(
                rule_id="JS-002",
                rule_name="Use lowercase apiClient",
                severity=Severity.WARNING,
                file_path=file_path,
                line_number=line_no,
                message="Use apiClient directly instead of window.apiClient",
                context=text.strip(),
                suggestion="Replace window.apiClient with apiClient",
            )

        # JS-003..JS-014: delegate to the focused per-rule helpers.
        self._check_alpine_data_spread(file_path, content, lines)       # JS-003
        self._check_alpine_current_page(file_path, content, lines)      # JS-004
        self._check_init_guard(file_path, content, lines)               # JS-005
        self._check_async_error_handling(file_path, content, lines)     # JS-006
        self._check_loading_state(file_path, content, lines)            # JS-007
        self._check_fetch_vs_api_client(file_path, content, lines)      # JS-008
        self._check_toast_usage(file_path, content, lines)              # JS-009
        self._check_platform_settings_usage(file_path, content, lines)  # JS-010
        self._check_pagination_structure(file_path, content, lines)     # JS-011
        self._check_api_prefix_usage(file_path, content, lines)         # JS-012
        self._check_parent_init_call(file_path, content, lines)         # JS-013
        self._check_vendor_api_paths(file_path, content, lines)         # JS-014
def _check_platform_settings_usage(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-010: pages with pagination must size pages via PlatformSettings."""
    # Bootstrap files and the settings page itself are exempt.
    if file_path.name in ("init-alpine.js", "init-api-client.js", "settings.js"):
        return

    # Recognised pagination shapes:
    # - standard:   pagination: { ... }
    # - named:      jobsPagination: { ... }, ordersPagination: { ... }
    # - separate:   ordersLimit, productsLimit, ...
    # - inline:     per_page: <number>
    pagination_patterns = (
        r"pagination:\s*\{",
        r"\w+Pagination:\s*\{",
        r"(orders|products|exceptions|jobs|items)Limit\s*:",
        r"per_page\s*:\s*\d+",
    )
    if not any(re.search(p, content) for p in pagination_patterns):
        return

    # Using PlatformSettings anywhere in the file satisfies the rule.
    if "PlatformSettings" in content:
        return

    # Flag the first line where pagination is configured (one report per file).
    for line_no, line in enumerate(lines, 1):
        if re.search(r"(pagination|Pagination|Limit|per_page)\s*[:{]", line):
            self._add_violation(
                rule_id="JS-010",
                rule_name="Use PlatformSettings for pagination",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=line_no,
                message="Page with pagination must use PlatformSettings.getRowsPerPage()",
                context=line.strip()[:80],
                suggestion="Add: if (window.PlatformSettings) { this.pagination.per_page = await window.PlatformSettings.getRowsPerPage(); }",
            )
            break
def _check_pagination_structure(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-011: pagination objects must use the canonical property names
    (per_page / pages, not page_size / pageSize / total_pages / totalPages).
    """
    if file_path.name in ("init-alpine.js",):
        return

    # Only files that define a pagination object are relevant.
    if not re.search(r"pagination:\s*\{", content):
        return

    # (regex, wrong name as reported, canonical replacement)
    wrong_patterns = [
        (r"page_size\s*:", "page_size", "per_page"),
        (r"pageSize\s*:", "pageSize", "per_page"),
        (r"total_pages\s*:", "total_pages", "pages"),
        (r"totalPages\s*:", "totalPages (in pagination object)", "pages"),
    ]

    # Track when we are inside a `pagination: { ... }` literal via brace
    # counting; only lines inside that block are checked.
    inside = False
    depth = 0

    for line_no, line in enumerate(lines, 1):
        # Opening line of a pagination object.
        if "pagination:" in line and "{" in line:
            inside = True
            depth = line.count("{") - line.count("}")
            continue

        if not inside:
            continue

        depth += line.count("{") - line.count("}")

        for pattern, wrong_name, correct_name in wrong_patterns:
            if re.search(pattern, line):
                self._add_violation(
                    rule_id="JS-011",
                    rule_name="Use standard pagination structure",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=line_no,
                    message=f"Use '{correct_name}' instead of '{wrong_name}' in pagination",
                    context=line.strip()[:80],
                    suggestion=f"Rename '{wrong_name}' to '{correct_name}'",
                )

        # Leave the block once all opened braces are closed.
        if depth <= 0:
            inside = False
def _check_api_prefix_usage(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-012: apiClient prepends /api/v1 itself, so endpoints must omit it."""
    # The client implementation files legitimately mention the prefix.
    if file_path.name in ("init-api-client.js", "api-client.js"):
        return

    call_with_prefix = re.compile(
        r"apiClient\.(get|post|put|delete|patch)\s*\(\s*['\"`]/api/v1"
    )

    for line_no, line in enumerate(lines, 1):
        stripped = line.strip()
        # Skip line comments and block-comment continuation lines.
        if stripped.startswith("//") or stripped.startswith("*"):
            continue

        if call_with_prefix.search(line):
            self._add_violation(
                rule_id="JS-012",
                rule_name="Do not include /api/v1 prefix",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=line_no,
                message="apiClient already adds /api/v1 prefix - remove it from the endpoint",
                context=line.strip()[:80],
                suggestion="Change '/api/v1/admin/...' to '/admin/...'",
            )
def _check_parent_init_call(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-013: vendor components that spread ...data() and override init()
    must delegate to the parent init.

    The vendor data() factory has an init() that extracts vendorCode from
    the URL, so skipping the parent init leaves vendorCode unset. Admin
    data() has no init(), which is why only vendor JS is checked.
    """
    if "/vendor/js/" not in str(file_path):
        return

    # Pages that never inherit the base layout component.
    if file_path.name in ("init-alpine.js", "login.js", "onboarding.js"):
        return

    inherits_base = "...data()" in content
    init_pattern = r"async\s+init\s*\(\s*\)"
    overrides_init = re.search(init_pattern, content) is not None
    if not (inherits_base and overrides_init):
        return

    # Any of these spellings counts as delegating to the parent init.
    delegates = (
        "data().init" in content
        or "parentInit" in content
        or "parent.init" in content
    )
    if delegates:
        return

    # Report once, at the first init() definition.
    for line_no, line in enumerate(lines, 1):
        if re.search(init_pattern, line):
            self._add_violation(
                rule_id="JS-013",
                rule_name="Components overriding init() must call parent init",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=line_no,
                message="Component with ...data() must call parent init() to set vendorCode",
                context=line.strip()[:80],
                suggestion="Add: const parentInit = data().init; if (parentInit) { await parentInit.call(this); }",
            )
            break
def _check_vendor_api_paths(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-014: vendor API calls must not embed vendorCode in the URL path.

    Vendor API endpoints authenticate via the JWT token, NOT URL path
    parameters; vendorCode belongs only in page (navigation) URLs.

    Incorrect: apiClient.get(`/vendor/${this.vendorCode}/orders`)
    Correct:   apiClient.get(`/vendor/orders`)

    Known exceptions that DO take vendorCode in the path:
    - /vendor/{vendor_code}                 (public vendor info)
    - /vendor/{vendor_code}/content-pages   (public content)
    """
    # Only vendor JS files are in scope.
    if "/vendor/js/" not in str(file_path):
        return

    # apiClient.<verb>( opening any quote style, followed by
    # /vendor/${vendorCode}/ or /vendor/${this.vendorCode}/.
    call_pattern = re.compile(
        r"apiClient\.(get|post|put|delete|patch)\s*\(\s*[`'\"]\/vendor\/\$\{(?:this\.)?vendorCode\}\/"
    )

    for line_no, line in enumerate(lines, 1):
        if not call_pattern.search(line):
            continue

        # content-pages endpoints use vendorCode for public content access.
        if "/content-pages" in line or "content-page" in file_path.name:
            continue

        self._add_violation(
            rule_id="JS-014",
            rule_name="Vendor API calls must not include vendorCode in path",
            severity=Severity.ERROR,
            file_path=file_path,
            line_number=line_no,
            message="Vendor API endpoints use JWT authentication, not URL path parameters",
            context=line.strip()[:100],
            suggestion="Remove vendorCode from path: /vendor/orders instead of /vendor/${this.vendorCode}/orders",
        )
def _validate_templates(self, target_path: Path):
    """Validate admin templates (TPL-001, TPL-008..TPL-012, FE-001..FE-008)."""
    print("📄 Validating templates...")

    template_files = list(target_path.glob("app/templates/admin/**/*.html"))
    self.result.files_checked += len(template_files)

    # Templates exempt from the TPL-001 extends requirement.
    tpl_001_exclusions = [
        "login.html",  # Standalone login page
        "errors/",     # Error pages extend errors/base.html
        "test-",       # Test templates
    ]

    for file_path in template_files:
        file_path_str = str(file_path)

        # Base templates and partials are building blocks, not pages.
        is_base_or_partial = (
            "base.html" in file_path.name or "partials" in file_path_str
        )
        # Macro definitions are exempt from the FE component rules.
        is_macro = (
            "shared/macros/" in file_path_str or "shared\\macros\\" in file_path_str
        )
        # The component showcase page intentionally inlines everything.
        is_components_page = "components.html" in file_path.name

        content = file_path.read_text()
        lines = content.split("\n")

        # FE-001..FE-008 apply only to regular page templates.
        run_fe_checks = not (is_base_or_partial or is_macro or is_components_page)
        if run_fe_checks:
            self._check_pagination_macro_usage(file_path, content, lines)      # FE-001
            self._check_icon_helper_usage(file_path, content, lines)           # FE-002
            self._check_alerts_macro_usage(file_path, content, lines)          # FE-003
            self._check_modals_macro_usage(file_path, content, lines)          # FE-004
            self._check_tables_macro_usage(file_path, content, lines)          # FE-005
            self._check_dropdowns_macro_usage(file_path, content, lines)       # FE-006
            self._check_headers_macro_usage(file_path, content, lines)         # FE-007
            self._check_number_stepper_macro_usage(file_path, content, lines)  # FE-008

        # TPL-008: call table_header() pattern (applies to every template).
        self._check_table_header_call_pattern(file_path, content, lines)

        # TPL-009: block names must be valid.
        if not is_base_or_partial:
            self._check_valid_block_names(file_path, content, lines)

        # TPL-010: Alpine variables referenced in the template must exist in
        # the matching JS file, resolved by stem:
        # app/templates/admin/messages.html -> static/admin/js/messages.js
        if run_fe_checks:
            js_file = target_path / f"static/admin/js/{file_path.stem}.js"
            if js_file.exists():
                self._check_alpine_template_vars(
                    file_path, content, lines, js_file.read_text()
                )

        # TPL-011 / TPL-012 apply to every template.
        self._check_deprecated_macros(file_path, content, lines)
        self._check_escaped_quotes_in_alpine(file_path, content, lines)

        # ---- TPL-001: page templates must extend admin/base.html ----
        if is_base_or_partial:
            continue
        if any(exclusion in file_path_str for exclusion in tpl_001_exclusions):
            continue
        # A 'standalone' or noqa marker within the first 5 lines opts out.
        first_lines = "\n".join(lines[:5]).lower()
        if "standalone" in first_lines or "noqa: tpl-001" in first_lines:
            continue

        extends_base = any(
            "{% extends" in line and "admin/base.html" in line for line in lines
        )
        if not extends_base:
            self._add_violation(
                rule_id="TPL-001",
                rule_name="Templates must extend base",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=1,
                message="Admin template does not extend admin/base.html",
                context=file_path.name,
                suggestion="Add {% extends 'admin/base.html' %} at the top, or add {# standalone #} if intentional",
            )
# =========================================================================
|
||
# LANGUAGE/I18N VALIDATION
|
||
# =========================================================================
|
||
|
||
def _validate_language_rules(self, target_path: Path):
    """Run every LANG-* (i18n) check against the target tree."""
    print("\n🌐 Validating language/i18n rules...")

    self._check_language_codes(target_path)                     # LANG-001
    self._check_template_language_inline_patterns(target_path)  # LANG-002, LANG-003
    self._check_language_selector_function(target_path)         # LANG-004
    self._check_language_js_patterns(target_path)               # LANG-005, LANG-006, LANG-008
    self._check_language_template_patterns(target_path)         # LANG-007, LANG-009
    self._check_translation_files(target_path)                  # LANG-010
def _check_language_codes(self, target_path: Path):
    """LANG-001: flag human-readable language names where ISO codes belong."""
    py_files = list(target_path.glob("app/**/*.py"))
    py_files.extend(target_path.glob("models/**/*.py"))

    # (wrong literal, suggested ISO replacement) in both quote styles.
    invalid_codes = [
        ("'english'", "'en'"),
        ("'french'", "'fr'"),
        ("'german'", "'de'"),
        ("'luxembourgish'", "'lb'"),
        ("'lux'", "'lb'"),
        ('"english"', '"en"'),
        ('"french"', '"fr"'),
        ('"german"', '"de"'),
        ('"luxembourgish"', '"lb"'),
        ('"lux"', '"lb"'),
    ]

    for file_path in py_files:
        if self._should_ignore_file(file_path):
            continue

        try:
            lines = file_path.read_text().split("\n")

            # LANGUAGE_NAMES / LANGUAGE_NAMES_EN dicts legitimately map
            # codes to display names, so their bodies are exempt.
            in_names_dict = False

            for line_no, line in enumerate(lines, 1):
                stripped = line.strip()
                if stripped.startswith("#"):
                    continue

                # Entering a LANGUAGE_NAMES* assignment (covers the _EN
                # variant too, since it contains the same substring).
                if "LANGUAGE_NAMES" in line and "=" in line:
                    in_names_dict = True
                if in_names_dict and stripped == "}":
                    in_names_dict = False
                    continue
                if in_names_dict:
                    continue

                for wrong, correct in invalid_codes:
                    if wrong in line.lower():
                        self._add_violation(
                            rule_id="LANG-001",
                            rule_name="Only use supported language codes",
                            severity=Severity.ERROR,
                            file_path=file_path,
                            line_number=line_no,
                            message=f"Invalid language code '{wrong}' - use ISO code instead",
                            context=stripped[:80],
                            suggestion=f"Change to: {correct}",
                        )
        except Exception:
            pass  # Best effort: skip files that can't be read.
def _check_template_language_inline_patterns(self, target_path: Path):
    """LANG-002/LANG-003: inline language-selector x-data and tojson quoting."""
    for file_path in target_path.glob("app/templates/**/*.html"):
        if self._should_ignore_file(file_path):
            continue

        # partials/header.html may use hardcoded arrays (allowed).
        if "partials/header.html" in str(file_path):
            continue

        try:
            lines = file_path.read_text().split("\n")

            for line_no, line in enumerate(lines, 1):
                # LANG-002: a complex inline x-data carrying language state
                # should be replaced by the languageSelector() factory.
                if 'x-data="{' in line or "x-data='{" in line:
                    nearby = "\n".join(lines[line_no - 1 : line_no + 10])
                    has_lang_props = (
                        "languages:" in nearby
                        or "languageNames:" in nearby
                        or "languageFlags:" in nearby
                    )
                    # A function-call form is the approved pattern.
                    if has_lang_props and "languageSelector(" not in nearby:
                        self._add_violation(
                            rule_id="LANG-002",
                            rule_name="Never use inline Alpine.js x-data for language selector",
                            severity=Severity.ERROR,
                            file_path=file_path,
                            line_number=line_no,
                            message="Complex language selector should use languageSelector() function",
                            context=line.strip()[:80],
                            suggestion="Use: x-data='languageSelector(\"{{ lang }}\", {{ langs|tojson }})'",
                        )

                # LANG-003: double-quoted x-data combined with |tojson emits
                # raw JSON double quotes that terminate the HTML attribute.
                if 'x-data="' in line and "|tojson" in line:
                    self._add_violation(
                        rule_id="LANG-003",
                        rule_name="Use single quotes for x-data with tojson",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=line_no,
                        message="Double-quoted x-data with |tojson breaks HTML (JSON has double quotes)",
                        context=line.strip()[:80],
                        suggestion="Use single quotes: x-data='func({{ data|tojson }})'",
                    )
        except Exception:
            pass  # Best effort: skip files that can't be read.
def _check_language_selector_function(self, target_path: Path):
    """LANG-004: languageSelector() must be defined and exported to window."""
    # The two entry points that must provide the selector factory.
    for file_path in (
        target_path / "static/shop/js/shop-layout.js",
        target_path / "static/vendor/js/init-alpine.js",
    ):
        if not file_path.exists():
            continue

        content = file_path.read_text()
        defined = "function languageSelector" in content
        exported = "window.languageSelector" in content

        if not defined:
            self._add_violation(
                rule_id="LANG-004",
                rule_name="Language selector function must be defined",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=1,
                message="Missing languageSelector function definition",
                context=file_path.name,
                suggestion="Add: function languageSelector(currentLang, enabledLanguages) { return {...}; }",
            )
        elif not exported:
            # Defined but never attached to window for template use.
            self._add_violation(
                rule_id="LANG-004",
                rule_name="Language selector function must be exported",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=1,
                message="languageSelector function not exported to window",
                context=file_path.name,
                suggestion="Add: window.languageSelector = languageSelector;",
            )
def _check_language_js_patterns(self, target_path: Path):
    """LANG-005/006/008: native names, flag codes, and language API usage in JS."""
    # LANG-005: display names must be the native spellings.
    english_names = [
        ("'fr': 'French'", "'fr': 'Français'"),
        ("'de': 'German'", "'de': 'Deutsch'"),
        ("'lb': 'Luxembourgish'", "'lb': 'Lëtzebuergesch'"),
        ('"fr": "French"', '"fr": "Français"'),
        ('"de": "German"', '"de": "Deutsch"'),
        ('"lb": "Luxembourgish"', '"lb": "Lëtzebuergesch"'),
    ]
    # LANG-006: flag icons use country codes (en -> gb, lb -> lu).
    wrong_flags = [
        ("'en': 'us'", "'en': 'gb'"),
        ("'en': 'en'", "'en': 'gb'"),
        ("'lb': 'lb'", "'lb': 'lu'"),
        ('"en": "us"', '"en": "gb"'),
        ('"en": "en"', '"en": "gb"'),
        ('"lb": "lb"', '"lb": "lu"'),
    ]

    for file_path in target_path.glob("static/**/js/**/*.js"):
        if self._should_ignore_file(file_path):
            continue

        lines = file_path.read_text().split("\n")
        for line_no, line in enumerate(lines, 1):
            stripped = line.strip()
            # Skip line and block comments.
            if stripped.startswith("//") or stripped.startswith("/*"):
                continue

            # Compare case- and whitespace-insensitively.
            normalized = line.lower().replace(" ", "")

            for wrong, correct in english_names:
                if wrong.lower().replace(" ", "") in normalized:
                    self._add_violation(
                        rule_id="LANG-005",
                        rule_name="Use native language names",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=line_no,
                        message="Use native language name instead of English",
                        context=stripped[:80],
                        suggestion=f"Change to: {correct}",
                    )

            for wrong, correct in wrong_flags:
                if wrong.lower().replace(" ", "") in normalized:
                    self._add_violation(
                        rule_id="LANG-006",
                        rule_name="Use correct flag codes",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=line_no,
                        message="Invalid flag code mapping",
                        context=stripped[:80],
                        suggestion=f"Change to: {correct}",
                    )

            # LANG-008: the language endpoint takes POST with a JSON body.
            if "/language/set" in line:
                if "method: 'GET'" in line or 'method: "GET"' in line:
                    self._add_violation(
                        rule_id="LANG-008",
                        rule_name="Language API endpoint must use POST method",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=line_no,
                        message="Language set endpoint must use POST, not GET",
                        context=stripped[:80],
                        suggestion="Change to: method: 'POST'",
                    )
                if "/language/set?" in line:
                    self._add_violation(
                        rule_id="LANG-008",
                        rule_name="Language API endpoint must use POST method",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=line_no,
                        message="Language set endpoint must use POST with body, not GET with query params",
                        context=stripped[:80],
                        suggestion="Use POST with JSON body: { language: lang }",
                    )
def _check_language_template_patterns(self, target_path: Path):
    """LANG-007/LANG-009: language defaults and vendor languages in templates."""
    for file_path in target_path.glob("app/templates/**/*.html"):
        if self._should_ignore_file(file_path):
            continue

        file_path_str = str(file_path)
        is_shop = "/shop/" in file_path_str or "\\shop\\" in file_path_str

        content = file_path.read_text()
        lines = content.split("\n")

        # LANG-009: request.state.language must carry a |default filter.
        for line_no, line in enumerate(lines, 1):
            if "request.state.language" not in line or "|default" in line:
                continue
            # Only flag when the expression actually terminates here
            # (not part of a longer attribute access).
            if re.search(r'request\.state\.language[\'"\s\}\)]', line):
                self._add_violation(
                    rule_id="LANG-009",
                    rule_name="Always provide language default",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=line_no,
                    message="request.state.language used without default",
                    context=line.strip()[:80],
                    suggestion='Use: request.state.language|default("fr")',
                )

        # LANG-007: a shop template with a language selector should derive
        # the enabled set from vendor.storefront_languages (per-file check).
        if (
            is_shop
            and "languageSelector" in content
            and "vendor.storefront_languages" not in content
            and ("enabled_langs" in content or "languages" in content)
        ):
            self._add_violation(
                rule_id="LANG-007",
                rule_name="Storefront must respect vendor languages",
                severity=Severity.WARNING,
                file_path=file_path,
                line_number=1,
                message="Shop template should use vendor.storefront_languages",
                context=file_path.name,
                suggestion="Use: {% set enabled_langs = vendor.storefront_languages if vendor else ['fr', 'de', 'en'] %}",
            )
def _check_translation_files(self, target_path: Path):
    """LANG-010: all four locale files must exist and parse as valid JSON.

    Reports a missing-file violation naming the specific locale file, and
    a JSON-syntax violation pointing at the offending line for files that
    fail to parse.
    """
    import json

    locales_dir = target_path / "static/locales"
    if not locales_dir.exists():
        return

    required_files = ["en.json", "fr.json", "de.json", "lb.json"]

    for filename in required_files:
        file_path = locales_dir / filename
        if not file_path.exists():
            self._add_violation(
                rule_id="LANG-010",
                rule_name="Translation files required",
                severity=Severity.ERROR,
                file_path=locales_dir,
                line_number=1,
                # Bug fix: the message previously lost its filename
                # placeholder, so the report never named which locale
                # file was missing.
                message=f"Missing translation file: {filename}",
                context=str(locales_dir),
                suggestion=f"Create {filename} with all required translation keys",
            )
            continue

        try:
            with open(file_path) as f:
                json.load(f)
        except json.JSONDecodeError as e:
            self._add_violation(
                rule_id="LANG-010",
                rule_name="Translation files must be valid JSON",
                severity=Severity.ERROR,
                file_path=file_path,
                # lineno is 1-based; fall back to 1 if unset.
                line_number=e.lineno or 1,
                message=f"Invalid JSON: {e.msg}",
                context=str(e),
                suggestion="Fix JSON syntax error (check for trailing commas, missing quotes)",
            )
def _get_rule(self, rule_id: str) -> dict[str, Any]:
|
||
"""Get rule configuration by ID"""
|
||
# Look in different rule categories
|
||
for category in [
|
||
"api_endpoint_rules",
|
||
"service_layer_rules",
|
||
"model_rules",
|
||
"exception_rules",
|
||
"javascript_rules",
|
||
"template_rules",
|
||
"frontend_component_rules",
|
||
"language_rules",
|
||
]:
|
||
rules = self.config.get(category, [])
|
||
for rule in rules:
|
||
if rule.get("id") == rule_id:
|
||
return rule
|
||
return None
|
||
|
||
def _should_ignore_file(self, file_path: Path) -> bool:
|
||
"""Check if file should be ignored"""
|
||
ignore_patterns = self.config.get("ignore", {}).get("files", [])
|
||
|
||
# Convert to string for easier matching
|
||
file_path_str = str(file_path)
|
||
|
||
for pattern in ignore_patterns:
|
||
# Check if any part of the path matches the pattern
|
||
if file_path.match(pattern):
|
||
return True
|
||
# Also check if pattern appears in the path (for .venv, venv, etc.)
|
||
if "/.venv/" in file_path_str or file_path_str.startswith(".venv/"):
|
||
return True
|
||
if "/venv/" in file_path_str or file_path_str.startswith("venv/"):
|
||
return True
|
||
|
||
return False
|
||
|
||
def _add_violation(
|
||
self,
|
||
rule_id: str,
|
||
rule_name: str,
|
||
severity: Severity,
|
||
file_path: Path,
|
||
line_number: int,
|
||
message: str,
|
||
context: str = "",
|
||
suggestion: str = "",
|
||
):
|
||
"""Add a violation to results"""
|
||
violation = Violation(
|
||
rule_id=rule_id,
|
||
rule_name=rule_name,
|
||
severity=severity,
|
||
file_path=file_path,
|
||
line_number=line_number,
|
||
message=message,
|
||
context=context,
|
||
suggestion=suggestion,
|
||
)
|
||
self.result.violations.append(violation)
|
||
|
||
def print_report(self):
|
||
"""Print validation report"""
|
||
print("\n" + "=" * 80)
|
||
print("📊 ARCHITECTURE VALIDATION REPORT")
|
||
print("=" * 80 + "\n")
|
||
|
||
# Group by severity
|
||
errors = [v for v in self.result.violations if v.severity == Severity.ERROR]
|
||
warnings = [v for v in self.result.violations if v.severity == Severity.WARNING]
|
||
infos = [v for v in self.result.violations if v.severity == Severity.INFO]
|
||
|
||
print(f"Files checked: {self.result.files_checked}")
|
||
print(
|
||
f"Findings: {len(errors)} errors, {len(warnings)} warnings, {len(infos)} info\n"
|
||
)
|
||
|
||
# Print file summary table if we have file results
|
||
if self.result.file_results:
|
||
self._print_summary_table()
|
||
|
||
if errors:
|
||
print(f"\n❌ ERRORS ({len(errors)}):")
|
||
print("-" * 80)
|
||
for violation in errors:
|
||
self._print_violation(violation)
|
||
|
||
if warnings:
|
||
print(f"\n⚠️ WARNINGS ({len(warnings)}):")
|
||
print("-" * 80)
|
||
for violation in warnings:
|
||
self._print_violation(violation)
|
||
|
||
if infos and self.verbose:
|
||
print(f"\nℹ️ INFO ({len(infos)}):")
|
||
print("-" * 80)
|
||
for violation in infos:
|
||
self._print_violation(violation)
|
||
|
||
# Summary
|
||
print("\n" + "=" * 80)
|
||
if self.result.has_errors():
|
||
print(
|
||
f"❌ VALIDATION FAILED - {len(errors)} error(s) must be fixed before committing"
|
||
)
|
||
print("=" * 80)
|
||
return 1
|
||
if self.result.has_warnings():
|
||
print(f"⚠️ VALIDATION PASSED WITH {len(warnings)} WARNING(S)")
|
||
print("=" * 80)
|
||
return 0
|
||
print("✅ VALIDATION PASSED")
|
||
print("=" * 80)
|
||
return 0
|
||
|
||
def _print_summary_table(self):
|
||
"""Print a summary table of file results"""
|
||
print("📋 FILE SUMMARY:")
|
||
print("-" * 80)
|
||
|
||
# Calculate column widths
|
||
max_path_len = max(
|
||
len(
|
||
str(
|
||
fr.file_path.relative_to(self.project_root)
|
||
if self.project_root in fr.file_path.parents
|
||
else fr.file_path
|
||
)
|
||
)
|
||
for fr in self.result.file_results
|
||
)
|
||
max_path_len = min(max_path_len, 55) # Cap at 55 chars
|
||
|
||
# Print header
|
||
print(
|
||
f" {'File':<{max_path_len}} {'Status':<8} {'Errors':<7} {'Warnings':<8}"
|
||
)
|
||
print(f" {'-' * max_path_len} {'-' * 8} {'-' * 7} {'-' * 8}")
|
||
|
||
# Print each file
|
||
for fr in self.result.file_results:
|
||
rel_path = (
|
||
fr.file_path.relative_to(self.project_root)
|
||
if self.project_root in fr.file_path.parents
|
||
else fr.file_path
|
||
)
|
||
path_str = str(rel_path)
|
||
if len(path_str) > max_path_len:
|
||
path_str = "..." + path_str[-(max_path_len - 3) :]
|
||
|
||
print(
|
||
f" {path_str:<{max_path_len}} "
|
||
f"{fr.status_icon} {fr.status:<5} "
|
||
f"{fr.errors:<7} "
|
||
f"{fr.warnings:<8}"
|
||
)
|
||
|
||
print("-" * 80)
|
||
|
||
# Print totals
|
||
total_errors = sum(fr.errors for fr in self.result.file_results)
|
||
total_warnings = sum(fr.warnings for fr in self.result.file_results)
|
||
passed = sum(1 for fr in self.result.file_results if fr.passed)
|
||
failed = len(self.result.file_results) - passed
|
||
|
||
print(
|
||
f"\n Total: {len(self.result.file_results)} files | "
|
||
f"✅ {passed} passed | ❌ {failed} failed | "
|
||
f"{total_errors} errors | {total_warnings} warnings\n"
|
||
)
|
||
|
||
def print_json(self) -> int:
|
||
"""Print validation results as JSON"""
|
||
import json
|
||
|
||
violations_json = []
|
||
for v in self.result.violations:
|
||
rel_path = (
|
||
str(v.file_path.relative_to(self.project_root))
|
||
if self.project_root in v.file_path.parents
|
||
else str(v.file_path)
|
||
)
|
||
violations_json.append(
|
||
{
|
||
"rule_id": v.rule_id,
|
||
"rule_name": v.rule_name,
|
||
"severity": v.severity.value,
|
||
"file_path": rel_path,
|
||
"line_number": v.line_number,
|
||
"message": v.message,
|
||
"context": v.context or "",
|
||
"suggestion": v.suggestion or "",
|
||
}
|
||
)
|
||
|
||
output = {
|
||
"files_checked": self.result.files_checked,
|
||
"total_violations": len(self.result.violations),
|
||
"errors": len(
|
||
[v for v in self.result.violations if v.severity == Severity.ERROR]
|
||
),
|
||
"warnings": len(
|
||
[v for v in self.result.violations if v.severity == Severity.WARNING]
|
||
),
|
||
"violations": violations_json,
|
||
}
|
||
|
||
print(json.dumps(output, indent=2))
|
||
|
||
return 1 if self.result.has_errors() else 0
|
||
|
||
def _print_violation(self, v: Violation):
|
||
"""Print a single violation"""
|
||
rel_path = (
|
||
v.file_path.relative_to(self.project_root)
|
||
if self.project_root in v.file_path.parents
|
||
else v.file_path
|
||
)
|
||
|
||
print(f"\n [{v.rule_id}] {v.rule_name}")
|
||
print(f" File: {rel_path}:{v.line_number}")
|
||
print(f" Issue: {v.message}")
|
||
|
||
if v.context and self.verbose:
|
||
print(f" Context: {v.context}")
|
||
|
||
if v.suggestion:
|
||
print(f" 💡 Suggestion: {v.suggestion}")
|
||
|
||
|
||
def main():
    """CLI entry point: parse arguments, run validation, exit with status code."""
    parser = argparse.ArgumentParser(
        description="Validate architecture patterns in codebase",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )

    # The three targeting modes (file / folder / object) are mutually exclusive.
    targets = parser.add_mutually_exclusive_group()
    targets.add_argument(
        "-f",
        "--file",
        type=Path,
        metavar="PATH",
        help="Validate a single file (.py, .js, or .html)",
    )
    targets.add_argument(
        "-d",
        "--folder",
        type=Path,
        metavar="PATH",
        help="Validate all files in a directory (recursive)",
    )
    targets.add_argument(
        "-o",
        "--object",
        type=str,
        metavar="NAME",
        help="Validate all files related to an entity (e.g., company, vendor, order)",
    )

    parser.add_argument(
        "-c",
        "--config",
        type=Path,
        default=Path.cwd() / ".architecture-rules.yaml",
        help="Path to architecture rules config (default: .architecture-rules.yaml)",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Show detailed output including context",
    )
    parser.add_argument(
        "--errors-only", action="store_true", help="Only show errors, suppress warnings"
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output results as JSON (for programmatic use)",
    )

    args = parser.parse_args()

    validator = ArchitectureValidator(args.config, verbose=args.verbose)

    # Dispatch on the selected targeting mode; default is the whole cwd.
    if args.file:
        validator.validate_file(args.file)
    elif args.folder:
        if not args.folder.is_dir():
            print(f"❌ Not a directory: {args.folder}")
            sys.exit(1)
        validator.validate_all(args.folder)
    elif args.object:
        validator.validate_object(args.object)
    else:
        validator.validate_all(Path.cwd())

    # Emit results in the requested format; the printer returns the exit code.
    exit_code = validator.print_json() if args.json else validator.print_report()
    sys.exit(exit_code)
|
||
|
||
|
||
# Allow direct invocation: python scripts/validate_architecture.py [options]
if __name__ == "__main__":
    main()
|