Replace all ~1,086 occurrences of Wizamart/wizamart/WIZAMART/WizaMart with Orion/orion/ORION across 184 files. This includes database identifiers, email addresses, domain references, R2 bucket names, DNS prefixes, encryption salt, Celery app name, config defaults, Docker configs, CI configs, documentation, seed data, and templates. Renames homepage-wizamart.html template to homepage-orion.html. Fixes duplicate file_pattern key in api.yaml architecture rule. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
5471 lines
232 KiB
Python
Executable File
5471 lines
232 KiB
Python
Executable File
#!/usr/bin/env python3
|
||
"""
|
||
Architecture Validator
|
||
======================
|
||
Validates code against architectural rules defined in .architecture-rules.yaml
|
||
|
||
This script checks that the codebase follows key architectural decisions:
|
||
- Separation of concerns (routes vs services)
|
||
- Proper exception handling (domain exceptions vs HTTPException)
|
||
- Correct use of Pydantic vs SQLAlchemy models
|
||
- Service layer patterns
|
||
- API endpoint patterns
|
||
|
||
Usage:
|
||
python scripts/validate/validate_architecture.py # Check all files in current directory
|
||
python scripts/validate/validate_architecture.py -d app/api/ # Check specific directory
|
||
python scripts/validate/validate_architecture.py -f app/api/v1/stores.py # Check single file
|
||
python scripts/validate/validate_architecture.py -o merchant # Check all merchant-related files
|
||
python scripts/validate/validate_architecture.py -o store --verbose # Check store files with details
|
||
python scripts/validate/validate_architecture.py --json # JSON output
|
||
|
||
Options:
|
||
-f, --file PATH Validate a single file (.py, .js, or .html)
|
||
-d, --folder PATH Validate all files in a directory (recursive)
|
||
-o, --object NAME Validate all files related to an entity (e.g., merchant, store, order)
|
||
-c, --config PATH Path to architecture rules config
|
||
-v, --verbose Show detailed output including context
|
||
--errors-only Only show errors, suppress warnings
|
||
--json Output results as JSON
|
||
"""
|
||
|
||
import argparse
|
||
import re
|
||
import sys
|
||
from dataclasses import dataclass, field
|
||
from enum import Enum
|
||
from pathlib import Path
|
||
from typing import Any
|
||
|
||
import yaml
|
||
|
||
|
||
class Severity(Enum):
    """Validation severity levels for reported violations."""

    ERROR = "error"      # blocking finding; a file with errors is reported FAILED
    WARNING = "warning"  # non-blocking finding; file still passes (PASSED*)
    INFO = "info"        # informational suggestion only
|
||
|
||
|
||
@dataclass
class Violation:
    """Represents an architectural rule violation found at a specific line."""

    rule_id: str          # short rule identifier, e.g. "JS-001" or "TPL-002"
    rule_name: str        # human-readable rule title
    severity: Severity    # ERROR / WARNING / INFO
    file_path: Path       # file in which the violation was found
    line_number: int      # 1-based line number of the offending line
    message: str          # description of what is wrong
    context: str = ""     # offending source snippet (optional, often truncated)
    suggestion: str = ""  # suggested fix shown to the user (optional)
|
||
|
||
|
||
@dataclass
class FileResult:
    """Per-file validation outcome: error/warning counts plus derived status."""

    file_path: Path
    errors: int = 0
    warnings: int = 0

    @property
    def passed(self) -> bool:
        """A file passes as long as it produced no error-level violations."""
        return not self.errors

    @property
    def status(self) -> str:
        """Textual status; the trailing '*' marks a pass that had warnings."""
        if self.errors:
            return "FAILED"
        return "PASSED*" if self.warnings else "PASSED"

    @property
    def status_icon(self) -> str:
        """Emoji counterpart of `status` for console output."""
        if self.errors:
            return "❌"
        return "⚠️" if self.warnings else "✅"
|
||
|
||
|
||
@dataclass
class ValidationResult:
    """Aggregate results of one architecture-validation run."""

    violations: list[Violation] = field(default_factory=list)
    files_checked: int = 0
    rules_applied: int = 0
    file_results: list[FileResult] = field(default_factory=list)

    def has_errors(self) -> bool:
        """Return True when at least one violation is error-level."""
        for violation in self.violations:
            if violation.severity is Severity.ERROR:
                return True
        return False

    def has_warnings(self) -> bool:
        """Return True when at least one violation is warning-level."""
        for violation in self.violations:
            if violation.severity is Severity.WARNING:
                return True
        return False
|
||
|
||
|
||
class ArchitectureValidator:
|
||
"""Main validator class"""
|
||
|
||
CORE_MODULES = {"contracts", "core", "tenancy", "cms", "customers", "billing", "payments", "messaging"}
|
||
OPTIONAL_MODULES = {"analytics", "cart", "catalog", "checkout", "inventory", "loyalty", "marketplace", "orders"}
|
||
|
||
def __init__(self, config_path: Path, verbose: bool = False):
    """Initialize validator with configuration.

    Args:
        config_path: Path to the single-file rules config; a sibling
            ".architecture-rules/" directory takes precedence if present.
        verbose: Enable detailed output in downstream reporting.
    """
    self.config_path = config_path
    self.verbose = verbose
    # NOTE: _load_config may call sys.exit(1) when neither the rules
    # directory nor the config file exists.
    self.config = self._load_config()
    self.result = ValidationResult()
    # Globs and relative paths are resolved against the current working
    # directory, not the script's location.
    self.project_root = Path.cwd()
|
||
|
||
def _load_config(self) -> dict[str, Any]:
    """
    Load validation rules from YAML config.

    Supports two modes:
    1. Split directory mode: .architecture-rules/ directory with multiple YAML files
    2. Single file mode: .architecture-rules.yaml (legacy)

    The split directory mode takes precedence if it exists.

    Exits the process (status 1) when neither source is available.
    """
    # Check for split directory mode first: a ".architecture-rules"
    # directory next to the configured file wins over the file itself.
    rules_dir = self.config_path.parent / ".architecture-rules"
    if rules_dir.is_dir():
        return self._load_config_from_directory(rules_dir)

    # Fall back to single file mode
    if not self.config_path.exists():
        print(f"❌ Configuration file not found: {self.config_path}")
        print(f" (Also checked for directory: {rules_dir})")
        sys.exit(1)

    with open(self.config_path) as f:
        config = yaml.safe_load(f)

    print(f"📋 Loaded architecture rules: {config.get('project', 'unknown')}")
    return config
|
||
|
||
def _load_config_from_directory(self, rules_dir: Path) -> dict[str, Any]:
    """
    Load and merge configuration from split YAML files in a directory.

    Reads _main.yaml first for base config, then merges all other YAML
    files in sorted filename order. Keys ending in "_rules" with list
    values are concatenated across files; any other key is only added
    if not already present (first definition wins).
    """
    config: dict[str, Any] = {}

    # Load _main.yaml first (contains project info, principles, ignore patterns)
    main_file = rules_dir / "_main.yaml"
    if main_file.exists():
        with open(main_file) as f:
            config = yaml.safe_load(f) or {}

    # Load all other YAML files and merge their contents
    yaml_files = sorted(rules_dir.glob("*.yaml"))
    for yaml_file in yaml_files:
        if yaml_file.name == "_main.yaml":
            continue  # Already loaded

        with open(yaml_file) as f:
            file_config = yaml.safe_load(f) or {}

        # Merge rule sections from this file into main config
        for key, value in file_config.items():
            if key.endswith("_rules") and isinstance(value, list):
                # Merge rule lists (concatenate across files)
                if key not in config:
                    config[key] = []
                config[key].extend(value)
            elif key not in config:
                # Add new top-level keys; existing keys keep their first
                # value — later duplicates are silently dropped.
                config[key] = value

    print(f"📋 Loaded architecture rules: {config.get('project', 'unknown')}")
    print(f" (from {len(yaml_files)} files in {rules_dir.name}/)")
    return config
|
||
|
||
def validate_all(self, target_path: Path = None) -> ValidationResult:
    """Run every validation category against a directory tree.

    Args:
        target_path: Directory to validate; defaults to the project root.

    Returns:
        The accumulated ValidationResult shared by this validator.
    """
    print("\n🔍 Starting architecture validation...\n")

    target = target_path or self.project_root

    # Ordered pipeline of check categories; each one receives the target
    # directory and appends its findings to self.result.
    pipeline = (
        self._validate_api_endpoints,          # API endpoint rules
        self._validate_service_layer,          # service-layer rules
        self._validate_models,                 # model rules
        self._validate_exceptions,             # exception handling
        self._validate_naming_conventions,     # naming conventions
        self._validate_auth_patterns,          # auth patterns
        self._validate_middleware,             # middleware
        self._validate_javascript,             # JavaScript rules
        self._validate_templates,              # template rules
        self._validate_language_rules,         # language/i18n rules
        self._validate_modules,                # module structure
        self._validate_service_test_coverage,  # service test coverage
        self._validate_cross_module_imports,   # core must not import optional
        self._validate_circular_dependencies,  # circular module dependencies
        self._validate_unused_exceptions,      # unused exception classes
        self._validate_legacy_locations,       # legacy code must be in modules
    )
    for run_check in pipeline:
        run_check(target)

    return self.result
|
||
|
||
def validate_file(self, file_path: Path, quiet: bool = False) -> ValidationResult:
|
||
"""Validate a single file"""
|
||
if not file_path.exists():
|
||
if not quiet:
|
||
print(f"❌ File not found: {file_path}")
|
||
return self.result
|
||
|
||
if not file_path.is_file():
|
||
if not quiet:
|
||
print(f"❌ Not a file: {file_path}")
|
||
return self.result
|
||
|
||
if not quiet:
|
||
print(f"\n🔍 Validating single file: {file_path}\n")
|
||
|
||
# Resolve file path to absolute
|
||
file_path = file_path.resolve()
|
||
file_path_str = str(file_path)
|
||
|
||
if self._should_ignore_file(file_path):
|
||
if not quiet:
|
||
print("⏭️ File is in ignore list, skipping")
|
||
return self.result
|
||
|
||
self.result.files_checked += 1
|
||
|
||
# Track violations before this file
|
||
violations_before = len(self.result.violations)
|
||
|
||
content = file_path.read_text()
|
||
lines = content.split("\n")
|
||
|
||
# Determine file type and run appropriate validators
|
||
if file_path.suffix == ".py":
|
||
self._validate_python_file(file_path, content, lines, file_path_str)
|
||
elif file_path.suffix == ".js":
|
||
self._validate_js_file(file_path, content, lines)
|
||
elif file_path.suffix == ".html":
|
||
self._validate_html_file(file_path, content, lines)
|
||
else:
|
||
if not quiet:
|
||
print(f"⚠️ Unsupported file type: {file_path.suffix}")
|
||
|
||
# Calculate violations for this file
|
||
file_violations = self.result.violations[violations_before:]
|
||
errors = sum(1 for v in file_violations if v.severity == Severity.ERROR)
|
||
warnings = sum(1 for v in file_violations if v.severity == Severity.WARNING)
|
||
|
||
# Track file result
|
||
self.result.file_results.append(
|
||
FileResult(file_path=file_path, errors=errors, warnings=warnings)
|
||
)
|
||
|
||
return self.result
|
||
|
||
def validate_object(self, object_name: str) -> ValidationResult:
    """Validate all files related to an entity (e.g., merchant, store, order).

    Globs the known app/model/static/template locations for filenames
    containing any singular/plural variant of the entity name, then
    validates each match via validate_file(quiet=True).
    """
    print(f"\n🔍 Searching for '{object_name}'-related files...\n")

    # Generate name variants (singular/plural forms)
    name = object_name.lower()
    variants = {name}

    # Handle common plural patterns
    if name.endswith("ies"):
        # e.g. categories -> category
        variants.add(name[:-3] + "y")
    elif name.endswith("s"):
        # stores -> store
        variants.add(name[:-1])
    else:
        # merchant -> merchants, store -> stores
        if name.endswith("y"):
            # category -> categories (the "name + s" form below is a
            # harmless extra pattern that simply matches nothing)
            variants.add(name[:-1] + "ies")
        variants.add(name + "s")

    # Search patterns for different file types
    patterns = []
    for variant in variants:
        patterns.extend(
            [
                f"app/api/**/*{variant}*.py",
                f"app/services/*{variant}*.py",
                f"app/exceptions/*{variant}*.py",
                f"models/database/*{variant}*.py",
                f"models/schema/*{variant}*.py",
                f"static/admin/js/*{variant}*.js",
                f"app/templates/admin/*{variant}*.html",
            ]
        )

    # Find all matching files (relative to the current working directory)
    found_files: set[Path] = set()
    for pattern in patterns:
        matches = list(self.project_root.glob(pattern))
        for match in matches:
            if match.is_file() and not self._should_ignore_file(match):
                found_files.add(match)

    if not found_files:
        print(f"❌ No files found matching '{object_name}'")
        return self.result

    # Sort files by type for better readability
    sorted_files = sorted(found_files, key=lambda p: (p.suffix, str(p)))

    print(f"📁 Found {len(sorted_files)} files:\n")
    for f in sorted_files:
        rel_path = f.relative_to(self.project_root)
        print(f" • {rel_path}")

    print("\n" + "-" * 60 + "\n")

    # Validate each file
    for file_path in sorted_files:
        rel_path = file_path.relative_to(self.project_root)
        print(f"📄 {rel_path}")
        self.validate_file(file_path, quiet=True)

    return self.result
|
||
|
||
def _validate_python_file(
    self, file_path: Path, content: str, lines: list[str], file_path_str: str
):
    """Validate a single Python file based on its location.

    The path decides which rule family applies (API endpoint, service,
    model, or Alembic migration); the bare-except check at the end is
    applied regardless of location.
    """
    # API endpoints
    if "/app/api/" in file_path_str or "\\app\\api\\" in file_path_str:
        print("📡 Validating as API endpoint...")
        self._check_pydantic_usage(file_path, content, lines)
        self._check_no_business_logic_in_endpoints(file_path, content, lines)
        self._check_endpoint_exception_handling(file_path, content, lines)
        self._check_endpoint_authentication(file_path, content, lines)

    # Service layer
    elif "/app/services/" in file_path_str or "\\app\\services\\" in file_path_str:
        print("🔧 Validating as service layer...")
        self._check_no_http_exception_in_services(file_path, content, lines)
        self._check_service_exceptions(file_path, content, lines)
        self._check_db_session_parameter(file_path, content, lines)
        self._check_no_commit_in_services(file_path, content, lines)

    # Models
    elif "/app/models/" in file_path_str or "\\app\\models\\" in file_path_str:
        print("📦 Validating as model...")
        for i, line in enumerate(lines, 1):
            # A class inheriting both SQLAlchemy Base and Pydantic
            # BaseModel mixes persistence and schema concerns.
            if re.search(r"class.*\(Base.*,.*BaseModel.*\)", line):
                self._add_violation(
                    rule_id="MDL-002",
                    rule_name="Separate SQLAlchemy and Pydantic models",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message="Model mixes SQLAlchemy Base and Pydantic BaseModel",
                    context=line.strip(),
                    suggestion="Keep SQLAlchemy models and Pydantic models separate",
                )

    # Alembic migrations
    elif "/alembic/versions/" in file_path_str or "\\alembic\\versions\\" in file_path_str:
        print("🔄 Validating as Alembic migration...")
        self._check_migration_batch_mode(file_path, content, lines)
        self._check_migration_constraint_names(file_path, content, lines)

    # Generic exception-handling check.
    # NOTE(review): reconstructed as running for EVERY Python file (the
    # source shows no `else:` before this section) — confirm against the
    # original formatting.
    print("⚠️ Validating exception handling...")
    for i, line in enumerate(lines, 1):
        # Matches only a bare `except:` (an exception type between the
        # keyword and the colon prevents the match).
        if re.match(r"\s*except\s*:", line):
            self._add_violation(
                rule_id="EXC-002",
                rule_name="No bare except clauses",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=i,
                message="Bare except clause catches all exceptions including system exits",
                context=line.strip(),
                suggestion="Specify exception type: except ValueError: or except Exception:",
            )
|
||
def _validate_js_file(self, file_path: Path, content: str, lines: list[str]):
    """Validate a single JavaScript file against the JS-00x rule set."""
    print("🟨 Validating JavaScript...")

    # JS-001: Check for console usage (must use centralized logger)
    # Skip init-*.js files - they run before logger is available
    if not file_path.name.startswith("init-"):
        for i, line in enumerate(lines, 1):
            if re.search(r"console\.(log|warn|error)", line):
                # Crude opt-outs: any "//" anywhere on the line (comment
                # or URL), a ✅ marker, or an eslint-disable pragma.
                if "//" in line or "✅" in line or "eslint-disable" in line:
                    continue
                self._add_violation(
                    rule_id="JS-001",
                    rule_name="Use centralized logger",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Use centralized logger instead of console",
                    context=line.strip()[:80],
                    suggestion="Use window.LogConfig.createLogger('moduleName')",
                )

    # JS-002: Check for window.apiClient (must use lowercase apiClient)
    for i, line in enumerate(lines, 1):
        if "window.apiClient" in line:
            # Only flag when the occurrence is not preceded by "//"
            before_occurrence = line[: line.find("window.apiClient")]
            if "//" not in before_occurrence:
                self._add_violation(
                    rule_id="JS-002",
                    rule_name="Use lowercase apiClient",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Use apiClient directly instead of window.apiClient",
                    context=line.strip(),
                    suggestion="Replace window.apiClient with apiClient",
                )

    # JS-003: Check Alpine components spread ...data() for base layout inheritance
    # Only check files that define Alpine component functions (function adminXxx() { return {...} })
    self._check_alpine_data_spread(file_path, content, lines)

    # JS-004: Check Alpine components set currentPage
    self._check_alpine_current_page(file_path, content, lines)

    # JS-005: Check initialization guard
    self._check_init_guard(file_path, content, lines)

    # JS-006: Check async error handling
    self._check_async_error_handling(file_path, content, lines)

    # JS-007: Check loading state management
    self._check_loading_state(file_path, content, lines)

    # JS-008: Check for raw fetch() calls instead of apiClient
    self._check_fetch_vs_api_client(file_path, content, lines)

    # JS-009: Check for alert() or window.showToast instead of Utils.showToast()
    self._check_toast_usage(file_path, content, lines)
|
||
|
||
def _check_toast_usage(self, file_path: Path, content: str, lines: list[str]):
|
||
"""JS-009: Check for alert() or window.showToast instead of Utils.showToast()"""
|
||
# Skip utils.js (where showToast is defined)
|
||
if "utils.js" in file_path.name:
|
||
return
|
||
|
||
# Check for file-level noqa comment
|
||
if "noqa: js-009" in content.lower():
|
||
return
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
stripped = line.strip()
|
||
|
||
# Skip comments
|
||
if stripped.startswith(("//", "/*")):
|
||
continue
|
||
|
||
# Skip lines with inline noqa comment
|
||
if "noqa: js-009" in line.lower():
|
||
continue
|
||
|
||
# Check for alert() usage
|
||
if re.search(r"\balert\s*\(", line):
|
||
self._add_violation(
|
||
rule_id="JS-009",
|
||
rule_name="Use Utils.showToast() for notifications",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Browser alert() dialog - use Utils.showToast() for consistent UX",
|
||
context=stripped[:80],
|
||
suggestion="Replace alert('message') with Utils.showToast('message', 'success'|'error'|'warning'|'info')",
|
||
)
|
||
|
||
# Check for window.showToast usage
|
||
if "window.showToast" in line:
|
||
self._add_violation(
|
||
rule_id="JS-009",
|
||
rule_name="Use Utils.showToast() for notifications",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="window.showToast is undefined - use Utils.showToast()",
|
||
context=stripped[:80],
|
||
suggestion="Replace window.showToast('msg', 'type') with Utils.showToast('msg', 'type')",
|
||
)
|
||
|
||
def _check_alpine_data_spread(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-003: Check that Alpine components inherit base layout data using ...data()"""
    # Skip utility/init files that aren't page components
    skip_patterns = ["init-", "api-client", "log-config", "utils", "helpers"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # Check for noqa comment (file-level opt-out)
    if "noqa: js-003" in content.lower():
        return

    # Look for Alpine component function pattern: function adminXxx(...) { return { ... } }
    # These are page-level components that should inherit from data()
    # Allow optional parameters in the function signature
    component_pattern = re.compile(
        r"function\s+(admin\w+|store\w+|shop\w+|platform\w+)\s*\([^)]*\)\s*\{", re.IGNORECASE
    )

    for match in component_pattern.finditer(content):
        func_name = match.group(1)
        func_start = match.start()
        # 1-based line number of the function definition
        line_num = content[:func_start].count("\n") + 1

        # Find the return statement with object literal
        # Look for "return {" within reasonable distance (first 500 chars)
        search_region = content[func_start : func_start + 500]

        if "return {" in search_region:
            # Check if ...data() is present in the return object
            # Look for pattern like "return {\n  ...data()," or similar;
            # only the first 200 chars of the object literal are examined.
            return_match = re.search(
                r"return\s*\{([^}]{0,200})", search_region, re.DOTALL
            )
            if return_match:
                return_content = return_match.group(1)
                if "...data()" not in return_content:
                    self._add_violation(
                        rule_id="JS-003",
                        rule_name="Alpine components must spread ...data()",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=line_num,
                        message=f"Alpine component '{func_name}' does not inherit base layout data",
                        context=f"function {func_name}() {{ return {{ ... }}",
                        suggestion="Add '...data(),' as first property in return object to inherit dark mode, sidebar state, etc.",
                    )
|
||
|
||
def _check_fetch_vs_api_client(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""JS-008: Check for raw fetch() calls that should use apiClient instead"""
|
||
# Skip init files and api-client itself
|
||
if "init-" in file_path.name or "api-client" in file_path.name:
|
||
return
|
||
|
||
# Check for noqa comment
|
||
if "noqa: js-008" in content.lower():
|
||
return
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
# Look for fetch( calls that hit API endpoints
|
||
if re.search(r"\bfetch\s*\(", line):
|
||
# Skip if it's a comment
|
||
stripped = line.strip()
|
||
if stripped.startswith(("//", "/*")):
|
||
continue
|
||
|
||
# Check if it's calling an API endpoint (contains /api/)
|
||
if "/api/" in line or "apiClient" in line:
|
||
# If using fetch with /api/, should use apiClient instead
|
||
if "/api/" in line and "apiClient" not in line:
|
||
self._add_violation(
|
||
rule_id="JS-008",
|
||
rule_name="Use apiClient for API calls",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Raw fetch() call to API endpoint - use apiClient for automatic auth",
|
||
context=stripped[:80],
|
||
suggestion="Replace fetch('/api/...') with apiClient.get('/endpoint') or apiClient.post('/endpoint', data)",
|
||
)
|
||
|
||
def _check_alpine_current_page(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-004: Check that Alpine components set currentPage identifier"""
    # Skip utility/init files
    skip_patterns = ["init-", "api-client", "log-config", "utils", "helpers"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # File-level opt-out
    if "noqa: js-004" in content.lower():
        return

    # Look for Alpine component function pattern (parameterless only,
    # unlike JS-003 which allows parameters)
    component_pattern = re.compile(
        r"function\s+(admin\w+|store\w+|shop\w+)\s*\(\s*\)\s*\{", re.IGNORECASE
    )

    for match in component_pattern.finditer(content):
        func_name = match.group(1)
        func_start = match.start()
        line_num = content[:func_start].count("\n") + 1

        # Check if currentPage is set in the return object
        # Use larger region to handle functions with setup code before return
        search_region = content[func_start : func_start + 2000]
        if "return {" in search_region:
            return_match = re.search(
                r"return\s*\{([^}]{0,500})", search_region, re.DOTALL
            )
            if return_match:
                return_content = return_match.group(1)
                # Accept both "currentPage:" and "currentPage :" spellings
                if (
                    "currentPage:" not in return_content
                    and "currentPage :" not in return_content
                ):
                    self._add_violation(
                        rule_id="JS-004",
                        rule_name="Alpine components must set currentPage",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=line_num,
                        message=f"Alpine component '{func_name}' does not set currentPage identifier",
                        context=f"function {func_name}()",
                        suggestion="Add 'currentPage: \"page-name\",' in return object for sidebar highlighting",
                    )
|
||
|
||
def _check_init_guard(self, file_path: Path, content: str, lines: list[str]):
    """JS-005: Check that init methods have duplicate initialization guards"""
    # Skip utility/init files
    skip_patterns = ["init-", "api-client", "log-config", "utils", "helpers"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # File-level opt-out
    if "noqa: js-005" in content.lower():
        return

    # Look for init() methods in Alpine components (async or sync form)
    init_pattern = re.compile(r"async\s+init\s*\(\s*\)\s*\{|init\s*\(\s*\)\s*\{")

    for match in init_pattern.finditer(content):
        init_start = match.start()
        line_num = content[:init_start].count("\n") + 1

        # Check the next 300 chars for an initialization guard pattern
        search_region = content[init_start : init_start + 300]
        guard_patterns = [
            "window._",
            "if (this._initialized)",
            "if (window.",
            "_initialized",
        ]

        # Heuristic: any of the markers counts as a guard
        has_guard = any(pattern in search_region for pattern in guard_patterns)
        if not has_guard:
            self._add_violation(
                rule_id="JS-005",
                rule_name="Initialization methods must include guard",
                severity=Severity.WARNING,
                file_path=file_path,
                line_number=line_num,
                message="init() method lacks duplicate initialization guard",
                context="init() { ... }",
                suggestion="Add guard: if (window._pageInitialized) return; window._pageInitialized = true;",
            )
            return  # Only report once per file
|
||
|
||
def _check_async_error_handling(
    self, file_path: Path, content: str, lines: list[str]
):
    """JS-006: Check that async operations have try/catch error handling"""
    # Skip utility/init files
    skip_patterns = ["init-", "api-client", "log-config"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # File-level opt-out
    if "noqa: js-006" in content.lower():
        return

    # Look for async functions/methods
    async_pattern = re.compile(r"async\s+\w+\s*\([^)]*\)\s*\{")

    for match in async_pattern.finditer(content):
        func_start = match.start()
        line_num = content[:func_start].count("\n") + 1

        # Find the function body (look for matching braces)
        # Simplified heuristic: check the next 800 chars for try/catch
        search_region = content[func_start : func_start + 800]

        # Check if there's await without try/catch
        if (
            "await " in search_region
            and "try {" not in search_region
            and "try{" not in search_region
        ):
            # Promise-style .catch() also counts as error handling
            if ".catch(" not in search_region:
                self._add_violation(
                    rule_id="JS-006",
                    rule_name="Async operations must have error handling",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=line_num,
                    message="Async function with await lacks try/catch error handling",
                    context=match.group(0)[:50],
                    suggestion="Wrap await calls in try/catch with proper error logging",
                )
                return  # Only report once per file
|
||
|
||
def _check_loading_state(self, file_path: Path, content: str, lines: list[str]):
    """JS-007: Check that async operations manage loading state"""
    # Skip utility/init files
    skip_patterns = ["init-", "api-client", "log-config", "utils"]
    if any(pattern in file_path.name for pattern in skip_patterns):
        return

    # File-level opt-out
    if "noqa: js-007" in content.lower():
        return

    # Look for Alpine component functions that have async methods with API calls
    component_pattern = re.compile(
        r"function\s+(admin\w+|store\w+|shop\w+)\s*\(\s*\)\s*\{", re.IGNORECASE
    )

    for match in component_pattern.finditer(content):
        func_start = match.start()
        # Get the component body (rough extraction, first 5000 chars)
        component_region = content[func_start : func_start + 5000]

        # Check if component has API calls but no loading state
        has_api_calls = (
            "apiClient." in component_region or "await " in component_region
        )
        has_loading_state = (
            "loading:" in component_region or "loading :" in component_region
        )

        if has_api_calls and not has_loading_state:
            line_num = content[:func_start].count("\n") + 1
            self._add_violation(
                rule_id="JS-007",
                rule_name="Set loading state for async operations",
                severity=Severity.INFO,
                file_path=file_path,
                line_number=line_num,
                message="Component has API calls but no loading state property",
                context=match.group(1),
                suggestion="Add 'loading: false,' to component state and set it before/after API calls",
            )
            return  # Only report once per file
|
||
|
||
def _validate_html_file(self, file_path: Path, content: str, lines: list[str]):
    """Validate a single HTML template file.

    Phase 1 runs the FE-00x/TPL-004..009 content checks (skipped for
    base/partial templates and macro files); phase 2 runs the
    TPL-001..003 extends checks (skipped for base/partials and for
    templates marked standalone in their first five lines).
    """
    print("📄 Validating template...")

    file_path_str = str(file_path)

    # Skip base template and partials for extends check
    is_base_or_partial = (
        "base.html" in file_path.name or "partials" in file_path_str
    )

    # Skip macros directory for FE rules
    is_macro = (
        "shared/macros/" in file_path_str or "shared\\macros\\" in file_path_str
    )

    # Skip components showcase page
    is_components_page = "components.html" in file_path.name

    # Determine template type from the path
    is_admin = "/admin/" in file_path_str or "\\admin\\" in file_path_str
    is_store = "/store/" in file_path_str or "\\store\\" in file_path_str
    is_shop = "/shop/" in file_path_str or "\\shop\\" in file_path_str

    if is_base_or_partial:
        print("⏭️ Skipping base/partial template")
    elif is_macro:
        print("⏭️ Skipping macro file")
    else:
        # FE-001: Check for inline pagination (should use macro)
        if not is_components_page:
            self._check_pagination_macro_usage(file_path, content, lines)

        # FE-002: Check for inline SVGs (should use $icon())
        if not is_components_page and not is_macro:
            self._check_icon_helper_usage(file_path, content, lines)

        # FE-008: Check for raw number inputs (should use number_stepper)
        if not is_components_page and not is_macro:
            self._check_number_stepper_macro_usage(file_path, content, lines)

        # TPL-004: Check x-text usage for dynamic content
        self._check_xtext_usage(file_path, content, lines)

        # TPL-005: Check x-html safety
        self._check_xhtml_safety(file_path, content, lines)

        # TPL-006: Check loading state implementation
        self._check_template_loading_state(file_path, content, lines)

        # TPL-007: Check empty state implementation
        self._check_template_empty_state(file_path, content, lines)

        # TPL-008: Check for call table_header() pattern (should be table_header_custom)
        self._check_table_header_call_pattern(file_path, content, lines)

        # TPL-009: Check for invalid block names (admin and store use same blocks)
        if is_admin or is_store:
            self._check_valid_block_names(file_path, content, lines)

    # Base/partial templates are exempt from the extends checks below
    if is_base_or_partial:
        return

    # Check for standalone marker in template (first 5 lines)
    first_lines = "\n".join(lines[:5]).lower()
    if "standalone" in first_lines or "noqa: tpl-00" in first_lines:
        print("⏭️ Template marked as standalone, skipping extends check")
        return

    # TPL-001: Admin templates extends check
    if is_admin:
        self._check_template_extends(
            file_path,
            lines,
            "admin/base.html",
            "TPL-001",
            ["login.html", "errors/", "test-"],
        )

    # TPL-002: Store templates extends check
    if is_store:
        self._check_template_extends(
            file_path,
            lines,
            "store/base.html",
            "TPL-002",
            ["login.html", "errors/", "test-"],
        )

    # TPL-003: Shop templates extends check
    if is_shop:
        self._check_template_extends(
            file_path, lines, "shop/base.html", "TPL-003", ["errors/", "test-"]
        )
|
||
|
||
def _check_template_extends(
|
||
self,
|
||
file_path: Path,
|
||
lines: list[str],
|
||
base_template: str,
|
||
rule_id: str,
|
||
exclusions: list[str],
|
||
):
|
||
"""Check that template extends the appropriate base template"""
|
||
file_path_str = str(file_path)
|
||
|
||
# Check exclusion patterns
|
||
for exclusion in exclusions:
|
||
if exclusion in file_path_str:
|
||
print(f"⏭️ Template matches exclusion pattern '{exclusion}', skipping")
|
||
return
|
||
|
||
# Check for extends
|
||
has_extends = any(
|
||
"{% extends" in line and base_template in line for line in lines
|
||
)
|
||
|
||
if not has_extends:
|
||
template_type = base_template.split("/")[0]
|
||
self._add_violation(
|
||
rule_id=rule_id,
|
||
rule_name=f"{template_type.title()} templates must extend base",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=1,
|
||
message=f"{template_type.title()} template does not extend {base_template}",
|
||
context=file_path.name,
|
||
suggestion=f"Add {{% extends '{base_template}' %}} at the top, or add {{# standalone #}} if intentional",
|
||
)
|
||
|
||
def _check_xtext_usage(self, file_path: Path, content: str, lines: list[str]):
|
||
"""TPL-004: Check that x-text is used for dynamic text content"""
|
||
if "noqa: tpl-004" in content.lower():
|
||
return
|
||
|
||
# Look for potentially unsafe patterns where user data might be interpolated
|
||
# This is a heuristic check - not perfect
|
||
unsafe_patterns = [
|
||
(r"\{\{\s*\w+\.name\s*\}\}", "User data interpolation without x-text"),
|
||
(r"\{\{\s*\w+\.title\s*\}\}", "User data interpolation without x-text"),
|
||
(
|
||
r"\{\{\s*\w+\.description\s*\}\}",
|
||
"User data interpolation without x-text",
|
||
),
|
||
]
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
# Skip Jinja2 template syntax (server-side safe)
|
||
if "{%" in line:
|
||
continue
|
||
|
||
for pattern, message in unsafe_patterns:
|
||
if re.search(pattern, line):
|
||
# Check if it's in an x-text context already
|
||
if 'x-text="' not in line and "x-text='" not in line:
|
||
self._add_violation(
|
||
rule_id="TPL-004",
|
||
rule_name="Use x-text for dynamic text content",
|
||
severity=Severity.INFO,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=message,
|
||
context=line.strip()[:60],
|
||
suggestion='Use x-text="item.name" instead of {{ item.name }} for XSS safety',
|
||
)
|
||
return # Only report once per file
|
||
|
||
def _check_xhtml_safety(self, file_path: Path, content: str, lines: list[str]):
|
||
"""TPL-005: Check that x-html is only used for safe content"""
|
||
if "noqa: tpl-005" in content.lower():
|
||
return
|
||
|
||
safe_xhtml_patterns = [
|
||
r'x-html="\$icon\(', # Icon helper is safe
|
||
r"x-html='\$icon\(",
|
||
r'x-html="`\$\{.*icon', # Template literal with icon
|
||
]
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
if "x-html=" in line:
|
||
# Check if it matches safe patterns
|
||
is_safe = any(
|
||
re.search(pattern, line) for pattern in safe_xhtml_patterns
|
||
)
|
||
|
||
if not is_safe:
|
||
# Check for potentially unsafe user content
|
||
unsafe_indicators = [
|
||
".description",
|
||
".content",
|
||
".body",
|
||
".text",
|
||
".message",
|
||
".comment",
|
||
".review",
|
||
".html",
|
||
]
|
||
for indicator in unsafe_indicators:
|
||
if indicator in line:
|
||
self._add_violation(
|
||
rule_id="TPL-005",
|
||
rule_name="Use x-html ONLY for safe content",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="x-html used with potentially unsafe user content",
|
||
context=line.strip()[:60],
|
||
suggestion="Sanitize HTML content or use x-text for plain text",
|
||
)
|
||
return
|
||
|
||
def _check_template_loading_state(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-006: Check that templates with data loads show loading state"""
|
||
if "noqa: tpl-006" in content.lower():
|
||
return
|
||
|
||
# Check if template has data loading (Alpine init, fetch, apiClient)
|
||
has_data_loading = any(
|
||
[
|
||
"x-init=" in content,
|
||
"@load=" in content,
|
||
"apiClient" in content,
|
||
"loadData" in content,
|
||
"fetchData" in content,
|
||
]
|
||
)
|
||
|
||
if not has_data_loading:
|
||
return
|
||
|
||
# Check for loading state display
|
||
has_loading_state = any(
|
||
[
|
||
'x-show="loading"' in content,
|
||
"x-show='loading'" in content,
|
||
'x-if="loading"' in content,
|
||
"x-if='loading'" in content,
|
||
"loading_state" in content,
|
||
"Loading..." in content,
|
||
"spinner" in content.lower(),
|
||
]
|
||
)
|
||
|
||
if not has_loading_state:
|
||
self._add_violation(
|
||
rule_id="TPL-006",
|
||
rule_name="Implement loading state for data loads",
|
||
severity=Severity.INFO,
|
||
file_path=file_path,
|
||
line_number=1,
|
||
message="Template loads data but has no visible loading state",
|
||
context=file_path.name,
|
||
suggestion='Add <div x-show="loading">Loading...</div> or use loading_state macro',
|
||
)
|
||
|
||
def _check_template_empty_state(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-007: Check that templates with lists show empty state"""
|
||
if "noqa: tpl-007" in content.lower():
|
||
return
|
||
|
||
# Check if template has list iteration
|
||
has_list = any(
|
||
[
|
||
"x-for=" in content,
|
||
"{% for " in content,
|
||
]
|
||
)
|
||
|
||
if not has_list:
|
||
return
|
||
|
||
# Check for empty state handling
|
||
has_empty_state = any(
|
||
[
|
||
".length === 0" in content,
|
||
".length == 0" in content,
|
||
"items.length" in content,
|
||
"empty_state" in content,
|
||
"No items" in content,
|
||
"No results" in content,
|
||
"No data" in content,
|
||
"table_empty_state" in content,
|
||
]
|
||
)
|
||
|
||
if not has_empty_state:
|
||
self._add_violation(
|
||
rule_id="TPL-007",
|
||
rule_name="Implement empty state when no data",
|
||
severity=Severity.INFO,
|
||
file_path=file_path,
|
||
line_number=1,
|
||
message="Template has list but no empty state handling",
|
||
context=file_path.name,
|
||
suggestion='Add <template x-if="items.length === 0">No items found</template>',
|
||
)
|
||
|
||
def _check_table_header_call_pattern(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-008: Check that {% call table_header() %} is not used.
|
||
|
||
The table_header() macro doesn't support caller() - it takes a columns list.
|
||
Using {% call table_header() %} causes a Jinja2 error:
|
||
"macro 'table_header' was invoked with two values for the special caller argument"
|
||
|
||
Use table_header_custom() instead for custom headers with caller().
|
||
"""
|
||
if "noqa: tpl-008" in content.lower():
|
||
return
|
||
|
||
import re
|
||
pattern = re.compile(r"{%\s*call\s+table_header\s*\(\s*\)\s*%}")
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
if pattern.search(line):
|
||
self._add_violation(
|
||
rule_id="TPL-008",
|
||
rule_name="Use table_header_custom for custom headers",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="{% call table_header() %} causes Jinja2 error - table_header doesn't support caller()",
|
||
context=line.strip()[:80],
|
||
suggestion="Use {% call table_header_custom() %} for custom headers with th_sortable or custom <th> elements",
|
||
)
|
||
|
||
def _check_valid_block_names(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-009: Check that templates use valid block names from base template"""
|
||
if "noqa: tpl-009" in content.lower():
|
||
return
|
||
|
||
# Skip base templates
|
||
if file_path.name == "base.html":
|
||
return
|
||
|
||
# Valid admin template blocks
|
||
|
||
# Common invalid block names that developers might mistakenly use
|
||
invalid_blocks = {
|
||
"page_scripts": "extra_scripts",
|
||
"scripts": "extra_scripts",
|
||
"js": "extra_scripts",
|
||
"footer_scripts": "extra_scripts",
|
||
"head": "extra_head",
|
||
"body": "content",
|
||
"main": "content",
|
||
}
|
||
|
||
import re
|
||
block_pattern = re.compile(r"{%\s*block\s+(\w+)\s*%}")
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
match = block_pattern.search(line)
|
||
if match:
|
||
block_name = match.group(1)
|
||
if block_name in invalid_blocks:
|
||
self._add_violation(
|
||
rule_id="TPL-009",
|
||
rule_name="Use valid block names from base templates",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=f"Invalid block name '{block_name}' - this block doesn't exist in admin/base.html",
|
||
context=line.strip(),
|
||
suggestion=f"Use '{{% block {invalid_blocks[block_name]} %}}' instead",
|
||
)
|
||
|
||
def _check_deprecated_macros(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""TPL-011: Check for usage of deprecated macros"""
|
||
if "noqa: tpl-011" in content.lower():
|
||
return
|
||
|
||
# Deprecated macros and their replacements
|
||
deprecated_macros = {
|
||
"pagination_full": {
|
||
"replacement": "pagination",
|
||
"reason": "pagination_full expects flat variables (total, skip, page, limit) but components use nested pagination object",
|
||
},
|
||
}
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
for macro_name, info in deprecated_macros.items():
|
||
if macro_name in line and "# deprecated" not in line.lower():
|
||
# Check if it's an import or usage
|
||
if f"import {macro_name}" in line or f"{{ {macro_name}" in line or f"{{{macro_name}" in line:
|
||
self._add_violation(
|
||
rule_id="TPL-011",
|
||
rule_name="Avoid deprecated macros",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=f"Deprecated macro '{macro_name}' used - {info['reason']}",
|
||
context=line.strip()[:80],
|
||
suggestion=f"Use '{info['replacement']}' macro instead",
|
||
)
|
||
|
||
def _check_escaped_quotes_in_alpine(
    self, file_path: Path, content: str, lines: list[str]
):
    """TPL-012: Check for problematic quotes in Alpine copyCode template literals.

    When using copyCode() with a multi-line template literal, double quotes
    inside the template will prematurely end the outer @click="..." attribute.

    Bad:  @click="copyCode(`{{ func("arg") }}`)"   # inner " ends the attribute
    Good: @click='copyCode(`{{ func("arg") }}`)'   # single quotes for outer

    Scans line by line with a flag tracking whether we are currently inside a
    multi-line copyCode(`...`) template that was opened with a double-quoted
    @click attribute; only lines inside such a span are checked.
    """
    # File-wide opt-out.
    if "noqa: tpl-012" in content.lower():
        return

    import re

    # Track multi-line copyCode template literals with double-quoted outer attribute
    in_copycode_template = False

    for i, line in enumerate(lines, 1):
        # Per-line opt-out; the span flag is left untouched for this line.
        if "noqa: tpl-012" in line.lower():
            continue

        # Check for start of copyCode with double-quoted attribute and template literal
        # Pattern: @click="copyCode(` where the backtick doesn't close on same line
        # (a closing `) on the same line means the literal is single-line and safe).
        if '@click="copyCode(`' in line and "`)" not in line:
            in_copycode_template = True
            continue

        # Check for end of copyCode template (backtick followed by )" or )')
        if in_copycode_template:
            if '`)"' in line or "`)'" in line:
                in_copycode_template = False
                continue

            # Check for double quotes that would break the outer attribute
            # These appear in Jinja template code like {{ func("arg") }}
            # NOTE: the regex matches a double-quoted argument inside a call,
            # e.g. ("arg") or ("arg", — the inner " terminates the attribute.
            if re.search(r'\(\s*"[^"]*"\s*[,)]', line):
                self._add_violation(
                    rule_id="TPL-012",
                    rule_name="Double quotes in copyCode template literal",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message="Double quotes inside multi-line copyCode template literal will break HTML attribute parsing",
                    context=line.strip()[:80],
                    suggestion="Change outer attribute to single quotes: @click='copyCode(`...`)'",
                )
|
||
|
||
def _check_pagination_macro_api(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""
|
||
TPL-013: Check for old pagination macro API.
|
||
|
||
The pagination macro was simplified to only accept show_condition.
|
||
It now relies on standardized Alpine.js component properties.
|
||
|
||
OLD (deprecated): {{ pagination(current_page=..., total_pages=...) }}
|
||
NEW (correct): {{ pagination(show_condition="!loading && pagination.total > 0") }}
|
||
"""
|
||
# Skip the macro definition file
|
||
if "shared/macros/pagination.html" in str(file_path):
|
||
return
|
||
|
||
if "noqa: tpl-013" in content.lower():
|
||
return
|
||
|
||
# Old API patterns that indicate deprecated usage
|
||
old_patterns = [
|
||
(r"pagination\s*\([^)]*current_page\s*=", "current_page"),
|
||
(r"pagination\s*\([^)]*total_pages\s*=", "total_pages"),
|
||
(r"pagination\s*\([^)]*page_numbers\s*=", "page_numbers"),
|
||
(r"pagination\s*\([^)]*start_index\s*=", "start_index"),
|
||
(r"pagination\s*\([^)]*end_index\s*=", "end_index"),
|
||
]
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
for pattern, param_name in old_patterns:
|
||
if re.search(pattern, line):
|
||
self._add_violation(
|
||
rule_id="TPL-013",
|
||
rule_name="Use new pagination macro API",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=f"Old pagination API with '{param_name}' parameter",
|
||
context=line.strip()[:80],
|
||
suggestion='Use: {{ pagination(show_condition="!loading && pagination.total > 0") }}',
|
||
)
|
||
break # Only report once per line
|
||
|
||
def _check_modal_simple_macro_api(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""
|
||
TPL-014: Check for old modal_simple macro API.
|
||
|
||
The modal_simple macro now uses {% call %}...{% endcall %} syntax.
|
||
Content (including buttons) goes inside the call block.
|
||
|
||
OLD (deprecated): {{ modal_simple(show_var=..., icon=..., confirm_fn=...) }}
|
||
NEW (correct): {% call modal_simple('id', 'Title', show_var='...') %}...{% endcall %}
|
||
"""
|
||
# Skip the macro definition file
|
||
if "shared/macros/modals.html" in str(file_path):
|
||
return
|
||
|
||
if "noqa: tpl-014" in content.lower():
|
||
return
|
||
|
||
# Old API patterns - using {{ }} instead of {% call %}
|
||
# Also checking for old parameters that don't exist anymore
|
||
old_patterns = [
|
||
(r"\{\{\s*modal_simple\s*\(", "{{ modal_simple( instead of {% call modal_simple("),
|
||
(r"modal_simple\s*\([^)]*icon\s*=", "icon parameter"),
|
||
(r"modal_simple\s*\([^)]*icon_color\s*=", "icon_color parameter"),
|
||
(r"modal_simple\s*\([^)]*confirm_text\s*=", "confirm_text parameter"),
|
||
(r"modal_simple\s*\([^)]*confirm_fn\s*=", "confirm_fn parameter"),
|
||
(r"modal_simple\s*\([^)]*confirm_class\s*=", "confirm_class parameter"),
|
||
(r"modal_simple\s*\([^)]*loading_var\s*=", "loading_var parameter"),
|
||
]
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
for pattern, issue in old_patterns:
|
||
if re.search(pattern, line):
|
||
self._add_violation(
|
||
rule_id="TPL-014",
|
||
rule_name="Use new modal_simple macro API with call block",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=f"Old modal_simple API: {issue}",
|
||
context=line.strip()[:80],
|
||
suggestion="Use: {{% call modal_simple('id', 'Title', show_var='...') %}}...{{% endcall %}}",
|
||
)
|
||
break # Only report once per line
|
||
|
||
def _check_page_header_macro_api(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""
|
||
TPL-015: Check for old page_header macro API.
|
||
|
||
The page_header macro does not accept a buttons=[] parameter.
|
||
Use action_label/action_onclick for single button, or page_header_flex for multiple.
|
||
|
||
OLD (deprecated): {{ page_header('Title', buttons=[...]) }}
|
||
NEW (correct): {{ page_header('Title', action_label='Add', action_onclick='...') }}
|
||
"""
|
||
# Skip the macro definition file
|
||
if "shared/macros/headers.html" in str(file_path):
|
||
return
|
||
|
||
if "noqa: tpl-015" in content.lower():
|
||
return
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
if re.search(r"page_header\s*\([^)]*buttons\s*=", line):
|
||
self._add_violation(
|
||
rule_id="TPL-015",
|
||
rule_name="Use correct page_header macro API",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="page_header does not accept 'buttons' parameter",
|
||
context=line.strip()[:80],
|
||
suggestion="Use action_label/action_onclick, or page_header_flex with {% call %}",
|
||
)
|
||
|
||
def _check_alpine_template_vars(
|
||
self, file_path: Path, content: str, lines: list[str], js_content: str
|
||
):
|
||
"""TPL-010: Check that Alpine variables used in templates are defined in JS"""
|
||
if "noqa: tpl-010" in content.lower():
|
||
return
|
||
|
||
import re
|
||
|
||
# Common Alpine variable patterns that MUST be defined in the component
|
||
# These are variables that are directly referenced in templates
|
||
required_vars = set()
|
||
|
||
# Check for error_state macro usage (requires 'error' var)
|
||
if "error_state(" in content and "error_var=" not in content:
|
||
required_vars.add("error")
|
||
|
||
# Check for action_dropdown macro with custom vars
|
||
dropdown_pattern = re.compile(
|
||
r"action_dropdown\([^)]*open_var=['\"](\w+)['\"]"
|
||
)
|
||
for match in dropdown_pattern.finditer(content):
|
||
required_vars.add(match.group(1))
|
||
|
||
dropdown_pattern2 = re.compile(
|
||
r"action_dropdown\([^)]*loading_var=['\"](\w+)['\"]"
|
||
)
|
||
for match in dropdown_pattern2.finditer(content):
|
||
required_vars.add(match.group(1))
|
||
|
||
# Check if variables are defined in JS
|
||
for var in required_vars:
|
||
# Check if variable is defined in JS (look for "varname:" pattern)
|
||
var_pattern = re.compile(rf"\b{var}\s*:")
|
||
if not var_pattern.search(js_content):
|
||
# Find the line where it's used in template
|
||
for i, line in enumerate(lines, 1):
|
||
if var in line and ("error_state" in line or "action_dropdown" in line):
|
||
self._add_violation(
|
||
rule_id="TPL-010",
|
||
rule_name="Alpine variables must be defined in JS component",
|
||
severity=Severity.ERROR,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=f"Template uses Alpine variable '{var}' but it's not defined in the JS component",
|
||
context=line.strip()[:80],
|
||
suggestion=f"Add '{var}: null,' or '{var}: false,' to the Alpine component's return object",
|
||
)
|
||
break
|
||
|
||
def _check_pagination_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-001: Check for inline pagination that should use macro"""
|
||
# Check if already using the pagination macro
|
||
uses_macro = any(
|
||
"from 'shared/macros/pagination.html'" in line for line in lines
|
||
)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for noqa: FE-001 comment
|
||
has_noqa = any("noqa: fe-001" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for signs of inline pagination
|
||
pagination_indicators = [
|
||
('aria-label="Table navigation"', "Inline table navigation found"),
|
||
("previousPage()", "Inline pagination controls found"),
|
||
("nextPage()", "Inline pagination controls found"),
|
||
("goToPage(", "Inline pagination controls found"),
|
||
]
|
||
|
||
for i, line in enumerate(lines, 1):
|
||
for pattern, message in pagination_indicators:
|
||
if pattern in line:
|
||
# Skip if it's in a comment
|
||
stripped = line.strip()
|
||
if stripped.startswith(("{#", "<!--")):
|
||
continue
|
||
|
||
self._add_violation(
|
||
rule_id="FE-001",
|
||
rule_name="Use pagination macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message=message + " - use shared macro instead",
|
||
context=stripped[:60],
|
||
suggestion="{% from 'shared/macros/pagination.html' import pagination %}\n{{ pagination() }}",
|
||
)
|
||
return # Only report once per file
|
||
|
||
def _check_icon_helper_usage(self, file_path: Path, content: str, lines: list[str]):
|
||
"""FE-002: Check for inline SVGs that should use $icon() helper"""
|
||
# Check for noqa: FE-002 comment
|
||
has_noqa = any("noqa: fe-002" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Pattern to find inline SVGs
|
||
svg_pattern = re.compile(
|
||
r"<svg[^>]*viewBox[^>]*>.*?</svg>", re.DOTALL | re.IGNORECASE
|
||
)
|
||
|
||
# Find all SVG occurrences
|
||
for match in svg_pattern.finditer(content):
|
||
# Find line number
|
||
line_num = content[: match.start()].count("\n") + 1
|
||
|
||
# Skip if this is likely in a code example (inside <pre> or <code>)
|
||
context_before = content[max(0, match.start() - 200) : match.start()]
|
||
if "<pre" in context_before or "<code" in context_before:
|
||
continue
|
||
|
||
self._add_violation(
|
||
rule_id="FE-002",
|
||
rule_name="Use $icon() helper",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=line_num,
|
||
message="Inline SVG found - use $icon() helper for consistency",
|
||
context="<svg...>",
|
||
suggestion="<span x-html=\"$icon('icon-name', 'w-4 h-4')\"></span>",
|
||
)
|
||
|
||
def _check_alerts_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-003: Check for inline loading/error states that should use alerts macro"""
|
||
# Check if already using the alerts macro
|
||
uses_macro = any("from 'shared/macros/alerts.html'" in line for line in lines)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for noqa comment
|
||
has_noqa = any("noqa: fe-003" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for inline loading states
|
||
for i, line in enumerate(lines, 1):
|
||
# Loading state pattern: text-center py-12 with loading content
|
||
if 'x-show="loading"' in line or "x-show='loading'" in line:
|
||
# Check if next few lines have spinner pattern
|
||
context_lines = "\n".join(lines[i - 1 : i + 3])
|
||
if "text-center" in context_lines and "py-12" in context_lines:
|
||
self._add_violation(
|
||
rule_id="FE-003",
|
||
rule_name="Use alerts macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Inline loading state found - use loading_state macro",
|
||
context=line.strip()[:60],
|
||
suggestion="{% from 'shared/macros/alerts.html' import loading_state %}\n{{ loading_state('Loading...') }}",
|
||
)
|
||
return # Only report once per file
|
||
|
||
# Error state pattern: bg-red-100 border-red-400
|
||
if "bg-red-100" in line and "border-red-400" in line:
|
||
self._add_violation(
|
||
rule_id="FE-003",
|
||
rule_name="Use alerts macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Inline error state found - use error_state macro",
|
||
context=line.strip()[:60],
|
||
suggestion="{% from 'shared/macros/alerts.html' import error_state %}\n{{ error_state('Error', 'error') }}",
|
||
)
|
||
return
|
||
|
||
def _check_modals_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-004: Check for inline modals that should use modals macro"""
|
||
# Check if already using the modals macro
|
||
uses_macro = any("from 'shared/macros/modals.html'" in line for line in lines)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for noqa comment
|
||
has_noqa = any("noqa: fe-004" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for modal patterns: fixed inset-0 with role="dialog" or modal backdrop
|
||
for i, line in enumerate(lines, 1):
|
||
if "fixed inset-0" in line and (
|
||
"z-50" in line or "z-30" in line or "z-40" in line
|
||
):
|
||
# Check context for modal indicators
|
||
context_lines = "\n".join(lines[max(0, i - 1) : min(len(lines), i + 5)])
|
||
if (
|
||
'role="dialog"' in context_lines
|
||
or "bg-opacity-50" in context_lines
|
||
or "bg-black/50" in context_lines
|
||
):
|
||
self._add_violation(
|
||
rule_id="FE-004",
|
||
rule_name="Use modals macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Inline modal found - use modal macro for consistency",
|
||
context=line.strip()[:60],
|
||
suggestion="{% from 'shared/macros/modals.html' import modal %}\n{% call modal('myModal', 'Title', 'isModalOpen') %}...{% endcall %}",
|
||
)
|
||
return
|
||
|
||
def _check_tables_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-005: Check for inline table wrappers that should use tables macro"""
|
||
# Check if already using the tables macro
|
||
uses_macro = any("from 'shared/macros/tables.html'" in line for line in lines)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for noqa comment
|
||
has_noqa = any("noqa: fe-005" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for table wrapper pattern: overflow-hidden rounded-lg shadow-xs
|
||
for i, line in enumerate(lines, 1):
|
||
if (
|
||
"overflow-hidden" in line
|
||
and "rounded-lg" in line
|
||
and "shadow-xs" in line
|
||
):
|
||
# Check if there's a table nearby
|
||
context_lines = "\n".join(lines[i - 1 : min(len(lines), i + 10)])
|
||
if "<table" in context_lines:
|
||
self._add_violation(
|
||
rule_id="FE-005",
|
||
rule_name="Use tables macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Inline table wrapper found - use table_wrapper macro",
|
||
context=line.strip()[:60],
|
||
suggestion="{% from 'shared/macros/tables.html' import table_wrapper, table_header %}\n{% call table_wrapper() %}...{% endcall %}",
|
||
)
|
||
return
|
||
|
||
def _check_dropdowns_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-006: Check for inline dropdowns that should use dropdowns macro"""
|
||
# Check if already using the dropdowns macro
|
||
uses_macro = any(
|
||
"from 'shared/macros/dropdowns.html'" in line for line in lines
|
||
)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for noqa comment
|
||
has_noqa = any("noqa: fe-006" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for dropdown patterns: @click.outside or @click.away with menu positioning
|
||
for i, line in enumerate(lines, 1):
|
||
# Match @click.away="something = false" or @click.outside="..."
|
||
if "@click.away=" in line or "@click.outside=" in line:
|
||
# Skip if this is a modal (modals also use @click.away but have different structure)
|
||
context_lines = "\n".join(
|
||
lines[max(0, i - 5) : min(len(lines), i + 10)]
|
||
)
|
||
|
||
# Skip if it looks like a modal (fixed inset-0)
|
||
if "fixed inset-0" in context_lines:
|
||
continue
|
||
|
||
# Look for dropdown menu styling: absolute positioning with z-index
|
||
if "absolute" in context_lines and (
|
||
"z-10" in context_lines
|
||
or "z-20" in context_lines
|
||
or "z-50" in context_lines
|
||
):
|
||
# Additional indicators: shadow, border, bg-white/dark, rounded
|
||
if "shadow" in context_lines or "border" in context_lines:
|
||
self._add_violation(
|
||
rule_id="FE-006",
|
||
rule_name="Use dropdowns macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Inline dropdown menu found - use dropdown macro",
|
||
context=line.strip()[:60],
|
||
suggestion="{% from 'shared/macros/dropdowns.html' import dropdown, dropdown_item %}\n{% call dropdown('Label', 'isOpen') %}...{% endcall %}",
|
||
)
|
||
return
|
||
|
||
def _check_headers_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-007: Check for inline page headers that should use headers macro"""
|
||
# Check if already using the headers macro
|
||
uses_macro = any("from 'shared/macros/headers.html'" in line for line in lines)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for noqa comment
|
||
has_noqa = any("noqa: fe-007" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for page header pattern: flex with h2 text-2xl font-semibold
|
||
for i, line in enumerate(lines, 1):
|
||
if "<h2" in line and "text-2xl" in line and "font-semibold" in line:
|
||
# Check context for typical page header pattern
|
||
context_before = "\n".join(lines[max(0, i - 3) : i])
|
||
if "flex" in context_before and (
|
||
"justify-between" in context_before
|
||
or "items-center" in context_before
|
||
):
|
||
self._add_violation(
|
||
rule_id="FE-007",
|
||
rule_name="Use headers macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Inline page header found - use page_header macro",
|
||
context=line.strip()[:60],
|
||
suggestion="{% from 'shared/macros/headers.html' import page_header %}\n{{ page_header('Title', action_label='Create', action_url='/create') }}",
|
||
)
|
||
return
|
||
|
||
def _check_number_stepper_macro_usage(
|
||
self, file_path: Path, content: str, lines: list[str]
|
||
):
|
||
"""FE-008: Check for raw number inputs that should use number_stepper macro
|
||
|
||
Detects <input type="number"> that should use the number_stepper macro for
|
||
consistent styling and dark mode support.
|
||
|
||
Exceptions:
|
||
- ID fields (placeholder contains 'id' or 'ID')
|
||
- Files already importing number_stepper
|
||
- Lines with noqa: FE-008 comment
|
||
"""
|
||
# Check if already using the number_stepper macro
|
||
uses_macro = any("number_stepper" in line for line in lines)
|
||
if uses_macro:
|
||
return
|
||
|
||
# Check for file-level noqa comment
|
||
has_noqa = any("noqa: fe-008" in line.lower() for line in lines)
|
||
if has_noqa:
|
||
return
|
||
|
||
# Look for raw number inputs
|
||
for i, line in enumerate(lines, 1):
|
||
if 'type="number"' in line or "type='number'" in line:
|
||
# Skip if line has noqa comment
|
||
if "noqa" in line.lower():
|
||
continue
|
||
|
||
# Skip if it looks like an ID field (check surrounding lines for context)
|
||
context_lines = "\n".join(
|
||
lines[max(0, i - 3) : min(len(lines), i + 2)]
|
||
).lower()
|
||
if (
|
||
"user id" in context_lines
|
||
or "placeholder" in context_lines
|
||
and "id" in context_lines
|
||
):
|
||
continue
|
||
|
||
# Skip if it's in a comment
|
||
stripped = line.strip()
|
||
if (
|
||
stripped.startswith(("{#", "<!--", "//"))
|
||
):
|
||
continue
|
||
|
||
self._add_violation(
|
||
rule_id="FE-008",
|
||
rule_name="Use number_stepper macro",
|
||
severity=Severity.WARNING,
|
||
file_path=file_path,
|
||
line_number=i,
|
||
message="Raw number input found - use number_stepper macro for consistent styling",
|
||
context=stripped[:70],
|
||
suggestion="{% from 'shared/macros/inputs.html' import number_stepper %}\n{{ number_stepper(model='fieldName', min=1, max=100) }}\nOr add {# noqa: FE-008 #} if this is intentional (e.g., ID field)",
|
||
)
|
||
return # Only report once per file
|
||
|
||
def _validate_api_endpoints(self, target_path: Path):
|
||
"""Validate API endpoint rules (API-001, API-002, API-003, API-004)"""
|
||
print("📡 Validating API endpoints...")
|
||
|
||
api_files = list(target_path.glob("app/api/v1/**/*.py"))
|
||
api_files += list(target_path.glob("app/modules/*/routes/api/**/*.py"))
|
||
self.result.files_checked += len(api_files)
|
||
|
||
for file_path in api_files:
|
||
if self._should_ignore_file(file_path):
|
||
continue
|
||
|
||
content = file_path.read_text()
|
||
lines = content.split("\n")
|
||
|
||
# API-001: Check for Pydantic model usage
|
||
self._check_pydantic_usage(file_path, content, lines)
|
||
|
||
# API-002: Check for business logic in endpoints
|
||
self._check_no_business_logic_in_endpoints(file_path, content, lines)
|
||
|
||
# API-003: Check exception handling
|
||
self._check_endpoint_exception_handling(file_path, content, lines)
|
||
|
||
# API-004: Check authentication
|
||
self._check_endpoint_authentication(file_path, content, lines)
|
||
|
||
# API-005: Check store_id scoping for store/shop endpoints
|
||
self._check_store_scoping(file_path, content, lines)
|
||
|
||
# API-007: Check for direct model imports
|
||
self._check_no_model_imports(file_path, content, lines)
|
||
|
||
def _check_pydantic_usage(self, file_path: Path, content: str, lines: list[str]):
    """API-001: Ensure endpoints use Pydantic models.

    Heuristic: for each ``@router.<verb>`` decorator, scan the following
    ~20 lines for a single-line ``return {...}`` dict literal and flag it
    as a raw-dict response that should be a Pydantic response model.

    Fix: removed a leftover no-op expression (``len(line) - len(line.lstrip())``)
    whose result was discarded — dead code from an abandoned indent computation.
    """
    rule = self._get_rule("API-001")
    if not rule:
        return

    # Route decorator and single-line dict-literal return patterns.
    route_pattern = r"@router\.(get|post|put|delete|patch)"
    dict_return_pattern = r"return\s+\{.*\}"

    for i, line in enumerate(lines, 1):
        # Check for dict returns in endpoints
        if re.search(route_pattern, line):
            # Look ahead for the function body following the decorator.
            func_start = i

            for j in range(func_start, min(func_start + 20, len(lines))):
                if j >= len(lines):
                    break

                func_line = lines[j]
                if re.search(dict_return_pattern, func_line):
                    self._add_violation(
                        rule_id="API-001",
                        rule_name=rule["name"],
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=j + 1,
                        message="Endpoint returns raw dict instead of Pydantic model",
                        context=func_line.strip(),
                        suggestion="Define a Pydantic response model and use response_model parameter",
                    )
|
||
|
||
def _check_no_business_logic_in_endpoints(
    self, file_path: Path, content: str, lines: list[str]
):
    """API-002: Ensure no business logic in endpoints"""
    rule = self._get_rule("API-002")
    if not rule:
        return

    # NOTE: db.commit() is intentionally NOT included here.
    # Transaction control (commit) is allowed at endpoint level;
    # only business-logic database operations are flagged.
    anti_patterns = (
        (r"db\.add\(", "Creating entities should be in service layer"),
        (r"db\.delete\(", "Deleting entities should be in service layer"),
        (r"db\.query\(", "Database queries should be in service layer"),
        (r"db\.execute\(", "Database operations should be in service layer"),
    )

    for line_no, line in enumerate(lines, start=1):
        # Calls routed through a service object are allowed.
        if "_service." in line or "service." in line:
            continue

        for pattern, message in anti_patterns:
            if re.search(pattern, line):
                self._add_violation(
                    rule_id="API-002",
                    rule_name=rule["name"],
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=line_no,
                    message=message,
                    context=line.strip(),
                    suggestion="Move database operations to service layer",
                )
|
||
|
||
def _check_endpoint_exception_handling(
    self, file_path: Path, content: str, lines: list[str]
):
    """API-003: Check that endpoints do NOT raise exceptions directly.

    The architecture uses:
    - Dependencies (deps.py) for authentication/authorization validation
    - Services for business logic validation
    - Global exception handler that catches OrionException subclasses

    Endpoints should be a thin orchestration layer that trusts dependencies
    and services to handle all validation. They should NOT raise exceptions.
    """
    rule = self._get_rule("API-003")
    if not rule:
        return

    # Skip exception handler file and deps.py - they're allowed to raise exceptions
    file_path_str = str(file_path)
    if "exceptions/handler.py" in file_path_str or file_path_str.endswith(
        "deps.py"
    ):
        return

    # Patterns that indicate endpoints are raising exceptions (BAD).
    # Checked with plain substring matching below.
    exception_patterns = [
        ("raise HTTPException", "Endpoint raises HTTPException directly"),
        (
            "raise InvalidTokenException",
            "Endpoint raises InvalidTokenException - move to dependency",
        ),
        (
            "raise InsufficientPermissionsException",
            "Endpoint raises permission exception - move to dependency",
        ),
        (
            "raise UnauthorizedStoreAccessException",
            "Endpoint raises auth exception - move to dependency or service",
        ),
    ]

    # Pattern that indicates redundant validation (BAD).
    # Checked with regex matching below.
    redundant_patterns = [
        (
            r"if not hasattr\(current_user.*token_store",
            "Redundant token_store check - get_current_store_api guarantees this",
        ),
        (
            r"if not hasattr\(current_user.*token_store_id",
            "Redundant token_store_id check - dependency guarantees this",
        ),
    ]

    for i, line in enumerate(lines, 1):
        # Skip comments
        stripped = line.strip()
        if stripped.startswith("#"):
            continue

        # Check for direct exception raising (substring match)
        for pattern, message in exception_patterns:
            if pattern in line:
                self._add_violation(
                    rule_id="API-003",
                    rule_name=rule["name"],
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message=message,
                    context=stripped[:80],
                    suggestion="Let dependencies or services handle validation and raise exceptions",
                )

        # Check for redundant validation patterns (regex match)
        for pattern, message in redundant_patterns:
            if re.search(pattern, line):
                self._add_violation(
                    rule_id="API-003",
                    rule_name=rule["name"],
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message=message,
                    context=stripped[:80],
                    suggestion="Remove redundant check - auth dependency guarantees this attribute is present",
                )
|
||
|
||
def _check_endpoint_authentication(
    self, file_path: Path, content: str, lines: list[str]
):
    """API-004: Check authentication on endpoints

    Flags mutating endpoints (POST/PUT/DELETE) that have no recognized
    authentication dependency nearby and no explicit public marker.

    Automatically skips:
    - Auth endpoint files (*/auth.py) - login/logout are intentionally public
    - Webhook files - they receive external callbacks
    - Endpoints marked with '# public' comment
    """
    rule = self._get_rule("API-004")
    if not rule:
        return

    # Skip auth endpoint files entirely - they are intentionally public
    file_path_str = str(file_path)
    if file_path_str.endswith(("/auth.py", "\\auth.py")):
        return
    if "_auth.py" in file_path.name:
        return
    # Skip webhook files - they receive external callbacks
    if file_path.name == "webhooks.py":
        return

    # This is a warning-level check
    # Look for endpoints without proper authentication
    # Valid auth patterns:
    # - Depends(get_current_*) - direct user authentication
    # - Depends(require_store_*) - store permission dependencies
    # - Depends(require_any_store_*) - any permission check
    # - Depends(require_all_store*) - all permissions check
    # - Depends(get_user_permissions) - permission fetching
    auth_patterns = [
        "Depends(get_current_",
        "Depends(get_merchant_for_current_user",
        "Depends(require_store_",
        "Depends(require_any_store_",
        "Depends(require_all_store",
        "Depends(get_user_permissions",
    ]

    for i, line in enumerate(lines, 1):
        # Only mutating verbs are checked (substring match on the decorator line).
        if "@router." in line and (
            "post" in line or "put" in line or "delete" in line
        ):
            # Check if decorator line itself has a public/authenticated marker
            if (
                "# public" in line.lower()
                or "# authenticated" in line.lower()
                or "# noqa: api-004" in line.lower()
            ):
                continue

            # Check previous line and next 15 lines for auth or public marker
            # (increased from 5 to handle multi-line decorators and long function signatures)
            has_auth = False
            is_public = False
            # i is 1-indexed, lines is 0-indexed
            # So lines[i-1] is the decorator line, lines[i-2] is the line before
            start_idx = max(0, i - 2)  # Line before decorator (1-2 lines up)
            end_idx = i + 15  # 15 lines after decorator
            context_lines = lines[start_idx:end_idx]

            for ctx_line in context_lines:
                # Check for any valid auth pattern
                if any(pattern in ctx_line for pattern in auth_patterns):
                    has_auth = True
                    break
                # Check for public endpoint markers
                if (
                    "# public" in ctx_line.lower()
                    or "# authenticated" in ctx_line.lower()
                    or "# noqa: api-004" in ctx_line.lower()
                ):
                    is_public = True
                    break

            # Hidden (undocumented) endpoints are also exempt.
            # NOTE(review): this join starts at lines[i], i.e. the line AFTER
            # the decorator — presumably intentional, but differs from the
            # marker window above; confirm.
            if (
                not has_auth
                and not is_public
                and "include_in_schema=False" not in " ".join(lines[i : i + 15])
            ):
                # Determine appropriate suggestion based on file path
                file_path_str = str(file_path)
                if "/store/" in file_path_str:
                    suggestion = "Add Depends(get_current_store_api) or permission dependency, or mark as '# public'"
                elif "/admin/" in file_path_str:
                    suggestion = (
                        "Add Depends(get_current_admin_api), or mark as '# public'"
                    )
                elif "/storefront/" in file_path_str:
                    suggestion = "Add Depends(get_current_customer_api), or mark as '# public'"
                else:
                    suggestion = "Add authentication dependency or mark as '# public' if intentionally unauthenticated"

                self._add_violation(
                    rule_id="API-004",
                    rule_name=rule["name"],
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Endpoint may be missing authentication",
                    context=line.strip(),
                    suggestion=suggestion,
                )
|
||
|
||
def _check_store_scoping(self, file_path: Path, content: str, lines: list[str]):
    """API-005: Check that store/storefront endpoints scope queries to store_id"""
    path_text = str(file_path)

    # Rule only applies to store and storefront API modules.
    if not ("/store/" in path_text or "/storefront/" in path_text):
        return

    # Auth modules are exempt.
    if path_text.endswith("auth.py"):
        return

    # File-level suppression marker.
    if "noqa: api-005" in content.lower():
        return

    # Heuristic (not perfect): flag .query(...).all() calls that have no
    # store filter in the surrounding lines.
    for line_no, line in enumerate(lines, start=1):
        if ".query(" not in line or ".all()" not in line:
            continue

        window_start = max(0, line_no - 5)
        window_end = min(len(lines), line_no + 3)
        nearby = "\n".join(lines[window_start:window_end])

        if "store_id" in nearby or "token_store_id" in nearby:
            continue

        self._add_violation(
            rule_id="API-005",
            rule_name="Multi-tenant queries must scope to store_id",
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=line_no,
            message="Query in store/storefront endpoint may not be scoped to store_id",
            context=line.strip()[:60],
            suggestion="Add .filter(Model.store_id == store_id) to ensure tenant isolation",
        )
        return  # Only report once per file
|
||
|
||
def _check_no_model_imports(
    self, file_path: Path, content: str, lines: list[str]
):
    """API-007: Check that API endpoints do NOT import database models directly.

    Routes should follow layered architecture: Routes → Services → Models.
    Database models should only be imported in services, not in API routes.

    Allowed: schemas, services, deps, database session
    Not allowed: models.database.*, app.modules.*.models.*
    """
    rule = self._get_rule("API-007")
    if not rule:
        return

    # Skip deps.py - it may need model access for queries
    file_path_str = str(file_path)
    if file_path_str.endswith("deps.py"):
        return

    # Check for noqa.
    # NOTE(review): this file-level check matches "api-007" anywhere in the
    # file, so the per-line noqa check in the loop below can never trigger —
    # any file containing the marker is skipped entirely here.
    if "api-007" in content.lower():
        return

    # Patterns that indicate direct model imports
    model_import_patterns = [
        (r"from models\.database\.", "Importing from legacy models.database location"),
        (r"from app\.modules\.[a-z_]+\.models\.", "Importing from module models"),
    ]

    for i, line in enumerate(lines, 1):
        # Skip comments
        stripped = line.strip()
        if stripped.startswith("#"):
            continue

        # Check for noqa on this specific line
        if "api-007" in line.lower():
            continue

        for pattern, message in model_import_patterns:
            if re.search(pattern, line):
                self._add_violation(
                    rule_id="API-007",
                    rule_name="API endpoints must NOT import database models directly",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message=message,
                    context=line.strip()[:80],
                    suggestion="Import from services or schemas instead. Use CustomerContext schema for dependency injection.",
                )
|
||
|
||
def _validate_service_layer(self, target_path: Path):
    """Validate service layer rules (SVC-001 to SVC-007)"""
    print("🔧 Validating service layer...")

    # Service modules live in two layouts: shared app/services and per-module.
    service_files = [
        *target_path.glob("app/services/**/*.py"),
        *target_path.glob("app/modules/*/services/**/*.py"),
    ]
    self.result.files_checked += len(service_files)

    # Each checker receives (file_path, content, lines); run them in rule order.
    checks = (
        self._check_no_http_exception_in_services,  # SVC-001: no HTTPException
        self._check_service_exceptions,  # SVC-002: proper exception handling
        self._check_db_session_parameter,  # SVC-003: DB session as parameter
        self._check_service_store_scoping,  # SVC-005: store scoping
        self._check_no_commit_in_services,  # SVC-006: no db.commit()
    )

    for file_path in service_files:
        if self._should_ignore_file(file_path):
            continue
        content = file_path.read_text()
        lines = content.split("\n")
        for check in checks:
            check(file_path, content, lines)
|
||
|
||
def _check_service_store_scoping(
    self, file_path: Path, content: str, lines: list[str]
):
    """SVC-005: Check that service queries are scoped to store_id in multi-tenant context.

    Heuristic: flags ``.query(...).all()`` calls with no ``store_id`` in the
    surrounding lines, when the enclosing method signature also lacks a
    ``store_id`` parameter. Reports at most one violation per file.
    """
    # Skip admin services that may legitimately access all stores
    file_path_str = str(file_path)
    if "admin" in file_path_str.lower():
        return

    # File-level suppression marker.
    if "svc-005" in content.lower():
        return

    # Look for patterns that suggest unscoped queries
    for i, line in enumerate(lines, 1):
        # Check for .all() queries that might not be scoped
        if ".query(" in line and ".all()" in line:
            # Check surrounding context for store filtering
            context_start = max(0, i - 5)
            context_end = min(len(lines), i + 3)
            context_lines = "\n".join(lines[context_start:context_end])

            if "store_id" not in context_lines:
                # Check if the enclosing method has store_id as a parameter.
                method_start = self._find_method_start(lines, i)
                # Fix: explicit None check — index 0 is a valid method
                # position, but a bare truthiness test would treat it as
                # "not found" and silently skip the violation.
                if method_start is not None:
                    method_sig = lines[method_start]
                    if "store_id" not in method_sig:
                        self._add_violation(
                            rule_id="SVC-005",
                            rule_name="Service must scope queries to store_id",
                            severity=Severity.INFO,
                            file_path=file_path,
                            line_number=i,
                            message="Query may not be scoped to store_id",
                            context=line.strip()[:60],
                            suggestion="Add store_id parameter and filter queries by it",
                        )
                        return  # Only report once per file
|
||
|
||
def _find_method_start(self, lines: list[str], current_line: int) -> int | None:
    """Return the 0-based index of the nearest preceding ``def`` line.

    Scans backwards from ``current_line`` (1-based) over at most 30 lines.
    Returns None if no line containing "def " is found in that window.

    Fix: the previous exclusive range stop of ``max(0, current_line - 30)``
    meant line index 0 was never examined; subtract 1 so a method defined
    on the first line of the file can be found.
    """
    for i in range(current_line - 1, max(0, current_line - 30) - 1, -1):
        if "def " in lines[i]:
            return i
    return None
|
||
|
||
def _check_no_http_exception_in_services(
    self, file_path: Path, content: str, lines: list[str]
):
    """SVC-001: Services must not raise HTTPException"""
    rule = self._get_rule("SVC-001")
    if not rule:
        return

    # Both import forms of HTTPException are forbidden in services.
    import_forms = (
        "from fastapi import HTTPException",
        "from fastapi.exceptions import HTTPException",
    )

    def report(line_no: int, text: str, message: str, suggestion: str) -> None:
        # Shared violation emitter for both SVC-001 sub-checks.
        self._add_violation(
            rule_id="SVC-001",
            rule_name=rule["name"],
            severity=Severity.ERROR,
            file_path=file_path,
            line_number=line_no,
            message=message,
            context=text.strip(),
            suggestion=suggestion,
        )

    for line_no, text in enumerate(lines, start=1):
        if "raise HTTPException" in text:
            report(
                line_no,
                text,
                "Service raises HTTPException - use domain exceptions instead",
                "Create custom exception class (e.g., StoreNotFoundError) and raise that",
            )

        if any(form in text for form in import_forms):
            report(
                line_no,
                text,
                "Service imports HTTPException - services should not know about HTTP",
                "Remove HTTPException import and use domain exceptions",
            )
|
||
|
||
def _check_service_exceptions(
    self, file_path: Path, content: str, lines: list[str]
):
    """SVC-002: Check for proper exception handling"""
    rule = self._get_rule("SVC-002")
    if not rule:
        return

    # A bare ``raise Exception(...)`` hides the failure category.
    generic_raise = re.compile(r"\s*raise Exception\(")

    for line_no, text in enumerate(lines, start=1):
        if generic_raise.match(text):
            self._add_violation(
                rule_id="SVC-002",
                rule_name=rule["name"],
                severity=Severity.WARNING,
                file_path=file_path,
                line_number=line_no,
                message="Service raises generic Exception - use specific domain exception",
                context=text.strip(),
                suggestion="Create custom exception class for this error case",
            )
|
||
|
||
def _check_db_session_parameter(
    self, file_path: Path, content: str, lines: list[str]
):
    """SVC-003: Service methods should accept db session as parameter"""
    rule = self._get_rule("SVC-003")
    if not rule:
        return

    for line_no, text in enumerate(lines, start=1):
        # Flag services that open their own sessions; lines that also
        # mention ``class`` (e.g. a class definition) are ignored.
        if "SessionLocal()" not in text or "class" in text:
            continue

        self._add_violation(
            rule_id="SVC-003",
            rule_name=rule["name"],
            severity=Severity.ERROR,
            file_path=file_path,
            line_number=line_no,
            message="Service creates database session internally",
            context=text.strip(),
            suggestion="Accept db: Session as method parameter instead",
        )
|
||
|
||
def _check_no_commit_in_services(
    self, file_path: Path, content: str, lines: list[str]
):
    """SVC-006: Services should NOT call db.commit()

    Transaction control belongs at the API endpoint level.
    Exception: log_service.py may need immediate commits for audit logs.
    Exception: Background task processing may need incremental commits.
    """
    rule = self._get_rule("SVC-006")
    if not rule:
        return

    # Check exceptions from YAML config (path substrings to skip entirely)
    exceptions = rule.get("pattern", {}).get("exceptions", [])
    for exc in exceptions:
        if exc in str(file_path):
            return

    # Check for file-level noqa comment.
    # NOTE(review): this matches "svc-006" anywhere in the file, so the
    # per-line noqa check in the loop below can never trigger — any file
    # containing the marker is skipped entirely here.
    if "svc-006" in content.lower():
        return

    for i, line in enumerate(lines, 1):
        if "db.commit()" in line:
            # Skip if it's a comment
            stripped = line.strip()
            if stripped.startswith("#"):
                continue

            # Skip if line has inline noqa comment
            if "svc-006" in line.lower():
                continue

            self._add_violation(
                rule_id="SVC-006",
                rule_name=rule["name"],
                severity=Severity.WARNING,
                file_path=file_path,
                line_number=i,
                message="Service calls db.commit() - transaction control should be at endpoint level",
                context=stripped,
                suggestion="Remove db.commit() from service; let endpoint handle transaction. For background tasks, add # SVC-006",
            )
|
||
|
||
def _validate_models(self, target_path: Path):
    """Validate model rules (MDL-001 to MDL-004).

    Checks database models (MDL-001, MDL-004) and schema models
    (MDL-002, MDL-003) in both the legacy and modular layouts.
    """
    print("📦 Validating models...")

    # Validate database models
    db_model_files = list(target_path.glob("models/database/**/*.py"))
    db_model_files += list(target_path.glob("app/modules/*/models/**/*.py"))
    self.result.files_checked += len(db_model_files)

    for file_path in db_model_files:
        if self._should_ignore_file(file_path) or file_path.name == "__init__.py":
            continue

        content = file_path.read_text()
        lines = content.split("\n")

        # MDL-001: Check for SQLAlchemy Base inheritance
        self._check_sqlalchemy_base_usage(file_path, content, lines)

        # MDL-004: Check table naming (must be plural)
        self._check_table_naming(file_path, content)

    # Validate schema models
    schema_model_files = list(target_path.glob("models/schema/**/*.py"))
    schema_model_files += list(target_path.glob("app/modules/*/schemas/**/*.py"))
    self.result.files_checked += len(schema_model_files)

    for file_path in schema_model_files:
        if self._should_ignore_file(file_path) or file_path.name == "__init__.py":
            continue

        content = file_path.read_text()
        lines = content.split("\n")

        # MDL-002: Check for mixing SQLAlchemy and Pydantic in one class
        for i, line in enumerate(lines, 1):
            if re.search(r"class.*\(Base.*,.*BaseModel.*\)", line):
                self._add_violation(
                    rule_id="MDL-002",
                    rule_name="Separate SQLAlchemy and Pydantic models",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message="Model mixes SQLAlchemy Base and Pydantic BaseModel",
                    context=line.strip(),
                    suggestion="Keep SQLAlchemy models and Pydantic models separate",
                )

        # MDL-003: Check for from_attributes in Pydantic response models
        self._check_pydantic_from_attributes(file_path, content)
|
||
|
||
def _check_sqlalchemy_base_usage(
    self, file_path: Path, content: str, lines: list[str]
):
    """MDL-001: Check that database models inherit from SQLAlchemy Base.

    Flags classes that declare ``__tablename__`` in their own body but do
    not list Base (or db.Model) among their base classes.
    """
    # Look for class definitions
    class_pattern = re.compile(r"class\s+(\w+)\s*\(([^)]+)\):")

    for match in class_pattern.finditer(content):
        class_name = match.group(1)
        base_classes = match.group(2)
        # 1-based line number of the class definition
        line_num = content[: match.start()].count("\n") + 1

        # Skip non-model classes (mixins, helpers, etc.)
        if class_name.endswith("Mixin") or class_name.startswith("_"):
            continue

        # Skip Enum classes - they inherit from (str, Enum) or (Enum)
        if "Enum" in base_classes:
            continue

        # Check if it inherits from Base
        if "Base" not in base_classes and "db.Model" not in base_classes:
            # Check if it might be a model - MUST have __tablename__ in its own class body
            class_body_start = match.end()
            # Find the end of this class body (next class definition or EOF)
            next_class_match = class_pattern.search(content, class_body_start)
            if next_class_match:
                class_body = content[class_body_start : next_class_match.start()]
            else:
                class_body = content[class_body_start:]

            # Only flag if this specific class has __tablename__ (definite model indicator)
            if "__tablename__" in class_body:
                self._add_violation(
                    rule_id="MDL-001",
                    rule_name="Database models must use SQLAlchemy Base",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=line_num,
                    message=f"Model '{class_name}' appears to be a database model but doesn't inherit from Base",
                    context=f"class {class_name}({base_classes})",
                    suggestion="Inherit from Base: class MyModel(Base):",
                )
|
||
|
||
def _check_table_naming(self, file_path: Path, content: str):
    """MDL-004: Check that table names are plural (industry standard).

    Following Rails, Django, Laravel conventions - tables represent
    collections of entities so should use plural names: users, orders,
    products.
    """
    # Common singular names mapped to their expected plural form.
    singular_to_plural = {
        "store": "stores",
        "product": "products",
        "order": "orders",
        "user": "users",
        "customer": "customers",
        "category": "categories",
        "payment": "payments",
        "setting": "settings",
        "item": "items",
        "image": "images",
        "role": "roles",
        "merchant": "merchants",
    }

    # Scan every __tablename__ assignment in the file.
    for match in re.finditer(r'__tablename__\s*=\s*["\'](\w+)["\']', content):
        table_name = match.group(1)
        plural = singular_to_plural.get(table_name)
        if plural is None:
            continue

        line_num = content.count("\n", 0, match.start()) + 1
        self._add_violation(
            rule_id="MDL-004",
            rule_name="Database tables use plural names",
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=line_num,
            message=f"Table name '{table_name}' should be plural (industry standard)",
            context=f'__tablename__ = "{table_name}"',
            suggestion=f'Use __tablename__ = "{plural}"',
        )
|
||
|
||
def _check_pydantic_from_attributes(self, file_path: Path, content: str):
    """MDL-003: Check that Pydantic response models have from_attributes.

    Heuristic: only classes named *Response/*Out/*Read that look like ORM
    entity responses are flagged; stats/aggregate and wrapper/list models
    are skipped because they are built from plain dicts.
    """
    # Look for response model classes that likely represent ORM entities
    # Skip stats/aggregate models which use plain dicts
    response_pattern = re.compile(
        r"class\s+(\w*Response\w*|\w*Out\w*|\w*Read\w*)\s*\(([^)]+)\):"
    )

    for match in response_pattern.finditer(content):
        class_name = match.group(1)
        base_classes = match.group(2)
        class_start = match.end()
        # 1-based line number of the class definition
        line_num = content[: match.start()].count("\n") + 1

        # Skip stats/aggregate response models - they use plain dicts, not ORM objects
        stats_patterns = [
            "Stats",
            "Analytics",
            "Dashboard",
            "Summary",
            "Count",
            "Total",
        ]
        if any(p in class_name for p in stats_patterns):
            continue

        # Get class body - find the next class definition to properly scope
        next_class = re.search(r"\nclass\s+\w+", content[class_start:])
        if next_class:
            class_body = content[class_start : class_start + next_class.start()]
        else:
            # No later class: cap the scan window at 800 characters
            class_body = content[class_start : class_start + 800]

        # Check for from_attributes in Config or model_config
        # Supports multiple formats:
        # - model_config = {"from_attributes": True}
        # - model_config = ConfigDict(from_attributes=True)
        # - class Config: from_attributes = True
        # - orm_mode = True (Pydantic v1)
        has_from_attributes = any(
            [
                "from_attributes" in class_body and "True" in class_body,
                "orm_mode" in class_body and "True" in class_body,
            ]
        )

        if not has_from_attributes:
            # Only flag if it looks like an ORM entity response:
            # - Has an 'id: int' field as a direct attribute (typical ORM primary key)
            # - AND has at least one other typical ORM field (created_at, updated_at, etc.)
            # Skip wrapper/list responses that just contain other models
            has_id_field = re.search(r"^\s+id\s*:\s*int", class_body, re.MULTILINE)
            has_timestamp = (
                "created_at:" in class_body or "updated_at:" in class_body
            )

            # Skip list/wrapper responses (contain 'list[' or just have message/total fields)
            is_wrapper = any(
                [
                    ": list[" in class_body
                    and "id:" not in class_body.split(": list[")[0],
                    re.search(r"^\s+items\s*:", class_body, re.MULTILINE),
                    re.search(r"^\s+total\s*:", class_body, re.MULTILINE)
                    and not has_id_field,
                ]
            )

            # Only flag if it has id AND timestamps, or id and multiple other ORM-like fields
            looks_like_orm = has_id_field and (
                has_timestamp
                or (
                    # Has multiple fields suggesting ORM entity
                    sum(
                        [
                            "store_id:" in class_body,
                            "user_id:" in class_body,
                            "is_active:" in class_body,
                            "email:" in class_body,
                        ]
                    )
                    >= 2
                )
            )

            if "BaseModel" in base_classes and looks_like_orm and not is_wrapper:
                self._add_violation(
                    rule_id="MDL-003",
                    rule_name="Pydantic response models must use from_attributes",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=line_num,
                    message=f"Response model '{class_name}' may need from_attributes for ORM compatibility",
                    context=f"class {class_name}({base_classes})",
                    suggestion="Add 'model_config = ConfigDict(from_attributes=True)' or 'class Config: from_attributes = True'",
                )
|
||
|
||
def _validate_exceptions(self, target_path: Path):
    """Validate exception handling patterns (EXC-002 to EXC-005)."""
    print("⚠️ Validating exception handling...")

    py_files = list(target_path.glob("**/*.py"))

    for file_path in py_files:
        if self._should_ignore_file(file_path):
            continue

        content = file_path.read_text()
        lines = content.split("\n")

        # EXC-002: Check for bare except
        for i, line in enumerate(lines, 1):
            if re.match(r"\s*except\s*:", line):
                self._add_violation(
                    rule_id="EXC-002",
                    rule_name="No bare except clauses",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message="Bare except clause catches all exceptions including system exits",
                    context=line.strip(),
                    suggestion="Specify exception type: except ValueError: or except Exception:",
                )

    # EXC-003: Check for broad 'except Exception' in service files
    exempt_patterns = {"_metrics.py", "_features.py", "_widgets.py", "_aggregator", "definition.py", "__init__.py"}
    service_files = list(target_path.glob("app/modules/*/services/*.py"))
    for file_path in service_files:
        if any(pat in file_path.name for pat in exempt_patterns):
            continue
        if self._should_ignore_file(file_path):
            continue
        # Unreadable files are silently skipped rather than aborting the run.
        try:
            content = file_path.read_text()
            lines = content.split("\n")
        except Exception:
            continue
        for i, line in enumerate(lines, 1):
            if re.match(r"\s*except\s+Exception\s*(as\s+\w+)?\s*:", line):
                # Inline noqa suppresses this specific line
                if "noqa: EXC-003" in line or "noqa: exc-003" in line:
                    continue
                self._add_violation(
                    rule_id="EXC-003",
                    rule_name="Broad except Exception in service",
                    severity=Severity.WARNING,
                    file_path=file_path,
                    line_number=i,
                    message="Broad 'except Exception' catches too many error types",
                    context=line.strip(),
                    suggestion="Catch specific exceptions instead, or add '# noqa: EXC-003' to suppress",
                )

    # EXC-004: Check exception inheritance in exceptions module
    exception_files = list(target_path.glob("app/exceptions/**/*.py"))
    exception_files += list(target_path.glob("app/modules/*/exceptions.py"))
    for file_path in exception_files:
        # Skip package init and handler modules
        if "__init__" in file_path.name or "handler" in file_path.name:
            continue
        content = file_path.read_text()
        self._check_exception_inheritance(file_path, content)

    # EXC-005: Check exception handler registration in main.py
    main_py = target_path / "app" / "main.py"
    if main_py.exists():
        content = main_py.read_text()
        if (
            "setup_exception_handlers" not in content
            and "exception_handler" not in content
        ):
            self._add_violation(
                rule_id="EXC-005",
                rule_name="Exception handler must be registered",
                severity=Severity.ERROR,
                file_path=main_py,
                line_number=1,
                message="No exception handler registration found in main.py",
                context="app/main.py",
                suggestion="Add setup_exception_handlers(app) or register exception handlers",
            )
|
||
|
||
def _check_exception_inheritance(self, file_path: Path, content: str):
    """EXC-004: Check that custom exceptions inherit from OrionException"""
    # Accepted base classes for domain exceptions.
    valid_bases = (
        "OrionException",
        "ResourceNotFoundException",
        "ValidationException",
        "AuthenticationException",
        "AuthorizationException",
        "ConflictException",
        "BusinessLogicException",
        "MarketplaceException",
        "LetzshopClientError",
        "LoyaltyException",
        "Exception",  # Allow base Exception for some cases
    )

    # Class definitions whose name looks like an exception type.
    for match in re.finditer(
        r"class\s+(\w+Exception|\w+Error)\s*\(([^)]+)\)", content
    ):
        class_name, base_classes = match.group(1), match.group(2)

        # Accept any class that lists one of the approved bases.
        if any(base in base_classes for base in valid_bases):
            continue

        line_num = content.count("\n", 0, match.start()) + 1
        self._add_violation(
            rule_id="EXC-004",
            rule_name="Domain exceptions must inherit from OrionException",
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=line_num,
            message=f"Exception {class_name} should inherit from OrionException hierarchy",
            context=f"class {class_name}({base_classes})",
            suggestion="Inherit from OrionException or one of its subclasses",
        )
|
||
|
||
def _validate_naming_conventions(self, target_path: Path):
    """Validate naming convention rules (NAM-001 to NAM-005).

    NAM-001: API route files use PLURAL names (stores.py, not store.py).
    NAM-002: Service files use SINGULAR names with a '_service' suffix.
    NAM-003: Model files use SINGULAR names.
    NAM-004/NAM-005: Terminology consistency across app Python files
    ('store' over 'shop', 'inventory' over 'stock').

    Per-rule exception lists are read from the architecture rules config
    via self._get_rule(); a missing rule entry means no exceptions.

    Args:
        target_path: Project root to scan.
    """
    print("📛 Validating naming conventions...")

    # NAM-001: API files use PLURAL names
    nam001_rule = self._get_rule("NAM-001")
    nam001_exceptions = (
        nam001_rule.get("pattern", {}).get("exceptions", []) if nam001_rule else []
    )
    # Cover both the classic api tree and self-contained module routes.
    api_files = list(target_path.glob("app/api/v1/**/*.py"))
    api_files += list(target_path.glob("app/modules/*/routes/api/**/*.py"))
    for file_path in api_files:
        if file_path.name in nam001_exceptions:
            continue
        # Auth route files are exempt from the plural-name rule.
        if "_auth.py" in file_path.name:
            continue
        self._check_api_file_naming(file_path)

    # NAM-002: Service files use SINGULAR + 'service' suffix
    nam002_rule = self._get_rule("NAM-002")
    nam002_exceptions = (
        nam002_rule.get("pattern", {}).get("exceptions", []) if nam002_rule else []
    )
    service_files = list(target_path.glob("app/services/**/*.py"))
    service_files += list(target_path.glob("app/modules/*/services/**/*.py"))
    for file_path in service_files:
        if file_path.name == "__init__.py":
            continue
        # Check glob-style exceptions (e.g. *_features.py)
        if any(file_path.match(exc) for exc in nam002_exceptions):
            continue
        self._check_service_file_naming(file_path)

    # NAM-003: Model files use SINGULAR names
    model_files = list(target_path.glob("models/**/*.py"))
    model_files += list(target_path.glob("app/modules/*/models/**/*.py"))
    for file_path in model_files:
        if file_path.name == "__init__.py":
            continue
        self._check_model_file_naming(file_path)

    # NAM-004 & NAM-005: Check terminology consistency
    py_files = list(target_path.glob("app/**/*.py"))
    for file_path in py_files:
        if self._should_ignore_file(file_path):
            continue
        content = file_path.read_text()
        self._check_terminology_consistency(file_path, content)
|
||
|
||
def _check_api_file_naming(self, file_path: Path):
    """NAM-001: API route files should be named with plural nouns."""
    # Known singular stems mapped to the plural file name we expect.
    expected_plural = {
        "store": "stores",
        "product": "products",
        "order": "orders",
        "user": "users",
        "category": "categories",
        "import": "imports",
        "export": "exports",
        "customer": "customers",
        "cart": "carts",
        "payment": "payments",
        "setting": "settings",
    }

    stem = file_path.stem
    plural = expected_plural.get(stem)
    if plural is None:
        return

    self._add_violation(
        rule_id="NAM-001",
        rule_name="API files use PLURAL names",
        severity=Severity.WARNING,
        file_path=file_path,
        line_number=1,
        message=f"API file '{stem}.py' should use plural form",
        context=file_path.name,
        suggestion=f"Rename to '{plural}.py'",
    )
|
||
|
||
def _check_service_file_naming(self, file_path: Path):
    """NAM-002: Check that service files use singular + '_service' suffix.

    Two checks are applied in order:
      1. the file name must end with '_service';
      2. the noun before '_service' must be singular.

    A ``noqa: nam-002`` comment anywhere in the file opts it out.

    Args:
        file_path: Path of the service module being checked.
    """
    # Honour the per-file opt-out marker; unreadable files are simply
    # checked by name alone (best effort, as before).
    try:
        if "noqa: nam-002" in file_path.read_text().lower():
            return
    except Exception:
        pass

    name = file_path.stem

    # Check for _service suffix
    if not name.endswith("_service"):
        self._add_violation(
            rule_id="NAM-002",
            rule_name="Service files use SINGULAR + 'service' suffix",
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=1,
            message=f"Service file '{name}.py' should end with '_service'",
            context=file_path.name,
            suggestion=f"Rename to '{name}_service.py' or ensure it's a service file",
        )
        return

    # Strip only the trailing suffix: str.replace would drop *every*
    # occurrence of "_service" in the name, not just the suffix.
    base_name = name[: -len("_service")]

    # Explicit plural -> singular table. This avoids the rstrip("s")
    # footgun (it strips ALL trailing 's' characters, e.g. would turn a
    # hypothetical "address" into "addre") and removes the ad-hoc
    # special case that was needed for the irregular "categories".
    plural_to_singular = {
        "stores": "store",
        "products": "product",
        "orders": "order",
        "users": "user",
        "categories": "category",
        "customers": "customer",
        "payments": "payment",
    }
    singular = plural_to_singular.get(base_name)
    if singular is not None:
        self._add_violation(
            rule_id="NAM-002",
            rule_name="Service files use SINGULAR + 'service' suffix",
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=1,
            message=f"Service file '{name}.py' should use singular form",
            context=file_path.name,
            suggestion=f"Rename to '{singular}_service.py'",
        )
|
||
|
||
def _check_model_file_naming(self, file_path: Path):
    """NAM-003: model files should be named with singular nouns."""
    # Known plural stems mapped to the singular name we expect.
    singular_for = {
        "stores": "store",
        "products": "product",
        "orders": "order",
        "users": "user",
        "categories": "category",
        "customers": "customer",
        "payments": "payment",
        "settings": "setting",
    }

    stem = file_path.stem
    singular = singular_for.get(stem)
    if singular is None:
        return

    self._add_violation(
        rule_id="NAM-003",
        rule_name="Model files use SINGULAR names",
        severity=Severity.WARNING,
        file_path=file_path,
        line_number=1,
        message=f"Model file '{stem}.py' should use singular form",
        context=file_path.name,
        suggestion=f"Rename to '{singular}.py'",
    )
|
||
|
||
def _check_terminology_consistency(self, file_path: Path, content: str):
    """NAM-004 & NAM-005: Check for inconsistent terminology.

    NAM-004: 'shop_id' should be 'store_id' (skipped for shop-specific
    files). NAM-005: 'stock_*' identifiers should use 'inventory'.

    Each rule fires at most once per file; a '# noqa' comment on the
    offending line suppresses it. Fix: a NAM-004 hit now breaks out of
    its own scan instead of returning from the whole method, so it no
    longer suppresses the independent NAM-005 check for the same file.

    Args:
        file_path: File being checked (its path decides exemptions).
        content: Full text of the file.
    """
    lines = content.split("\n")
    path_str = str(file_path)

    # NAM-004: Check for 'shop_id' (should be store_id).
    # Skip shop-specific files where shop_id might be legitimate; the
    # word boundary avoids matching identifiers like 'letzshop_id'.
    shop_id_pattern = re.compile(r"\bshop_id\b")
    if "/shop/" not in path_str and "shop_service" not in path_str:
        for i, line in enumerate(lines, 1):
            if shop_id_pattern.search(line) and "# noqa" not in line.lower():
                self._add_violation(
                    rule_id="NAM-004",
                    rule_name="Use 'store' not 'shop'",
                    severity=Severity.INFO,
                    file_path=file_path,
                    line_number=i,
                    message="Consider using 'store_id' instead of 'shop_id'",
                    context=line.strip()[:60],
                    suggestion="Use store_id for multi-tenant context",
                )
                break  # Only report NAM-004 once per file

    # NAM-005: Check for 'stock' (should be inventory)
    for i, line in enumerate(lines, 1):
        if "stock_service" in line or "stock_id" in line:
            if "# noqa" not in line.lower():
                self._add_violation(
                    rule_id="NAM-005",
                    rule_name="Use 'inventory' not 'stock'",
                    severity=Severity.INFO,
                    file_path=file_path,
                    line_number=i,
                    message="Consider using 'inventory' instead of 'stock'",
                    context=line.strip()[:60],
                    suggestion="Use inventory_service or inventory_id for consistency",
                )
                return  # Only report NAM-005 once per file
|
||
|
||
def _validate_auth_patterns(self, target_path: Path):
    """Validate authentication patterns (AUTH-001 to AUTH-004).

    Checks performed here:
      * AUTH-003: the auth service must appear to hash passwords
        (heuristic: looks for 'bcrypt' or any 'hash' reference).
      * AUTH-004: store API files must take store identity from the JWT
        token, while storefront API files must carry an explicit
        store-context dependency (or be marked public/authenticated) -
        both delegated to dedicated per-file checks.

    Args:
        target_path: Project root to scan.
    """
    print("🔐 Validating auth patterns...")

    # AUTH-003: Check password hashing in auth service
    auth_service = target_path / "app" / "services" / "auth_service.py"
    if auth_service.exists():
        content = auth_service.read_text()
        # Heuristic: absence of both 'bcrypt' and 'hash' suggests plain
        # passwords may be stored.
        if "bcrypt" not in content and "hash" not in content.lower():
            self._add_violation(
                rule_id="AUTH-003",
                rule_name="Never store plain passwords",
                severity=Severity.ERROR,
                file_path=auth_service,
                line_number=1,
                message="Auth service may not be hashing passwords",
                context="auth_service.py",
                suggestion="Use bcrypt or similar library to hash passwords before storing",
            )

    # AUTH-004: Check store context patterns
    store_api_files = list(target_path.glob("app/api/v1/store/**/*.py"))
    store_api_files += list(target_path.glob("app/modules/*/routes/api/store*.py"))
    for file_path in store_api_files:
        if file_path.name == "__init__.py" or file_path.name == "auth.py":
            continue
        # storefront*.py files are handled separately - they SHOULD use require_store_context
        if file_path.name.startswith("storefront"):
            continue
        content = file_path.read_text()
        self._check_store_context_pattern(file_path, content)

    # Storefront endpoints get the complementary check: they should use
    # require_store_context (or be explicitly marked public/authenticated).
    storefront_api_files = list(target_path.glob("app/api/v1/storefront/**/*.py"))
    storefront_api_files += list(target_path.glob("app/modules/*/routes/api/storefront*.py"))
    for file_path in storefront_api_files:
        if file_path.name == "__init__.py" or file_path.name == "auth.py":
            continue
        content = file_path.read_text()
        self._check_storefront_context_pattern(file_path, content)
|
||
|
||
def _check_store_context_pattern(self, file_path: Path, content: str):
    """AUTH-004: store API endpoints must take store identity from the JWT
    token (require_store_context() is reserved for shop/storefront code)."""
    if "noqa: auth-004" in content.lower():
        return
    if "require_store_context()" not in content:
        return

    # Report the first offending line only.
    for line_no, line in enumerate(content.split("\n"), 1):
        if "require_store_context()" not in line:
            continue
        self._add_violation(
            rule_id="AUTH-004",
            rule_name="Use correct store context pattern",
            severity=Severity.WARNING,
            file_path=file_path,
            line_number=line_no,
            message="Store API should use token_store_id, not require_store_context()",
            context=line.strip()[:60],
            suggestion="Use current_user.token_store_id from JWT token instead",
        )
        return
|
||
|
||
def _check_storefront_context_pattern(self, file_path: Path, content: str):
    """AUTH-004: storefront endpoints that need store context must use
    require_store_context, or be marked '# public' / '# authenticated'
    (customer auth already includes store context)."""
    if "noqa: auth-004" in content.lower():
        return

    markers = ("require_store_context", "# public", "# authenticated")
    # A marker anywhere in the file satisfies the file-level check.
    if any(marker in content for marker in markers):
        return
    if "@router." not in content:
        return

    lines = content.split("\n")
    for line_no, line in enumerate(lines, 1):
        if "@router." not in line:
            continue
        # Look at the decorator line plus the following handler lines
        # for an explicit marker before flagging.
        window = "\n".join(lines[line_no - 1 : line_no + 10])
        if any(marker in window for marker in markers):
            continue
        self._add_violation(
            rule_id="AUTH-004",
            rule_name="Storefront endpoints need store context",
            severity=Severity.INFO,
            file_path=file_path,
            line_number=line_no,
            message="Storefront endpoint may need store context dependency",
            context=line.strip()[:60],
            suggestion="Add Depends(require_store_context()) or mark as '# public'",
        )
        return
|
||
|
||
def _validate_middleware(self, target_path: Path):
    """Validate middleware patterns (MDW-001, MDW-002)."""
    print("🔌 Validating middleware...")

    middleware_dir = target_path / "middleware"
    if not middleware_dir.exists():
        return

    # MDW-001: middleware modules are named after what they do, without
    # a redundant "_middleware" suffix.
    for module in middleware_dir.glob("*.py"):
        if module.name == "__init__.py":
            continue
        if "_middleware" not in module.stem:
            continue
        self._add_violation(
            rule_id="MDW-001",
            rule_name="Middleware uses simple noun naming",
            severity=Severity.WARNING,
            file_path=module,
            line_number=1,
            message="Middleware file should use simple noun naming",
            context=module.name,
            suggestion=f"Rename to '{module.stem.replace('_middleware', '')}.py'",
        )

    # MDW-002: the store-context middleware must publish the resolved
    # store on request.state. Setting the full object is preferred
    # (request.state.store), since store_id is then reachable via
    # request.state.store.id.
    store_context = middleware_dir / "store_context.py"
    if store_context.exists() and "request.state.store" not in store_context.read_text():
        self._add_violation(
            rule_id="MDW-002",
            rule_name="Store context middleware must set state",
            severity=Severity.ERROR,
            file_path=store_context,
            line_number=1,
            message="Store context middleware should set request.state.store",
            context="store_context.py",
            suggestion="Add 'request.state.store = store' in the middleware",
        )
|
||
|
||
def _validate_javascript(self, target_path: Path):
    """Validate JavaScript patterns (JS-001 to JS-014).

    Scans first-party JS under static/ and module static/ trees,
    skipping vendored libraries in shared/js/lib/. JS-001 and JS-002
    are checked inline; JS-003..JS-014 delegate to per-rule helpers,
    each taking (file_path, content, lines).

    Args:
        target_path: Project root to scan.
    """
    print("🟨 Validating JavaScript...")

    # Include admin, store, and shared JS files
    # Also include self-contained module JS files
    js_files = (
        list(target_path.glob("static/admin/js/**/*.js"))
        + list(target_path.glob("static/store/js/**/*.js"))
        + list(target_path.glob("static/shared/js/**/*.js"))
        + list(target_path.glob("app/modules/*/static/admin/js/**/*.js"))
        + list(target_path.glob("app/modules/*/static/store/js/**/*.js"))
    )
    self.result.files_checked += len(js_files)

    for file_path in js_files:
        # Skip third-party libraries in static/shared/js/lib/
        # Note: static/store/js/ is our app's store dashboard code (NOT third-party)
        file_path_str = str(file_path)
        if "/shared/js/lib/" in file_path_str or "\\shared\\js\\lib\\" in file_path_str:
            continue

        content = file_path.read_text()
        lines = content.split("\n")

        # JS-001: Check for console usage (must use centralized logger)
        # Skip init-*.js files - they run before logger is available
        # Skip files with noqa: js-001 comment
        if not file_path.name.startswith("init-") and "noqa: js-001" not in content.lower():
            for i, line in enumerate(lines, 1):
                if re.search(r"console\.(log|warn|error)", line):
                    # Skip if it's a comment or bootstrap message
                    if "//" in line or "✅" in line or "eslint-disable" in line:
                        continue

                    self._add_violation(
                        rule_id="JS-001",
                        rule_name="Use centralized logger",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=i,
                        message="Use centralized logger instead of console",
                        context=line.strip()[:80],
                        suggestion="Use window.LogConfig.createLogger('moduleName')",
                    )

        # JS-002: Check for window.apiClient (must use lowercase apiClient)
        for i, line in enumerate(lines, 1):
            if "window.apiClient" in line:
                # Check if it's not in a comment (only text before the
                # occurrence matters for the '//' test)
                before_occurrence = line[: line.find("window.apiClient")]
                if "//" not in before_occurrence:
                    self._add_violation(
                        rule_id="JS-002",
                        rule_name="Use lowercase apiClient",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=i,
                        message="Use apiClient directly instead of window.apiClient",
                        context=line.strip(),
                        suggestion="Replace window.apiClient with apiClient",
                    )

        # JS-003: Check Alpine components spread ...data()
        self._check_alpine_data_spread(file_path, content, lines)

        # JS-004: Check Alpine components set currentPage
        self._check_alpine_current_page(file_path, content, lines)

        # JS-005: Check initialization guard
        self._check_init_guard(file_path, content, lines)

        # JS-006: Check async error handling
        self._check_async_error_handling(file_path, content, lines)

        # JS-007: Check loading state management
        self._check_loading_state(file_path, content, lines)

        # JS-008: Check for raw fetch() calls instead of apiClient
        self._check_fetch_vs_api_client(file_path, content, lines)

        # JS-009: Check for alert() or window.showToast instead of Utils.showToast()
        self._check_toast_usage(file_path, content, lines)

        # JS-010: Check PlatformSettings usage for pagination
        self._check_platform_settings_usage(file_path, content, lines)

        # JS-011: Check standard pagination structure
        self._check_pagination_structure(file_path, content, lines)

        # JS-012: Check for double /api/v1 prefix in API calls
        self._check_api_prefix_usage(file_path, content, lines)

        # JS-013: Check that components overriding init() call parent init
        self._check_parent_init_call(file_path, content, lines)

        # JS-014: Check that store API calls don't include storeCode in path
        self._check_store_api_paths(file_path, content, lines)
|
||
|
||
def _check_platform_settings_usage(self, file_path: Path, content: str, lines: list[str]):
    """JS-010: pages that paginate must read the per-page size from
    PlatformSettings.getRowsPerPage()."""
    # Bootstrap and settings files are exempt.
    if file_path.name in ("init-alpine.js", "init-api-client.js", "settings.js"):
        return

    # Recognise the various pagination shapes used across the codebase:
    # standard `pagination: {...}`, named `jobsPagination: {...}`,
    # separate `ordersLimit` style vars, and literal `per_page: N`.
    pagination_res = (
        r"pagination:\s*\{",
        r"\w+Pagination:\s*\{",
        r"(orders|products|exceptions|jobs|items)Limit\s*:",
        r"per_page\s*:\s*\d+",
    )
    if not any(re.search(rx, content) for rx in pagination_res):
        return

    if "PlatformSettings" in content:
        return

    # Report once, at the first line that looks pagination-related.
    for line_no, line in enumerate(lines, 1):
        if not re.search(r"(pagination|Pagination|Limit|per_page)\s*[:{]", line):
            continue
        self._add_violation(
            rule_id="JS-010",
            rule_name="Use PlatformSettings for pagination",
            severity=Severity.ERROR,
            file_path=file_path,
            line_number=line_no,
            message="Page with pagination must use PlatformSettings.getRowsPerPage()",
            context=line.strip()[:80],
            suggestion="Add: if (window.PlatformSettings) { this.pagination.per_page = await window.PlatformSettings.getRowsPerPage(); }",
        )
        break
|
||
|
||
def _check_pagination_structure(self, file_path: Path, content: str, lines: list[str]):
    """JS-011: pagination objects must use the standard property names
    (per_page, pages) rather than legacy ones (page_size, pageSize,
    total_pages, totalPages).

    Scans the body of each ``pagination: { ... }`` object literal using a
    brace counter. Fixes two defects in the previous implementation:
      * a pagination object that opened AND closed on the same line left
        the scanner "inside" the block, so the following (unrelated)
        line was checked by mistake;
      * properties declared on the opening line itself were never
        checked at all.

    Args:
        file_path: JS file being checked.
        content: Full file text.
        lines: content split on newlines (1-based reporting).
    """
    if file_path.name == "init-alpine.js":
        return
    if not re.search(r"pagination:\s*\{", content):
        return

    wrong_patterns = (
        (r"page_size\s*:", "page_size", "per_page"),
        (r"pageSize\s*:", "pageSize", "per_page"),
        (r"total_pages\s*:", "total_pages", "pages"),
        (r"totalPages\s*:", "totalPages (in pagination object)", "pages"),
    )

    def _flag(line_no: int, text: str) -> None:
        # Report every legacy property name found in this text span.
        for pattern, wrong_name, correct_name in wrong_patterns:
            if re.search(pattern, text):
                self._add_violation(
                    rule_id="JS-011",
                    rule_name="Use standard pagination structure",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=line_no,
                    message=f"Use '{correct_name}' instead of '{wrong_name}' in pagination",
                    context=text.strip()[:80],
                    suggestion=f"Rename '{wrong_name}' to '{correct_name}'",
                )

    in_pagination_block = False
    brace_depth = 0
    for line_no, line in enumerate(lines, 1):
        # Detect the start of a pagination object literal.
        if not in_pagination_block and "pagination:" in line and "{" in line:
            brace_depth = line.count("{") - line.count("}")
            # Check properties declared on the opening line itself
            # (everything after the first opening brace).
            _flag(line_no, line.split("{", 1)[1])
            # Stay inside only if the object is still open; a same-line
            # close must not leak the scan onto following lines.
            in_pagination_block = brace_depth > 0
            continue

        if in_pagination_block:
            brace_depth += line.count("{") - line.count("}")
            _flag(line_no, line)
            # Exit the pagination block when its braces close.
            if brace_depth <= 0:
                in_pagination_block = False
|
||
|
||
def _check_api_prefix_usage(self, file_path: Path, content: str, lines: list[str]):
    """JS-012: apiClient prepends /api/v1 itself, so endpoint strings
    passed to it must not repeat the prefix."""
    # The client implementation files are allowed to mention the prefix.
    if file_path.name in ("init-api-client.js", "api-client.js"):
        return

    call_with_prefix = re.compile(
        r"apiClient\.(get|post|put|delete|patch)\s*\(\s*['\"`]/api/v1"
    )
    for line_no, line in enumerate(lines, 1):
        # Ignore line and block comments.
        if line.strip().startswith(("//", "*")):
            continue
        if not call_with_prefix.search(line):
            continue
        self._add_violation(
            rule_id="JS-012",
            rule_name="Do not include /api/v1 prefix",
            severity=Severity.ERROR,
            file_path=file_path,
            line_number=line_no,
            message="apiClient already adds /api/v1 prefix - remove it from the endpoint",
            context=line.strip()[:80],
            suggestion="Change '/api/v1/admin/...' to '/admin/...'",
        )
|
||
|
||
def _check_parent_init_call(self, file_path: Path, content: str, lines: list[str]):
    """JS-013: store components that spread ...data() AND override init()
    must call the parent init first.

    The store base layout's data() has an init() that extracts storeCode
    from the URL, so skipping it leaves storeCode unset. Admin data()
    has no init(), hence this check only applies to store JS files.
    """
    if "/store/js/" not in str(file_path):
        return
    # Pages that never inherit the base layout are exempt.
    if file_path.name in ("init-alpine.js", "login.js", "onboarding.js"):
        return

    init_def = re.compile(r"async\s+init\s*\(\s*\)")
    # Only relevant when the component both inherits via ...data() and
    # defines its own async init().
    if "...data()" not in content or not init_def.search(content):
        return

    # Any of these patterns counts as calling the parent init.
    parent_markers = ("data().init", "parentInit", "parent.init")
    if any(marker in content for marker in parent_markers):
        return

    # Report at the line where the overriding init() is defined.
    for line_no, line in enumerate(lines, 1):
        if not init_def.search(line):
            continue
        self._add_violation(
            rule_id="JS-013",
            rule_name="Components overriding init() must call parent init",
            severity=Severity.ERROR,
            file_path=file_path,
            line_number=line_no,
            message="Component with ...data() must call parent init() to set storeCode",
            context=line.strip()[:80],
            suggestion="Add: const parentInit = data().init; if (parentInit) { await parentInit.call(this); }",
        )
        break
|
||
|
||
def _check_store_api_paths(self, file_path: Path, content: str, lines: list[str]):
    """JS-014: store dashboard API calls authenticate via JWT token, so
    the storeCode segment must not appear in apiClient paths.

    Incorrect: apiClient.get(`/store/${this.storeCode}/orders`)
    Correct:   apiClient.get(`/store/orders`)

    Exceptions that legitimately address a store by code in the URL:
    public store info and /content-pages endpoints (and the
    content-page module files themselves).
    """
    if "/store/js/" not in str(file_path):
        return

    # apiClient verb call whose path template embeds ${storeCode} or
    # ${this.storeCode} right after /store/.
    store_code_call = re.compile(
        r"apiClient\.(get|post|put|delete|patch)"
        r"\s*\(\s*[`'\"]\/store\/\$\{(?:this\.)?storeCode\}\/"
    )
    allow_by_name = "content-page" in file_path.name

    for line_no, line in enumerate(lines, 1):
        if not store_code_call.search(line):
            continue
        # Public content endpoints keep storeCode in the path.
        if allow_by_name or "/content-pages" in line:
            continue
        self._add_violation(
            rule_id="JS-014",
            rule_name="Store API calls must not include storeCode in path",
            severity=Severity.ERROR,
            file_path=file_path,
            line_number=line_no,
            message="Store API endpoints use JWT authentication, not URL path parameters",
            context=line.strip()[:100],
            suggestion="Remove storeCode from path: /store/orders instead of /store/${this.storeCode}/orders",
        )
|
||
|
||
def _validate_templates(self, target_path: Path):
    """Validate template patterns (FE-001..FE-008, TPL-001, TPL-008..TPL-015).

    Walks admin/store/shop templates (plus self-contained module
    templates). For each file it computes skip flags (base/partial,
    macro, components showcase), runs the macro-usage checks (FE-*),
    the template-pattern checks (TPL-008..TPL-015, including matching
    the template against its companion JS file for TPL-010), and
    finally enforces TPL-001: every non-exempt template must extend the
    base template for its area.

    Args:
        target_path: Project root to scan.
    """
    print("📄 Validating templates...")

    # Include admin, store, and shop templates
    # Also include self-contained module templates
    template_files = (
        list(target_path.glob("app/templates/admin/**/*.html")) +
        list(target_path.glob("app/templates/store/**/*.html")) +
        list(target_path.glob("app/templates/shop/**/*.html")) +
        list(target_path.glob("app/modules/*/templates/*/admin/**/*.html")) +
        list(target_path.glob("app/modules/*/templates/*/store/**/*.html"))
    )
    self.result.files_checked += len(template_files)

    # TPL-001 exclusion patterns (substring match on the full path)
    tpl_001_exclusions = [
        "login.html",  # Standalone login page
        "errors/",  # Error pages extend errors/base.html
        "test-",  # Test templates
    ]

    for file_path in template_files:
        file_path_str = str(file_path)

        # Skip base template and partials
        is_base_or_partial = (
            "base.html" in file_path.name or "partials" in file_path_str
        )

        # Skip macros directory for FE rules (both path separators)
        is_macro = (
            "shared/macros/" in file_path_str or "shared\\macros\\" in file_path_str
        )

        # Skip components showcase page
        is_components_page = "components.html" in file_path.name

        # Determine template type from the path
        is_admin = "/admin/" in file_path_str or "\\admin\\" in file_path_str
        is_store = "/store/" in file_path_str or "\\store\\" in file_path_str
        is_shop = "/shop/" in file_path_str or "\\shop\\" in file_path_str

        content = file_path.read_text()
        lines = content.split("\n")

        # FE-001: Check for inline pagination (should use macro)
        if not is_base_or_partial and not is_macro and not is_components_page:
            self._check_pagination_macro_usage(file_path, content, lines)

        # FE-002: Check for inline SVGs (should use $icon())
        if not is_base_or_partial and not is_macro and not is_components_page:
            self._check_icon_helper_usage(file_path, content, lines)

        # FE-003: Check for inline loading/error states (should use alerts macro)
        if not is_base_or_partial and not is_macro and not is_components_page:
            self._check_alerts_macro_usage(file_path, content, lines)

        # FE-004: Check for inline modals (should use modals macro)
        if not is_base_or_partial and not is_macro and not is_components_page:
            self._check_modals_macro_usage(file_path, content, lines)

        # FE-005: Check for inline table wrappers (should use tables macro)
        if not is_base_or_partial and not is_macro and not is_components_page:
            self._check_tables_macro_usage(file_path, content, lines)

        # FE-006: Check for inline dropdowns (should use dropdowns macro)
        if not is_base_or_partial and not is_macro and not is_components_page:
            self._check_dropdowns_macro_usage(file_path, content, lines)

        # FE-007: Check for inline page headers (should use headers macro)
        if not is_base_or_partial and not is_macro and not is_components_page:
            self._check_headers_macro_usage(file_path, content, lines)

        # FE-008: Check for raw number inputs (should use number_stepper)
        if not is_base_or_partial and not is_macro and not is_components_page:
            self._check_number_stepper_macro_usage(file_path, content, lines)

        # TPL-008: Check for call table_header() pattern
        self._check_table_header_call_pattern(file_path, content, lines)

        # TPL-009: Check for invalid block names
        if not is_base_or_partial:
            self._check_valid_block_names(file_path, content, lines)

        # TPL-010: Check Alpine variables are defined in JS
        if not is_base_or_partial and not is_macro and not is_components_page:
            # Try to find corresponding JS file based on template type
            # Template: app/templates/admin/messages.html -> JS: static/admin/js/messages.js
            # Template: app/templates/store/analytics.html -> JS: static/store/js/analytics.js
            template_name = file_path.stem  # e.g., "messages"
            if is_admin:
                js_dir = "admin"
            elif is_store:
                js_dir = "store"
            elif is_shop:
                js_dir = "shop"
            else:
                js_dir = None

            if js_dir:
                # Check standard static path first
                js_file = target_path / f"static/{js_dir}/js/{template_name}.js"

                # For module templates, check module static path
                # e.g., app/modules/cms/templates/cms/admin/content-pages.html
                # -> app/modules/cms/static/admin/js/content-pages.js
                if not js_file.exists() and "/modules/" in file_path_str:
                    # Extract module name from path
                    parts = file_path_str.split("/modules/")
                    if len(parts) > 1:
                        module_part = parts[1].split("/")[0]  # e.g., "cms"
                        js_file = target_path / f"app/modules/{module_part}/static/{js_dir}/js/{template_name}.js"

                if js_file.exists():
                    js_content = js_file.read_text()
                    self._check_alpine_template_vars(file_path, content, lines, js_content)

        # TPL-011: Check for deprecated macros
        self._check_deprecated_macros(file_path, content, lines)

        # TPL-012: Check for escaped quotes in Alpine template literals
        self._check_escaped_quotes_in_alpine(file_path, content, lines)

        # TPL-013: Check for old pagination macro API
        self._check_pagination_macro_api(file_path, content, lines)

        # TPL-014: Check for old modal_simple macro API
        self._check_modal_simple_macro_api(file_path, content, lines)

        # TPL-015: Check for old page_header macro API
        self._check_page_header_macro_api(file_path, content, lines)

        # Skip base/partials for TPL-001 check
        if is_base_or_partial:
            continue

        # Check exclusion patterns for TPL-001
        skip = False
        for exclusion in tpl_001_exclusions:
            if exclusion in file_path_str:
                skip = True
                break
        if skip:
            continue

        # Check for standalone marker in template (first 5 lines)
        first_lines = "\n".join(lines[:5]).lower()
        if "standalone" in first_lines or "noqa: tpl-001" in first_lines:
            continue

        # TPL-001: Check for extends (template-type specific)
        if is_admin:
            expected_base = "admin/base.html"
            rule_id = "TPL-001"
        elif is_store:
            expected_base = "store/base.html"
            rule_id = "TPL-001"
        elif is_shop:
            expected_base = "shop/base.html"
            rule_id = "TPL-001"
        else:
            continue  # Skip unknown template types

        has_extends = any(
            "{% extends" in line and expected_base in line for line in lines
        )

        if not has_extends:
            template_type = "Admin" if is_admin else "Store" if is_store else "Shop"
            self._add_violation(
                rule_id=rule_id,
                rule_name="Templates must extend base",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=1,
                message=f"{template_type} template does not extend {expected_base}",
                context=file_path.name,
                suggestion=f"Add {{% extends '{expected_base}' %}} at the top, or add {{# standalone #}} if intentional",
            )
|
||
|
||
# =========================================================================
|
||
# LANGUAGE/I18N VALIDATION
|
||
# =========================================================================
|
||
|
||
def _validate_language_rules(self, target_path: Path):
    """Run every language/i18n validation check against the target path."""
    print("\n🌐 Validating language/i18n rules...")

    # Each entry covers one or more LANG-* rules; invocation order follows
    # the rule numbering.
    checks = (
        self._check_language_codes,                      # LANG-001
        self._check_template_language_inline_patterns,   # LANG-002, LANG-003
        self._check_language_selector_function,          # LANG-004
        self._check_language_js_patterns,                # LANG-005, LANG-006, LANG-008
        self._check_language_template_patterns,          # LANG-007, LANG-009
        self._check_translation_files,                   # LANG-010
    )
    for check in checks:
        check(target_path)
|
||
|
||
def _check_language_codes(self, target_path: Path):
    """LANG-001: Check for invalid language codes in Python files.

    Flags full language names (e.g. 'english') used where an ISO 639-1
    code ('en') is expected. Display-name structures such as
    LANGUAGE_NAMES dicts are exempt because they legitimately map codes
    to human-readable names.
    """
    py_files = list(target_path.glob("app/**/*.py"))
    py_files.extend(list(target_path.glob("models/**/*.py")))

    # (invalid literal, suggested ISO replacement) — both quote styles covered.
    invalid_codes = [
        ("'english'", "'en'"),
        ("'french'", "'fr'"),
        ("'german'", "'de'"),
        ("'luxembourgish'", "'lb'"),
        ("'lux'", "'lb'"),
        ('"english"', '"en"'),
        ('"french"', '"fr"'),
        ('"german"', '"de"'),
        ('"luxembourgish"', '"lb"'),
        ('"lux"', '"lb"'),
    ]

    for file_path in py_files:
        if self._should_ignore_file(file_path):
            continue

        try:
            content = file_path.read_text()
            lines = content.split("\n")

            # Track if we're inside LANGUAGE_NAMES dicts or SUPPORTED_LANGUAGES lists
            # (allowed to use language names as display values)
            in_language_names_block = False

            for i, line in enumerate(lines, 1):
                # Skip comments
                stripped = line.strip()
                if stripped.startswith("#"):
                    continue

                # Track LANGUAGE_NAMES, LANGUAGE_NAMES_EN, or SUPPORTED_LANGUAGES/LOCALES blocks
                # - name values are allowed in these structures
                if any(
                    name in line
                    for name in [
                        "LANGUAGE_NAMES",
                        "LANGUAGE_NAMES_EN",
                        "SUPPORTED_LANGUAGES",
                        "SUPPORTED_LOCALES",
                    ]
                ) and "=" in line:
                    in_language_names_block = True
                # A bare closing brace/bracket on its own line ends the block.
                if in_language_names_block and stripped in ("}", "]"):
                    in_language_names_block = False
                    continue
                if in_language_names_block:
                    continue

                # Case-insensitive match: the lowered line is compared against
                # the (already lowercase) invalid literals.
                for wrong, correct in invalid_codes:
                    if wrong in line.lower():
                        self._add_violation(
                            rule_id="LANG-001",
                            rule_name="Only use supported language codes",
                            severity=Severity.ERROR,
                            file_path=file_path,
                            line_number=i,
                            message=f"Invalid language code '{wrong}' - use ISO code instead",
                            context=stripped[:80],
                            suggestion=f"Change to: {correct}",
                        )
        except Exception:
            pass  # Skip files that can't be read
|
||
|
||
def _check_template_language_inline_patterns(self, target_path: Path):
    """LANG-002, LANG-003: Check inline Alpine.js and tojson|safe usage in templates."""
    templates = list(target_path.glob("app/templates/**/*.html"))
    templates += list(target_path.glob("app/modules/*/templates/**/*.html"))

    for tpl_path in templates:
        if self._should_ignore_file(tpl_path):
            continue

        # partials/header.html may legitimately use hardcoded language arrays.
        if "partials/header.html" in str(tpl_path):
            continue

        try:
            tpl_lines = tpl_path.read_text().split("\n")

            for lineno, text in enumerate(tpl_lines, 1):
                # LANG-002: complex inline x-data objects carrying language state.
                if 'x-data="{' in text or "x-data='{" in text:
                    # Inspect the current line plus the following ten lines.
                    window = "\n".join(tpl_lines[lineno - 1 : lineno + 10])
                    uses_lang_props = (
                        "languages:" in window
                        or "languageNames:" in window
                        or "languageFlags:" in window
                    )
                    # A languageSelector(...) call is the sanctioned form.
                    if uses_lang_props and "languageSelector(" not in window:
                        self._add_violation(
                            rule_id="LANG-002",
                            rule_name="Never use inline Alpine.js x-data for language selector",
                            severity=Severity.ERROR,
                            file_path=tpl_path,
                            line_number=lineno,
                            message="Complex language selector should use languageSelector() function",
                            context=text.strip()[:80],
                            suggestion="Use: x-data='languageSelector(\"{{ lang }}\", {{ langs|tojson }})'",
                        )

                # LANG-003: double-quoted x-data with |tojson emits raw JSON
                # double quotes into the attribute and breaks HTML parsing.
                if 'x-data="' in text and "|tojson" in text:
                    self._add_violation(
                        rule_id="LANG-003",
                        rule_name="Use single quotes for x-data with tojson",
                        severity=Severity.ERROR,
                        file_path=tpl_path,
                        line_number=lineno,
                        message="Double-quoted x-data with |tojson breaks HTML (JSON has double quotes)",
                        context=text.strip()[:80],
                        suggestion="Use single quotes: x-data='func({{ data|tojson }})'",
                    )
        except Exception:
            pass  # Skip files that can't be read
|
||
|
||
def _check_language_selector_function(self, target_path: Path):
    """LANG-004: Check that languageSelector function exists and is exported."""
    # Both layout bundles must define the selector factory and expose it
    # on window so templates can reference it from x-data.
    candidates = (
        target_path / "static/shop/js/shop-layout.js",
        target_path / "static/store/js/init-alpine.js",
    )

    for js_path in candidates:
        if not js_path.exists():
            continue

        source = js_path.read_text()
        defines_fn = "function languageSelector" in source
        exports_fn = "window.languageSelector" in source

        if not defines_fn:
            self._add_violation(
                rule_id="LANG-004",
                rule_name="Language selector function must be defined",
                severity=Severity.ERROR,
                file_path=js_path,
                line_number=1,
                message="Missing languageSelector function definition",
                context=js_path.name,
                suggestion="Add: function languageSelector(currentLang, enabledLanguages) { return {...}; }",
            )
        elif not exports_fn:
            # Defined but never attached to window — invisible to templates.
            self._add_violation(
                rule_id="LANG-004",
                rule_name="Language selector function must be exported",
                severity=Severity.ERROR,
                file_path=js_path,
                line_number=1,
                message="languageSelector function not exported to window",
                context=js_path.name,
                suggestion="Add: window.languageSelector = languageSelector;",
            )
|
||
|
||
def _check_language_js_patterns(self, target_path: Path):
    """LANG-005, LANG-006, LANG-008: Check language names, flag codes, and API usage in JS files.

    LANG-005: display names must be native (Français, not French).
    LANG-006: flag codes must map to the correct country (en->gb, lb->lu).
    LANG-008: the /language/set endpoint must be called via POST with a body.
    """
    js_files = list(target_path.glob("static/**/js/**/*.js"))

    # Comparison is case-insensitive and whitespace-insensitive.
    def _norm(s: str) -> str:
        return s.lower().replace(" ", "")

    # Lookup tables hoisted out of the per-line loop (they were rebuilt and
    # re-normalized on every line). Stored as (normalized wrong literal,
    # suggested replacement); both quote styles covered.
    english_names = [
        (_norm(wrong), correct)
        for wrong, correct in [
            ("'fr': 'French'", "'fr': 'Français'"),
            ("'de': 'German'", "'de': 'Deutsch'"),
            ("'lb': 'Luxembourgish'", "'lb': 'Lëtzebuergesch'"),
            ('"fr": "French"', '"fr": "Français"'),
            ('"de": "German"', '"de": "Deutsch"'),
            ('"lb": "Luxembourgish"', '"lb": "Lëtzebuergesch"'),
        ]
    ]
    wrong_flags = [
        (_norm(wrong), correct)
        for wrong, correct in [
            ("'en': 'us'", "'en': 'gb'"),
            ("'en': 'en'", "'en': 'gb'"),
            ("'lb': 'lb'", "'lb': 'lu'"),
            ('"en": "us"', '"en": "gb"'),
            ('"en": "en"', '"en": "gb"'),
            ('"lb": "lb"', '"lb": "lu"'),
        ]
    ]

    for file_path in js_files:
        if self._should_ignore_file(file_path):
            continue

        content = file_path.read_text()
        lines = content.split("\n")

        for i, line in enumerate(lines, 1):
            # Skip comments
            stripped = line.strip()
            if stripped.startswith(("//", "/*")):
                continue

            normalized_line = _norm(line)

            # LANG-005: English language names instead of native ones
            for wrong_norm, correct in english_names:
                if wrong_norm in normalized_line:
                    self._add_violation(
                        rule_id="LANG-005",
                        rule_name="Use native language names",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message="Use native language name instead of English",
                        context=stripped[:80],
                        suggestion=f"Change to: {correct}",
                    )

            # LANG-006: incorrect flag code mappings
            for wrong_norm, correct in wrong_flags:
                if wrong_norm in normalized_line:
                    self._add_violation(
                        rule_id="LANG-006",
                        rule_name="Use correct flag codes",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message="Invalid flag code mapping",
                        context=stripped[:80],
                        suggestion=f"Change to: {correct}",
                    )

            # LANG-008: language endpoint must use POST
            if "/language/set" in line:
                if "method: 'GET'" in line or 'method: "GET"' in line:
                    self._add_violation(
                        rule_id="LANG-008",
                        rule_name="Language API endpoint must use POST method",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message="Language set endpoint must use POST, not GET",
                        context=stripped[:80],
                        suggestion="Change to: method: 'POST'",
                    )
                if "/language/set?" in line:
                    self._add_violation(
                        rule_id="LANG-008",
                        rule_name="Language API endpoint must use POST method",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message="Language set endpoint must use POST with body, not GET with query params",
                        context=stripped[:80],
                        suggestion="Use POST with JSON body: { language: lang }",
                    )
|
||
|
||
def _check_language_template_patterns(self, target_path: Path):
    """LANG-007, LANG-009: Check language patterns in templates.

    LANG-009 is a per-line check (request.state.language used without a
    |default filter). LANG-007 is a whole-file check (shop templates must
    honor the store's configured languages) and is reported at most once
    per template.
    """
    template_files = list(target_path.glob("app/templates/**/*.html"))

    for file_path in template_files:
        if self._should_ignore_file(file_path):
            continue

        file_path_str = str(file_path)
        is_shop = "/shop/" in file_path_str or "\\shop\\" in file_path_str

        content = file_path.read_text()
        lines = content.split("\n")

        for i, line in enumerate(lines, 1):
            # LANG-009: request.state.language without default
            # (the redundant duplicate substring test was collapsed)
            if "request.state.language" in line and "|default" not in line:
                # Only flag a terminated reference, not part of a longer expression
                if re.search(r'request\.state\.language[\'"\s\}\)]', line):
                    self._add_violation(
                        rule_id="LANG-009",
                        rule_name="Always provide language default",
                        severity=Severity.ERROR,
                        file_path=file_path,
                        line_number=i,
                        message="request.state.language used without default",
                        context=line.strip()[:80],
                        suggestion='Use: request.state.language|default("fr")',
                    )

        # LANG-007: Shop templates must use store.storefront_languages.
        # NOTE(review): this check only inspects the whole-file `content`
        # and reports line_number=1, so it lives outside the per-line loop
        # to guarantee at most one report per template.
        if is_shop and "languageSelector" in content:
            if "store.storefront_languages" not in content:
                # Only relevant if the file actually has a language selector
                if "enabled_langs" in content or "languages" in content:
                    self._add_violation(
                        rule_id="LANG-007",
                        rule_name="Storefront must respect store languages",
                        severity=Severity.WARNING,
                        file_path=file_path,
                        line_number=1,
                        message="Shop template should use store.storefront_languages",
                        context=file_path.name,
                        suggestion="Use: {% set enabled_langs = store.storefront_languages if store else ['fr', 'de', 'en'] %}",
                    )
|
||
|
||
def _check_translation_files(self, target_path: Path):
    """LANG-010: Check that translation files exist and are valid JSON."""
    import json

    locales_dir = target_path / "static/locales"
    if not locales_dir.exists():
        return

    required_files = ["en.json", "fr.json", "de.json", "lb.json"]

    for filename in required_files:
        file_path = locales_dir / filename
        if not file_path.exists():
            self._add_violation(
                rule_id="LANG-010",
                rule_name="Translation files required",
                severity=Severity.ERROR,
                file_path=locales_dir,
                line_number=1,
                # Fix: interpolate the actual file name (the message and
                # suggestion previously contained a literal placeholder and
                # never said which file was missing).
                message=f"Missing translation file: {filename}",
                context=str(locales_dir),
                suggestion=f"Create {filename} with all required translation keys",
            )
            continue

        try:
            with open(file_path) as f:
                json.load(f)
        except json.JSONDecodeError as e:
            self._add_violation(
                rule_id="LANG-010",
                rule_name="Translation files must be valid JSON",
                severity=Severity.ERROR,
                file_path=file_path,
                line_number=e.lineno or 1,
                message=f"Invalid JSON: {e.msg}",
                context=str(e),
                suggestion="Fix JSON syntax error (check for trailing commas, missing quotes)",
            )
|
||
|
||
def _check_migration_batch_mode(
    self, file_path: Path, content: str, lines: list[str]
):
    """MIG-001: Check that alter_column, drop_constraint, create_foreign_key use batch mode.

    SQLite cannot ALTER columns/constraints in place, so Alembic migrations
    must wrap these operations in `op.batch_alter_table(...)`.

    NOTE(review): the previous version tracked an `in_batch_context` flag
    that no check ever consulted (the `\\bop\\.` regexes cannot match
    `batch_op.` calls anyway, since `_` is a word character and blocks the
    word boundary); that dead state has been removed.
    """
    # (regex for a direct op.* call, rule name, op label, batch-mode call)
    direct_ops = (
        (
            re.compile(r"\bop\.alter_column\("),
            "Use batch_alter_table for column modifications",
            "op.alter_column()",
            "batch_op.alter_column(...)",
        ),
        (
            re.compile(r"\bop\.drop_constraint\("),
            "Use batch_alter_table for constraint modifications",
            "op.drop_constraint()",
            "batch_op.drop_constraint(...)",
        ),
        (
            re.compile(r"\bop\.create_foreign_key\("),
            "Use batch_alter_table for foreign key creation",
            "op.create_foreign_key()",
            "batch_op.create_foreign_key(...)",
        ),
    )

    for i, line in enumerate(lines, 1):
        stripped = line.strip()

        # Lines opening a batch context are not themselves violations.
        if "batch_alter_table(" in line or "with op.batch_alter_table" in line:
            continue

        # Skip comments
        if stripped.startswith("#"):
            continue

        for pattern, rule_name, op_label, batch_call in direct_ops:
            if pattern.search(line):
                self._add_violation(
                    rule_id="MIG-001",
                    rule_name=rule_name,
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=i,
                    message=f"{op_label} not supported by SQLite - use batch mode",
                    context=stripped[:100],
                    suggestion=f"Use: with op.batch_alter_table('table') as batch_op: {batch_call}",
                )
|
||
|
||
def _check_migration_constraint_names(
    self, file_path: Path, content: str, lines: list[str]
):
    """MIG-002: Check that constraints have explicit names (not None)."""
    # (regex matching an unnamed constraint call, violation message, fix)
    unnamed_patterns = (
        (
            r"create_foreign_key\s*\(\s*None\s*,",
            "Foreign key constraint must have an explicit name, not None",
            "Use: create_foreign_key('fk_table_column', ...)",
        ),
        (
            r"create_unique_constraint\s*\(\s*None\s*,",
            "Unique constraint must have an explicit name, not None",
            "Use: create_unique_constraint('uq_table_columns', ...)",
        ),
        (
            r"drop_constraint\s*\(\s*None\s*,",
            "Cannot drop constraint with None name",
            "Specify the constraint name to drop",
        ),
    )

    for lineno, raw in enumerate(lines, 1):
        text = raw.strip()

        # Commented-out operations are not violations.
        if text.startswith("#"):
            continue

        for pattern, message, suggestion in unnamed_patterns:
            if re.search(pattern, raw):
                self._add_violation(
                    rule_id="MIG-002",
                    rule_name="Constraints must have explicit names",
                    severity=Severity.ERROR,
                    file_path=file_path,
                    line_number=lineno,
                    message=message,
                    context=text[:100],
                    suggestion=suggestion,
                )
|
||
|
||
def _validate_modules(self, target_path: Path):
    """Validate module structure rules (MOD-001 to MOD-012).

    Walks every directory under app/modules/ and checks layout conventions:
    definition.py presence (MOD-009), required sub-packages for
    self-contained modules (MOD-001), no legacy re-exports or imports
    (MOD-002/003/004), UI assets for modules with menus (MOD-005),
    locales (MOD-006/MOD-012), an exceptions module (MOD-008), router
    exports (MOD-010), and Celery task packaging (MOD-011). Delegates
    MOD-007 (and onward) to _validate_module_definition_paths at the end.
    """
    print("📦 Validating module structure...")

    modules_path = target_path / "app" / "modules"
    if not modules_path.exists():
        return

    # Get all module directories (excluding __pycache__ and special files)
    module_dirs = [
        d for d in modules_path.iterdir()
        if d.is_dir() and d.name != "__pycache__" and not d.name.startswith(".")
    ]

    for module_dir in module_dirs:
        module_name = module_dir.name
        definition_file = module_dir / "definition.py"
        init_file = module_dir / "__init__.py"

        # MOD-009: Check for definition.py (required for auto-discovery)
        if not definition_file.exists():
            # Only report if it looks like a module (has __init__.py or other .py files)
            has_py_files = any(f.suffix == ".py" for f in module_dir.iterdir() if f.is_file())
            if has_py_files or init_file.exists():
                self._add_violation(
                    rule_id="MOD-009",
                    rule_name="Module must have definition.py for auto-discovery",
                    severity=Severity.ERROR,
                    file_path=module_dir / "__init__.py" if init_file.exists() else module_dir,
                    line_number=1,
                    message=f"Module '{module_name}' missing definition.py - won't be auto-discovered",
                    context="Module directory exists but no definition.py",
                    suggestion="Create definition.py with ModuleDefinition instance",
                )
            # No definition.py means nothing further can be validated.
            continue

        # Check for __init__.py
        if not init_file.exists():
            self._add_violation(
                rule_id="MOD-009",
                rule_name="Module must have definition.py for auto-discovery",
                severity=Severity.ERROR,
                file_path=definition_file,
                line_number=1,
                message=f"Module '{module_name}' missing __init__.py",
                context="definition.py exists but no __init__.py",
                suggestion="Create __init__.py to make module importable",
            )

        # Read definition to check if is_self_contained
        definition_content = definition_file.read_text()

        # Textual match covers both `attr=True` and `attr = True` spellings.
        is_self_contained = "is_self_contained=True" in definition_content or "is_self_contained = True" in definition_content

        # The remaining rules only apply to self-contained modules.
        if not is_self_contained:
            continue

        # MOD-001: Check required directories for self-contained modules
        required_dirs = ["services", "models", "schemas", "routes"]
        for req_dir in required_dirs:
            dir_path = module_dir / req_dir
            if not dir_path.exists():
                self._add_violation(
                    rule_id="MOD-001",
                    rule_name="Self-contained modules must have required directories",
                    severity=Severity.ERROR,
                    file_path=definition_file,
                    line_number=1,
                    message=f"Module '{module_name}' is self-contained but missing '{req_dir}/' directory",
                    context="is_self_contained=True",
                    suggestion=f"Create '{req_dir}/' directory with __init__.py",
                )
            elif req_dir != "routes":
                # Check for __init__.py
                # NOTE: rebinds `init_file` (shadows the module-level binding
                # above); the outer value is not used after this point.
                init_file = dir_path / "__init__.py"
                if not init_file.exists():
                    self._add_violation(
                        rule_id="MOD-001",
                        rule_name="Self-contained modules must have required directories",
                        severity=Severity.ERROR,
                        file_path=definition_file,
                        line_number=1,
                        message=f"Module '{module_name}' missing '{req_dir}/__init__.py'",
                        context="is_self_contained=True",
                        suggestion=f"Create '{req_dir}/__init__.py' with exports",
                    )

        # Check routes/api/ exists
        routes_api_path = module_dir / "routes" / "api"
        if not routes_api_path.exists():
            self._add_violation(
                rule_id="MOD-001",
                rule_name="Self-contained modules must have required directories",
                severity=Severity.WARNING,
                file_path=definition_file,
                line_number=1,
                message=f"Module '{module_name}' is self-contained but missing 'routes/api/' directory",
                context="is_self_contained=True",
                suggestion="Create 'routes/api/' directory for API endpoints",
            )

        # MOD-002: Check services contain actual code, not re-exports
        services_dir = module_dir / "services"
        if services_dir.exists():
            self._check_module_contains_actual_code(
                services_dir, module_name, "services", "MOD-002",
                ["from app.services.", "from app/services/"]
            )

        # MOD-003: Check schemas contain actual code, not re-exports
        schemas_dir = module_dir / "schemas"
        if schemas_dir.exists():
            self._check_module_contains_actual_code(
                schemas_dir, module_name, "schemas", "MOD-003",
                ["from models.schema.", "from models/schema/"]
            )

        # MOD-004: Check routes import from module, not legacy locations
        routes_dir = module_dir / "routes"
        if routes_dir.exists():
            for route_file in routes_dir.rglob("*.py"):
                if route_file.name == "__init__.py":
                    continue
                content = route_file.read_text()
                lines = content.split("\n")
                for i, line in enumerate(lines, 1):
                    if "from app.services." in line:
                        # Skip if line has noqa comment
                        if "mod-004" in line.lower():
                            continue
                        self._add_violation(
                            rule_id="MOD-004",
                            rule_name="Module routes must use module-internal implementations",
                            severity=Severity.WARNING,
                            file_path=route_file,
                            line_number=i,
                            message="Route imports from legacy 'app.services' instead of module services",
                            context=line.strip()[:80],
                            suggestion=f"Import from 'app.modules.{module_name}.services' or '..services'",
                        )

        # MOD-005: Check for templates/static if module has UI (menu_items)
        has_menu_items = "menu_items" in definition_content and "FrontendType." in definition_content
        # Check if menu_items has actual entries (not just empty dict):
        # a quote within 200 chars of "menu_items" implies at least one entry.
        has_actual_menu = (
            has_menu_items and
            not re.search(r"menu_items\s*=\s*\{\s*\}", definition_content) and
            ('"' in definition_content[definition_content.find("menu_items"):definition_content.find("menu_items")+200] if "menu_items" in definition_content else False)
        )

        if has_actual_menu:
            templates_dir = module_dir / "templates"
            static_dir = module_dir / "static"

            if not templates_dir.exists():
                self._add_violation(
                    rule_id="MOD-005",
                    rule_name="Modules with UI must have templates and static directories",
                    severity=Severity.WARNING,
                    file_path=definition_file,
                    line_number=1,
                    message=f"Module '{module_name}' has menu_items but missing 'templates/' directory",
                    context="has_menu_items=True",
                    suggestion=f"Create 'templates/{module_name}/admin/' and/or 'templates/{module_name}/store/'",
                )

            if not static_dir.exists():
                self._add_violation(
                    rule_id="MOD-005",
                    rule_name="Modules with UI must have templates and static directories",
                    severity=Severity.WARNING,
                    file_path=definition_file,
                    line_number=1,
                    message=f"Module '{module_name}' has menu_items but missing 'static/' directory",
                    context="has_menu_items=True",
                    suggestion="Create 'static/admin/js/' and/or 'static/store/js/'",
                )

        # MOD-006: Check for locales (info level)
        locales_dir = module_dir / "locales"
        if not locales_dir.exists():
            self._add_violation(
                rule_id="MOD-006",
                rule_name="Module locales should exist for internationalization",
                severity=Severity.INFO,
                file_path=definition_file,
                line_number=1,
                message=f"Module '{module_name}' is missing 'locales/' directory for translations",
                context="is_self_contained=True",
                suggestion="Create 'locales/' with en.json, de.json, fr.json, lb.json",
            )

        # MOD-008: Check for exceptions.py (flat module or package form)
        exceptions_file = module_dir / "exceptions.py"
        exceptions_pkg = module_dir / "exceptions" / "__init__.py"
        if not exceptions_file.exists() and not exceptions_pkg.exists():
            self._add_violation(
                rule_id="MOD-008",
                rule_name="Self-contained modules must have exceptions.py",
                severity=Severity.WARNING,
                file_path=definition_file,
                line_number=1,
                message=f"Module '{module_name}' is missing 'exceptions.py' for module-specific exceptions",
                context="is_self_contained=True",
                suggestion="Create 'exceptions.py' with module-specific exception classes",
            )

        # MOD-010: Check route files export 'router' variable
        self._check_route_exports(module_dir, module_name)

        # MOD-011: Check tasks directory has __init__.py
        tasks_dir = module_dir / "tasks"
        if tasks_dir.exists():
            tasks_init = tasks_dir / "__init__.py"
            if not tasks_init.exists():
                self._add_violation(
                    rule_id="MOD-011",
                    rule_name="Module tasks must have __init__.py for Celery discovery",
                    severity=Severity.WARNING,
                    file_path=definition_file,
                    line_number=1,
                    message=f"Module '{module_name}' has tasks/ but missing tasks/__init__.py",
                    context="tasks/ directory exists",
                    suggestion="Create 'tasks/__init__.py' for Celery auto-discovery",
                )

        # MOD-012: Check locales has all language files
        if locales_dir.exists():
            required_langs = ["en.json", "de.json", "fr.json", "lb.json"]
            missing_langs = [lang for lang in required_langs if not (locales_dir / lang).exists()]
            if missing_langs:
                self._add_violation(
                    rule_id="MOD-012",
                    rule_name="Module locales should have all supported language files",
                    severity=Severity.INFO,
                    file_path=definition_file,
                    line_number=1,
                    message=f"Module '{module_name}' missing locale files: {', '.join(missing_langs)}",
                    context="locales/ directory exists",
                    suggestion=f"Add translation files: {', '.join(missing_langs)}",
                )

        # MOD-007: Validate definition paths match directory structure
        self._validate_module_definition_paths(module_dir, module_name, definition_file, definition_content)
|
||
|
||
def _check_module_contains_actual_code(
    self,
    dir_path: Path,
    module_name: str,
    dir_type: str,
    rule_id: str,
    anti_patterns: list[str]
):
    """Flag module files that merely re-export from a legacy location.

    A file is a pure re-export when it matches one of the legacy import
    patterns and contains no class/function definitions of its own. One
    violation is reported per matching line.
    """
    for source_file in dir_path.glob("*.py"):
        if source_file.name == "__init__.py":
            continue

        file_lines = source_file.read_text().split("\n")

        defines_code = any(
            re.match(r"^(class|def|async def)\s+\w+", text)
            for text in file_lines
        )
        reexports = any(
            pattern in text
            for text in file_lines
            for pattern in anti_patterns
        )

        # Files with their own definitions (or no legacy imports) are fine.
        if not reexports or defines_code:
            continue

        for lineno, text in enumerate(file_lines, 1):
            if any(pattern in text for pattern in anti_patterns):
                self._add_violation(
                    rule_id=rule_id,
                    rule_name=f"Module {dir_type} must contain actual code, not re-exports",
                    severity=Severity.WARNING,
                    file_path=source_file,
                    line_number=lineno,
                    message="File re-exports from legacy location instead of containing actual code",
                    context=text.strip()[:80],
                    suggestion="Move actual code to this file and update legacy to re-export from here",
                )
|
||
|
||
def _check_route_exports(self, module_dir: Path, module_name: str):
    """MOD-010: Check that route files export 'router' variable for auto-discovery."""
    routes_root = module_dir / "routes"
    if not routes_root.exists():
        return

    # Auto-discovery scans the api/ and pages/ subtrees for the three
    # standard route files.
    for subdir in ("api", "pages"):
        base = routes_root / subdir
        if not base.exists():
            continue

        for filename in ("admin.py", "store.py", "shop.py"):
            candidate = base / filename
            if not candidate.exists():
                continue

            source = candidate.read_text()

            # Any of these spellings counts as a module-level 'router'.
            exports_router = (
                "router = APIRouter" in source
                or "router: APIRouter" in source
                or "\nrouter = " in source
            )
            if exports_router:
                continue

            self._add_violation(
                rule_id="MOD-010",
                rule_name="Module routes must export router variable for auto-discovery",
                severity=Severity.WARNING,
                file_path=candidate,
                line_number=1,
                message="Route file missing 'router' variable for auto-discovery",
                context=f"routes/{subdir}/{filename}",
                suggestion="Add: router = APIRouter() and use @router.get/post decorators",
            )
|
||
|
||
def _validate_module_definition_paths(
    self,
    module_dir: Path,
    module_name: str,
    definition_file: Path,
    definition_content: str
):
    """MOD-007: Validate that paths in definition match actual directories."""
    # Each declared *_path attribute must correspond to this on-disk entry.
    expected_layout = (
        ("services_path", "services/__init__.py"),
        ("models_path", "models/__init__.py"),
        ("schemas_path", "schemas/__init__.py"),
        ("templates_path", "templates"),
        ("locales_path", "locales"),
    )

    for attr, rel_path in expected_layout:
        # Accept either quote style for the attribute value.
        declared = f'{attr}="' in definition_content or f"{attr}='" in definition_content
        if not declared:
            continue
        if (module_dir / rel_path).exists():
            continue
        self._add_violation(
            rule_id="MOD-007",
            rule_name="Module definition must match directory structure",
            severity=Severity.ERROR,
            file_path=definition_file,
            line_number=1,
            message=f"Module definition specifies '{attr}' but '{rel_path}' does not exist",
            context=f"{attr}=app.modules.{module_name}.*",
            suggestion=f"Create '{rel_path}' or remove '{attr}' from definition",
        )

    # exceptions_path is special: it may resolve to either a flat
    # exceptions.py or an exceptions/ package.
    declares_exceptions = (
        'exceptions_path="' in definition_content
        or "exceptions_path='" in definition_content
    )
    if declares_exceptions:
        has_exceptions = (
            (module_dir / "exceptions.py").exists()
            or (module_dir / "exceptions" / "__init__.py").exists()
        )
        if not has_exceptions:
            self._add_violation(
                rule_id="MOD-007",
                rule_name="Module definition must match directory structure",
                severity=Severity.ERROR,
                file_path=definition_file,
                line_number=1,
                message="Module definition specifies 'exceptions_path' but no exceptions module exists",
                context=f"exceptions_path=app.modules.{module_name}.exceptions",
                suggestion="Create 'exceptions.py' or 'exceptions/__init__.py'",
            )

    # MOD-020: Check module definition has required attributes
    self._validate_module_definition_completeness(
        module_dir, module_name, definition_file, definition_content
    )
|
||
|
||
def _validate_module_definition_completeness(
    self,
    module_dir: Path,
    module_name: str,
    definition_file: Path,
    definition_content: str
):
    """
    Validate module definition completeness (MOD-020 to MOD-023).

    Operates purely on the raw text of definition.py using substring and
    regex probes, so only the "attr=" / "attr = " spellings are recognized.

    Checks:
    - MOD-020: Required attributes (code, name, description, version, features, permissions)
    - MOD-021: Modules with menus should have features
    - MOD-022: Feature modules should have permissions
    - MOD-023: Modules with routers should use get_*_with_routers pattern

    NOTE(review): ``module_dir`` is not used in this method; it appears to be
    kept for signature parity with the other MOD validators — confirm before
    removing.
    """
    # Detect module characteristics
    is_internal = "is_internal=True" in definition_content or "is_internal = True" in definition_content
    # "Core infrastructure" = is_core plus no (or empty) menu_items; such
    # modules are exempt from the features requirement below.
    is_core_infrastructure = (
        ("is_core=True" in definition_content or "is_core = True" in definition_content) and
        ("menu_items={}" in definition_content or "menu_items = {}" in definition_content or "menu_items" not in definition_content)
    )

    has_features = "features=[" in definition_content or "features = [" in definition_content
    has_permissions = "permissions=[" in definition_content or "permissions = [" in definition_content
    has_menus = "menus={" in definition_content or "menus = {" in definition_content
    # menu_items counts only when present AND non-empty (regex rules out "{}").
    has_menu_items = (
        "menu_items={" in definition_content and
        not re.search(r"menu_items\s*=\s*\{\s*\}", definition_content)
    )

    # Check for router lazy import pattern
    has_router_imports = "_get_admin_router" in definition_content or "_get_store_router" in definition_content
    has_get_with_routers = re.search(r"def get_\w+_module_with_routers\s*\(", definition_content)

    # MOD-020: Check required attributes
    # Basic required attributes (matched as "attr=" substrings).
    required_attrs = ["code=", "name=", "description=", "version="]
    for attr in required_attrs:
        if attr not in definition_content:
            attr_name = attr.replace("=", "")
            self._add_violation(
                rule_id="MOD-020",
                rule_name="Module definition must have required attributes",
                severity=Severity.WARNING,
                file_path=definition_file,
                line_number=1,
                message=f"Module '{module_name}' missing required attribute '{attr_name}'",
                context="ModuleDefinition()",
                suggestion=f"Add '{attr_name}' to ModuleDefinition",
            )

    # Check features (unless infrastructure module)
    if not has_features and not is_core_infrastructure:
        self._add_violation(
            rule_id="MOD-020",
            rule_name="Module definition must have required attributes",
            severity=Severity.WARNING,
            file_path=definition_file,
            line_number=1,
            message=f"Module '{module_name}' missing 'features' list",
            context="ModuleDefinition() without features",
            suggestion="Add 'features=[...]' to describe module capabilities",
        )

    # MOD-021: Modules with menus should have features
    if (has_menus or has_menu_items) and not has_features:
        self._add_violation(
            rule_id="MOD-021",
            rule_name="Modules with menus should have features",
            severity=Severity.WARNING,
            file_path=definition_file,
            line_number=1,
            message=f"Module '{module_name}' has menus but no features defined",
            context="menus={...} without features=[...]",
            suggestion="Add 'features=[...]' to describe what the module provides",
        )

    # MOD-022: Feature modules should have permissions
    # Internal modules are exempt; so are storefront-only modules below.
    if has_features and not has_permissions and not is_internal:
        # Check if it's a storefront-only module (cart, checkout)
        is_storefront_only = module_name in ["cart", "checkout"]

        if not is_storefront_only:
            self._add_violation(
                rule_id="MOD-022",
                rule_name="Feature modules should have permissions",
                severity=Severity.INFO,
                file_path=definition_file,
                line_number=1,
                message=f"Module '{module_name}' has features but no permissions defined",
                context="features=[...] without permissions=[...]",
                suggestion="Add 'permissions=[PermissionDefinition(...), ...]' for RBAC",
            )

    # MOD-023: Modules with routers should use get_*_with_routers pattern
    if has_router_imports and not has_get_with_routers:
        self._add_violation(
            rule_id="MOD-023",
            rule_name="Modules with routers should use get_*_with_routers pattern",
            severity=Severity.INFO,
            file_path=definition_file,
            line_number=1,
            message=f"Module '{module_name}' has router imports but no get_*_with_routers() function",
            context="_get_admin_router() or _get_store_router() without wrapper",
            suggestion=f"Add 'def get_{module_name}_module_with_routers()' function",
        )
||
def _validate_cross_module_imports(self, target_path: Path):
    """
    Validate cross-module import rules (IMPORT-001 to IMPORT-003).

    Core architectural rule: Core modules NEVER import from optional modules.
    This ensures optional modules are truly optional and can be disabled without breaking the app.

    Rules:
    - IMPORT-001: Core module imports from optional module (ERROR)
    - IMPORT-002: Optional module imports from unrelated optional module (WARNING)
    - IMPORT-003: Suggest using protocol pattern instead of direct import (INFO)

    Uses provider patterns (MetricsProvider, WidgetProvider) for cross-module data.
    See docs/architecture/cross-module-import-rules.md for details.

    Imports inside docstrings, TYPE_CHECKING blocks, and try: blocks
    (lazy imports) are deliberately skipped — only real top-level-style
    imports are checked.
    """
    print("🔗 Validating cross-module imports...")

    modules_path = target_path / "app" / "modules"
    if not modules_path.exists():
        return

    # Use class-level module definitions
    CORE_MODULES = self.CORE_MODULES
    OPTIONAL_MODULES = self.OPTIONAL_MODULES

    # contracts module cannot import from any other module.
    # NOTE(review): '-' binds tighter than '|', so this evaluates as
    # CORE_MODULES | (OPTIONAL_MODULES - {"contracts"}). If "contracts"
    # lives in CORE_MODULES it is NOT removed — confirm intent (harmless in
    # practice because self-imports are skipped below).
    CONTRACTS_FORBIDDEN_IMPORTS = CORE_MODULES | OPTIONAL_MODULES - {"contracts"}

    # Check each module directory
    module_dirs = [
        d for d in modules_path.iterdir()
        if d.is_dir() and d.name != "__pycache__" and not d.name.startswith(".")
    ]

    for module_dir in module_dirs:
        module_name = module_dir.name

        # Skip if not a recognized module
        if module_name not in CORE_MODULES and module_name not in OPTIONAL_MODULES:
            continue

        # Check all Python files in this module
        for py_file in module_dir.rglob("*.py"):
            if "__pycache__" in str(py_file):
                continue

            try:
                content = py_file.read_text()
                lines = content.split("\n")
            except Exception:
                # Unreadable file (permissions/encoding) — skip silently.
                continue

            # Track if we're inside a docstring
            in_docstring = False
            docstring_delimiter = None
            # Track if we're inside a TYPE_CHECKING block
            in_type_checking = False
            type_checking_indent = 0

            for i, line in enumerate(lines, 1):
                # Skip comments and empty lines
                stripped = line.strip()
                if stripped.startswith("#") or not stripped:
                    continue

                # Track docstring boundaries
                # Count triple-quote occurrences to handle docstrings
                if '"""' in line or "'''" in line:
                    # Check for triple quotes
                    for delim in ['"""', "'''"]:
                        count = line.count(delim)
                        if count == 1:
                            # Single delimiter - toggle docstring state
                            if not in_docstring:
                                in_docstring = True
                                docstring_delimiter = delim
                            elif docstring_delimiter == delim:
                                in_docstring = False
                                docstring_delimiter = None
                        elif count == 2:
                            # Two delimiters on same line (single-line docstring or open+close)
                            # This doesn't change state as it opens and closes on same line
                            pass

                # Skip lines inside docstrings (documentation examples)
                if in_docstring:
                    continue

                # Track TYPE_CHECKING blocks
                current_indent = len(line) - len(line.lstrip())
                if "if TYPE_CHECKING:" in stripped or "if TYPE_CHECKING" in stripped:
                    in_type_checking = True
                    type_checking_indent = current_indent
                    continue
                # Exit TYPE_CHECKING block when we see code at same or lower indentation
                if in_type_checking and current_indent <= type_checking_indent and stripped:
                    in_type_checking = False

                # Skip lines inside TYPE_CHECKING blocks (these are fine for type hints)
                if in_type_checking:
                    continue

                # Skip lazy imports inside try/except blocks
                # These are the approved pattern for cross-module imports
                # Check if line is indented (inside function) and look for nearby try:
                if stripped.startswith(("from ", "import ")) and line.startswith(" "):
                    # Check previous few lines for try: at same or lower indentation
                    current_indent = len(line) - len(line.lstrip())
                    is_lazy_import = False
                    # Look back at most 10 lines for an enclosing "try:".
                    for lookback in range(1, min(10, i)):
                        prev_line = lines[i - lookback - 1]
                        prev_stripped = prev_line.strip()
                        prev_indent = len(prev_line) - len(prev_line.lstrip())
                        if prev_stripped.startswith("try:") and prev_indent < current_indent:
                            # This is a lazy import inside a try block - skip it
                            is_lazy_import = True
                            break
                    if is_lazy_import:
                        continue

                # Look for import statements
                import_match = re.match(
                    r"^\s*(?:from\s+(app\.modules\.(\w+))|import\s+(app\.modules\.(\w+)))",
                    line
                )

                if not import_match:
                    continue

                # Extract the imported module name
                # group(2) = "from app.modules.X" form, group(4) = "import app.modules.X" form.
                imported_module = import_match.group(2) or import_match.group(4)

                if not imported_module:
                    continue

                # Skip self-imports
                if imported_module == module_name:
                    continue

                # Skip suppression comments (# IMPORT-002)
                if "import-002" in line.lower():
                    continue

                # contracts module cannot import from any module
                if module_name == "contracts":
                    if imported_module in CONTRACTS_FORBIDDEN_IMPORTS:
                        self._add_violation(
                            rule_id="IMPORT-001",
                            rule_name="Contracts module cannot import from other modules",
                            severity=Severity.ERROR,
                            file_path=py_file,
                            line_number=i,
                            message=f"contracts module imports from '{imported_module}' - contracts should only define protocols",
                            context=stripped[:80],
                            suggestion="Contracts should only import from stdlib/typing. Move implementation elsewhere.",
                        )
                    continue

                # IMPORT-001: Core module importing from optional module
                if module_name in CORE_MODULES and imported_module in OPTIONAL_MODULES:
                    self._add_violation(
                        rule_id="IMPORT-001",
                        rule_name="Core module cannot import from optional module",
                        severity=Severity.ERROR,
                        file_path=py_file,
                        line_number=i,
                        message=f"Core module '{module_name}' imports from optional module '{imported_module}'",
                        context=stripped[:80],
                        suggestion="Use provider pattern (MetricsProvider, WidgetProvider) or contracts protocol instead. See docs/architecture/cross-module-import-rules.md",
                    )

                # IMPORT-002: Optional module importing from unrelated optional module
                # This is a warning because it may indicate missing dependency declaration
                elif module_name in OPTIONAL_MODULES and imported_module in OPTIONAL_MODULES:
                    # Check if there's a dependency declared (would need to read definition.py)
                    definition_file = module_dir / "definition.py"
                    has_dependency = False
                    if definition_file.exists():
                        try:
                            def_content = definition_file.read_text()
                            # Check if requires=[...imported_module...] or requires = [...imported_module...]
                            # NOTE(review): this matches the quoted name anywhere in
                            # definition.py, not only inside requires=[...].
                            if f'"{imported_module}"' in def_content or f"'{imported_module}'" in def_content:
                                # Likely has the dependency declared
                                has_dependency = True
                        except Exception:
                            pass

                    if not has_dependency:
                        self._add_violation(
                            rule_id="IMPORT-002",
                            rule_name="Optional module imports from another optional module without dependency",
                            severity=Severity.WARNING,
                            file_path=py_file,
                            line_number=i,
                            message=f"Module '{module_name}' imports from '{imported_module}' without declaring dependency",
                            context=stripped[:80],
                            suggestion=f"Add '{imported_module}' to requires=[...] in definition.py, or use protocol pattern",
                        )
||
def _validate_circular_dependencies(self, target_path: Path):
    """
    IMPORT-004: Detect circular module dependencies.

    Parses requires=[...] from all definition.py files and runs DFS cycle detection.
    Severity: WARNING (the code below emits Severity.WARNING; an earlier
    version of this docstring claimed ERROR).
    """
    print("🔄 Checking circular module dependencies...")

    modules_path = target_path / "app" / "modules"
    if not modules_path.exists():
        return

    # Build dependency graph from definition.py requires=[...]
    dep_graph: dict[str, list[str]] = {}
    requires_pattern = re.compile(r"requires\s*=\s*\[([^\]]*)\]", re.DOTALL)
    module_name_pattern = re.compile(r'["\'](\w+)["\']')

    for definition_file in modules_path.glob("*/definition.py"):
        module_name = definition_file.parent.name
        try:
            content = definition_file.read_text()
        except Exception:
            # Unreadable definition — leave the module out of the graph.
            continue

        match = requires_pattern.search(content)
        if match:
            # Quoted names inside the first requires=[...] are the deps.
            deps = module_name_pattern.findall(match.group(1))
            dep_graph[module_name] = deps
        else:
            dep_graph[module_name] = []

    # DFS cycle detection
    found_cycles: list[tuple[str, ...]] = []
    visited: set[str] = set()
    on_stack: set[str] = set()
    path: list[str] = []

    def dfs(node: str) -> None:
        visited.add(node)
        on_stack.add(node)
        path.append(node)

        for neighbor in dep_graph.get(node, []):
            if neighbor not in dep_graph:
                continue  # Skip unknown modules
            if neighbor in on_stack:
                # Found a cycle - extract it
                cycle_start = path.index(neighbor)
                cycle = tuple(path[cycle_start:])
                # Normalize: rotate so smallest element is first (dedup)
                min_idx = cycle.index(min(cycle))
                normalized = cycle[min_idx:] + cycle[:min_idx]
                if normalized not in found_cycles:
                    found_cycles.append(normalized)
            elif neighbor not in visited:
                dfs(neighbor)

        path.pop()
        on_stack.remove(node)

    for module in dep_graph:
        if module not in visited:
            dfs(module)

    # Report each unique (normalized) cycle once, anchored at its first member.
    for cycle in found_cycles:
        cycle_str = " → ".join(cycle) + " → " + cycle[0]
        self._add_violation(
            rule_id="IMPORT-004",
            rule_name="Circular module dependency detected",
            severity=Severity.WARNING,
            file_path=modules_path / cycle[0] / "definition.py",
            line_number=1,
            message=f"Circular dependency: {cycle_str}",
            context="requires=[...] in definition.py files",
            suggestion="Break the cycle by using protocols/contracts or restructuring module boundaries",
        )
||
def _validate_service_test_coverage(self, target_path: Path):
    """
    MOD-024: Check that core module services have corresponding test files.

    A service file app/modules/X/services/foo.py is considered covered when
    a file named test_foo.py exists anywhere under ``target_path``.
    Severity: WARNING for core modules, INFO for optional.
    """
    print("🧪 Checking service test coverage...")

    modules_path = target_path / "app" / "modules"
    if not modules_path.exists():
        return

    # Infrastructure/provider files that never need dedicated tests.
    skip_patterns = {"__init__.py", "_metrics.py", "_features.py", "_widgets.py", "_aggregator.py"}

    # Collect all test file names across the project
    test_files: set[str] = set()
    for test_file in target_path.rglob("test_*.py"):
        test_files.add(test_file.name)

    module_dirs = [
        d for d in modules_path.iterdir()
        if d.is_dir() and d.name != "__pycache__" and not d.name.startswith(".")
    ]

    for module_dir in module_dirs:
        module_name = module_dir.name
        services_dir = module_dir / "services"
        if not services_dir.exists():
            continue

        is_core = module_name in self.CORE_MODULES
        severity = Severity.WARNING if is_core else Severity.INFO

        for service_file in services_dir.glob("*.py"):
            if service_file.name in skip_patterns:
                continue
            # Substring match also excludes variants like foo_metrics.py.
            if any(pat in service_file.name for pat in {"_metrics", "_features", "_widgets", "_aggregator"}):
                continue

            expected_test = f"test_{service_file.name}"
            if expected_test not in test_files:
                self._add_violation(
                    rule_id="MOD-024",
                    rule_name="Service missing test file",
                    severity=severity,
                    file_path=service_file,
                    line_number=1,
                    message=f"No test file '{expected_test}' found for service '{service_file.name}' in {'core' if is_core else 'optional'} module '{module_name}'",
                    context=str(service_file.relative_to(target_path)),
                    suggestion=f"Create '{expected_test}' in the module's tests directory",
                )
|
||
def _validate_unused_exceptions(self, target_path: Path):
    """
    MOD-025: Detect unused exception classes.

    Finds exception classes in */exceptions.py and checks if they are used
    anywhere in the codebase (raise, except, pytest.raises, or as base class).
    Severity: INFO.

    The usage scan is substring-based, so it can report false positives
    (e.g. "(ClassName)" appearing in unrelated text) and misses dynamic
    usage — hence INFO only. Classes can be suppressed with
    ``# noqa: MOD-025`` on the class line.
    """
    print("🗑️  Checking for unused exception classes...")

    exception_files = list(target_path.glob("app/modules/*/exceptions.py"))
    if not exception_files:
        return

    # Build list of all exception class names and their files
    # Only names ending in Exception/Error are considered.
    exception_class_pattern = re.compile(r"class\s+(\w+(?:Exception|Error))\s*\(")
    exception_classes: list[tuple[str, Path, int]] = []

    for exc_file in exception_files:
        try:
            content = exc_file.read_text()
            lines = content.split("\n")
        except Exception:
            continue

        for i, line in enumerate(lines, 1):
            match = exception_class_pattern.match(line)
            if match:
                # Check for noqa suppression
                if "noqa: MOD-025" in line or "noqa: mod-025" in line:
                    continue
                exception_classes.append((match.group(1), exc_file, i))

    if not exception_classes:
        return

    # Collect all Python file contents (excluding __pycache__)
    # Read once up front so each class scan is in-memory only.
    all_py_files: list[tuple[Path, str]] = []
    for py_file in target_path.rglob("*.py"):
        if "__pycache__" in str(py_file):
            continue
        try:
            all_py_files.append((py_file, py_file.read_text()))
        except Exception:
            continue

    for class_name, exc_file, line_num in exception_classes:
        module_name = exc_file.parent.name
        usage_found = False

        for py_file, content in all_py_files:
            # Skip the definition file itself
            if py_file == exc_file:
                continue

            # Skip same-module __init__.py re-exports
            if py_file.name == "__init__.py" and module_name in str(py_file):
                continue

            # Check for usage patterns
            if (
                f"raise {class_name}" in content
                or f"except {class_name}" in content
                or f"pytest.raises({class_name}" in content
                or f"({class_name})" in content  # base class usage
            ):
                usage_found = True
                break

        if not usage_found:
            self._add_violation(
                rule_id="MOD-025",
                rule_name="Unused exception class",
                severity=Severity.INFO,
                file_path=exc_file,
                line_number=line_num,
                message=f"Exception class '{class_name}' appears to be unused",
                context=f"class {class_name}(...)",
                suggestion=f"Remove '{class_name}' if it is no longer needed",
            )
|
||
|
||
def _validate_legacy_locations(self, target_path: Path):
    """
    Flag code living in legacy locations (MOD-016 to MOD-019).

    All routes, services, tasks, and schemas should live inside module
    directories rather than the old centralized locations.
    """
    print("🚫 Checking legacy locations...")

    # Run the individual checks in a fixed order so report output is stable.
    legacy_checks = (
        self._check_legacy_routes,    # MOD-016: routes out of app/api/v1/
        self._check_legacy_services,  # MOD-017: services out of app/services/
        self._check_legacy_tasks,     # MOD-018: tasks out of app/tasks/
        self._check_legacy_schemas,   # MOD-019: schemas out of models/schema/
    )
    for run_check in legacy_checks:
        run_check(target_path)
|
||
def _check_legacy_routes(self, target_path: Path):
    """
    MOD-016: Check for routes in legacy app/api/v1/ locations.

    Scans both the store and admin legacy API directories with identical
    rules (the two copy-pasted loops in the original were folded into one
    parameterized loop; behavior and violation order are unchanged).

    Exemptions per directory:
    - __init__.py (package marker)
    - auth.py (core authentication, allowed for now)
    - files containing a "noqa: mod-016" suppression comment
    """
    # (legacy dir, context shown in the report, migration suggestion)
    legacy_route_dirs = [
        (
            target_path / "app" / "api" / "v1" / "store",
            "app/api/v1/store/",
            "Move to app/modules/{module}/routes/api/store.py",
        ),
        (
            target_path / "app" / "api" / "v1" / "admin",
            "app/api/v1/admin/",
            "Move to app/modules/{module}/routes/api/admin.py",
        ),
    ]

    for api_path, context, suggestion in legacy_route_dirs:
        if not api_path.exists():
            continue

        for py_file in api_path.glob("*.py"):
            # Package marker and core authentication are exempt.
            if py_file.name in ("__init__.py", "auth.py"):
                continue

            # Check for noqa suppression comment anywhere in the file.
            content = py_file.read_text()
            if "noqa: mod-016" in content.lower():
                continue

            self._add_violation(
                rule_id="MOD-016",
                rule_name="Routes must be in modules, not app/api/v1/",
                severity=Severity.ERROR,
                file_path=py_file,
                line_number=1,
                message=f"Route file '{py_file.name}' in legacy location - should be in module",
                context=context,
                suggestion=suggestion,
            )
|
||
|
||
def _check_legacy_services(self, target_path: Path):
    """
    MOD-017: Check for services in legacy app/services/ location.

    Pure re-export shims (no class/def, importing from app.modules.*) are
    tolerated during migration; files may also opt out with a
    "noqa: mod-017" comment.
    """
    legacy_dir = target_path / "app" / "services"
    if not legacy_dir.exists():
        return

    for service_file in legacy_dir.glob("*.py"):
        if service_file.name == "__init__.py":
            continue

        text = service_file.read_text()

        # Explicit suppression comment disables the check for this file.
        if "noqa: mod-017" in text.lower():
            continue

        # A file defining nothing of its own that imports from app.modules.*
        # is treated as a migration-era re-export shim and skipped.
        defines_anything = any(
            re.match(r"^(class|def|async def)\s+\w+", src_line)
            for src_line in text.split("\n")
        )
        if not defines_anything and "from app.modules." in text:
            continue

        self._add_violation(
            rule_id="MOD-017",
            rule_name="Services must be in modules, not app/services/",
            severity=Severity.ERROR,
            file_path=service_file,
            line_number=1,
            message=f"Service file '{service_file.name}' in legacy location - should be in module",
            context="app/services/",
            suggestion="Move to app/modules/{module}/services/",
        )
|
||
|
||
def _check_legacy_tasks(self, target_path: Path):
    """
    MOD-018: Check for tasks in legacy app/tasks/ location.

    dispatcher.py is infrastructure and exempt; files may opt out with a
    "noqa: mod-018" comment.
    """
    tasks_dir = target_path / "app" / "tasks"
    if not tasks_dir.exists():
        return

    # __init__.py is the package marker; dispatcher.py is allowed infrastructure.
    exempt_names = {"__init__.py", "dispatcher.py"}

    for task_file in tasks_dir.glob("*.py"):
        if task_file.name in exempt_names:
            continue

        # Check for noqa suppression comment anywhere in the file.
        if "noqa: mod-018" in task_file.read_text().lower():
            continue

        self._add_violation(
            rule_id="MOD-018",
            rule_name="Tasks must be in modules, not app/tasks/",
            severity=Severity.ERROR,
            file_path=task_file,
            line_number=1,
            message=f"Task file '{task_file.name}' in legacy location - should be in module",
            context="app/tasks/",
            suggestion="Move to app/modules/{module}/tasks/",
        )
|
||
|
||
def _check_legacy_schemas(self, target_path: Path):
    """
    MOD-019: Check for schemas in legacy models/schema/ location.

    Exempt: base.py (infrastructure), auth.py (cross-cutting auth schemas),
    "noqa: mod-019" suppressions, and pure re-export shims that import from
    app.modules.* (tolerated during migration).
    """
    schemas_path = target_path / "models" / "schema"
    if not schemas_path.exists():
        return

    for py_file in schemas_path.glob("*.py"):
        if py_file.name == "__init__.py":
            continue
        # Allow base.py (base schema classes - infrastructure)
        if py_file.name == "base.py":
            continue
        # Allow auth.py (core authentication schemas - cross-cutting)
        if py_file.name == "auth.py":
            continue

        # Check for noqa comment
        content = py_file.read_text()
        if "noqa: mod-019" in content.lower():
            continue

        # Check if file is a pure re-export
        # (unlike MOD-017, only "class" definitions count here — schema
        # files are expected to contain classes, not functions)
        lines = content.split("\n")
        has_definitions = any(
            re.match(r"^class\s+\w+", line)
            for line in lines
        )

        # If it's a re-export only file, allow it during migration
        if not has_definitions:
            imports_from_module = "from app.modules." in content
            if imports_from_module:
                continue

        self._add_violation(
            rule_id="MOD-019",
            rule_name="Schemas must be in modules, not models/schema/",
            severity=Severity.ERROR,
            file_path=py_file,
            line_number=1,
            message=f"Schema file '{py_file.name}' in legacy location - should be in module",
            context="models/schema/",
            suggestion="Move to app/modules/{module}/schemas/",
        )
|
||
|
||
def _get_rule(self, rule_id: str) -> "dict[str, Any] | None":
    """
    Get rule configuration by ID.

    Searches every known rule category in the loaded config and returns the
    first rule dict whose "id" matches.

    Returns:
        The rule's config dict, or None when no category defines the rule.
        (Return annotation fixed: the original declared ``dict[str, Any]``
        but returned None on a miss; the string annotation keeps this
        compatible with older Python versions.)
    """
    # Look in different rule categories
    rule_categories = (
        "api_endpoint_rules",
        "service_layer_rules",
        "model_rules",
        "exception_rules",
        "naming_rules",
        "auth_rules",
        "javascript_rules",
        "template_rules",
        "frontend_component_rules",
        "language_rules",
        "migration_rules",
        "module_rules",
    )
    for category in rule_categories:
        for rule in self.config.get(category, []):
            if rule.get("id") == rule_id:
                return rule
    return None
|
||
|
||
def _should_ignore_file(self, file_path: Path) -> bool:
    """
    Check if file should be ignored.

    A file is ignored when it matches any glob pattern from the config's
    ignore.files list, or when it lives under a virtualenv directory
    (.venv/ or venv/) regardless of configured patterns.
    """
    ignore_patterns = self.config.get("ignore", {}).get("files", [])

    # Convert to string for easier matching
    file_path_str = str(file_path)

    for pattern in ignore_patterns:
        # Check if any part of the path matches the pattern
        if file_path.match(pattern):
            return True
    # Also check if pattern appears in the path (for .venv, venv, etc.)
    # NOTE(review): these virtualenv checks are hard-coded fallbacks and do
    # not actually use ``pattern`` despite the comment above.
    if "/.venv/" in file_path_str or file_path_str.startswith(".venv/"):
        return True
    if "/venv/" in file_path_str or file_path_str.startswith("venv/"):
        return True

    return False
|
||
|
||
def _add_violation(
    self,
    rule_id: str,
    rule_name: str,
    severity: Severity,
    file_path: Path,
    line_number: int,
    message: str,
    context: str = "",
    suggestion: str = "",
):
    """Record a single rule violation on the shared validation result."""
    self.result.violations.append(
        Violation(
            rule_id=rule_id,
            rule_name=rule_name,
            severity=severity,
            file_path=file_path,
            line_number=line_number,
            message=message,
            context=context,
            suggestion=suggestion,
        )
    )
|
||
|
||
def print_report(self) -> int:
    """
    Print validation report to stdout.

    Returns an exit code: 1 when any ERROR-severity violation exists,
    0 otherwise (including when only warnings/info are present).
    INFO findings are listed only in verbose mode.
    """
    print("\n" + "=" * 80)
    print("📊 ARCHITECTURE VALIDATION REPORT")
    print("=" * 80 + "\n")

    # Group by severity
    errors = [v for v in self.result.violations if v.severity == Severity.ERROR]
    warnings = [v for v in self.result.violations if v.severity == Severity.WARNING]
    infos = [v for v in self.result.violations if v.severity == Severity.INFO]

    print(f"Files checked: {self.result.files_checked}")
    print(
        f"Findings: {len(errors)} errors, {len(warnings)} warnings, {len(infos)} info\n"
    )

    # Print file summary table if we have file results
    if self.result.file_results:
        self._print_summary_table()

    if errors:
        print(f"\n❌ ERRORS ({len(errors)}):")
        print("-" * 80)
        for violation in errors:
            self._print_violation(violation)

    if warnings:
        print(f"\n⚠️  WARNINGS ({len(warnings)}):")
        print("-" * 80)
        for violation in warnings:
            self._print_violation(violation)

    # INFO findings are noise in normal runs; show only with --verbose.
    if infos and self.verbose:
        print(f"\nℹ️  INFO ({len(infos)}):")
        print("-" * 80)
        for violation in infos:
            self._print_violation(violation)

    # Summary
    print("\n" + "=" * 80)
    if self.result.has_errors():
        print(
            f"❌ VALIDATION FAILED - {len(errors)} error(s) must be fixed before committing"
        )
        print("=" * 80)
        return 1
    if self.result.has_warnings():
        print(f"⚠️  VALIDATION PASSED WITH {len(warnings)} WARNING(S)")
        print("=" * 80)
        return 0
    print("✅ VALIDATION PASSED")
    print("=" * 80)
    return 0
|
||
|
||
def _print_summary_table(self):
    """
    Print a summary table of file results.

    NOTE(review): assumes ``self.result.file_results`` is non-empty (the
    max() below raises ValueError otherwise); print_report guards the
    call with that check — confirm any other caller does too.
    """
    print("📋 FILE SUMMARY:")
    print("-" * 80)

    # Calculate column widths — longest display path wins, capped below.
    max_path_len = max(
        len(
            str(
                fr.file_path.relative_to(self.project_root)
                if self.project_root in fr.file_path.parents
                else fr.file_path
            )
        )
        for fr in self.result.file_results
    )
    max_path_len = min(max_path_len, 55)  # Cap at 55 chars

    # Print header
    print(
        f" {'File':<{max_path_len}} {'Status':<8} {'Errors':<7} {'Warnings':<8}"
    )
    print(f" {'-' * max_path_len} {'-' * 8} {'-' * 7} {'-' * 8}")

    # Print each file
    for fr in self.result.file_results:
        rel_path = (
            fr.file_path.relative_to(self.project_root)
            if self.project_root in fr.file_path.parents
            else fr.file_path
        )
        path_str = str(rel_path)
        # Over-long paths are left-truncated so the filename stays visible.
        if len(path_str) > max_path_len:
            path_str = "..." + path_str[-(max_path_len - 3) :]

        print(
            f" {path_str:<{max_path_len}} "
            f"{fr.status_icon} {fr.status:<5} "
            f"{fr.errors:<7} "
            f"{fr.warnings:<8}"
        )

    print("-" * 80)

    # Print totals
    total_errors = sum(fr.errors for fr in self.result.file_results)
    total_warnings = sum(fr.warnings for fr in self.result.file_results)
    passed = sum(1 for fr in self.result.file_results if fr.passed)
    failed = len(self.result.file_results) - passed

    print(
        f"\n Total: {len(self.result.file_results)} files | "
        f"✅ {passed} passed | ❌ {failed} failed | "
        f"{total_errors} errors | {total_warnings} warnings\n"
    )
|
||
|
||
def print_json(self) -> int:
    """
    Print validation results as JSON on stdout.

    Returns 1 when any ERROR-severity violation exists, else 0.
    """
    import json

    def _display_path(p: Path) -> str:
        # Paths under the project root are reported relative to it.
        if self.project_root in p.parents:
            return str(p.relative_to(self.project_root))
        return str(p)

    violations_json = [
        {
            "rule_id": v.rule_id,
            "rule_name": v.rule_name,
            "severity": v.severity.value,
            "file_path": _display_path(v.file_path),
            "line_number": v.line_number,
            "message": v.message,
            "context": v.context or "",
            "suggestion": v.suggestion or "",
        }
        for v in self.result.violations
    ]

    error_count = sum(
        1 for v in self.result.violations if v.severity == Severity.ERROR
    )
    warning_count = sum(
        1 for v in self.result.violations if v.severity == Severity.WARNING
    )

    output = {
        "files_checked": self.result.files_checked,
        "total_violations": len(self.result.violations),
        "errors": error_count,
        "warnings": warning_count,
        "violations": violations_json,
    }

    print(json.dumps(output, indent=2))

    return 1 if self.result.has_errors() else 0
|
||
|
||
def _print_violation(self, v: Violation):
    """Pretty-print one violation; paths are shown relative to the project root when possible."""
    if self.project_root in v.file_path.parents:
        rel_path = v.file_path.relative_to(self.project_root)
    else:
        rel_path = v.file_path

    print(f"\n [{v.rule_id}] {v.rule_name}")
    print(f" File: {rel_path}:{v.line_number}")
    print(f" Issue: {v.message}")

    # Context is extra detail — only shown in verbose mode.
    if v.context and self.verbose:
        print(f" Context: {v.context}")

    if v.suggestion:
        print(f" 💡 Suggestion: {v.suggestion}")
|
||
|
||
|
||
def main():
    """Command-line entry point: parse arguments, run validation, exit.

    The process exit status is the reporter's return value (0 on success,
    1 when error-severity violations exist), or 1 when the --folder
    target is not a directory.
    """

    def _build_parser() -> argparse.ArgumentParser:
        # Parser construction is kept separate from the run logic.
        parser = argparse.ArgumentParser(
            description="Validate architecture patterns in codebase",
            formatter_class=argparse.RawDescriptionHelpFormatter,
            epilog=__doc__,
        )

        # At most one way to select the validation target.
        targets = parser.add_mutually_exclusive_group()
        targets.add_argument(
            "-f",
            "--file",
            type=Path,
            metavar="PATH",
            help="Validate a single file (.py, .js, or .html)",
        )
        targets.add_argument(
            "-d",
            "--folder",
            type=Path,
            metavar="PATH",
            help="Validate all files in a directory (recursive)",
        )
        targets.add_argument(
            "-o",
            "--object",
            type=str,
            metavar="NAME",
            help="Validate all files related to an entity (e.g., merchant, store, order)",
        )

        parser.add_argument(
            "-c",
            "--config",
            type=Path,
            default=Path.cwd() / ".architecture-rules.yaml",
            help="Path to architecture rules config (default: .architecture-rules.yaml)",
        )
        parser.add_argument(
            "-v",
            "--verbose",
            action="store_true",
            help="Show detailed output including context",
        )
        # NOTE(review): --errors-only is parsed but never read in this
        # function; presumably consumed elsewhere or vestigial — confirm.
        parser.add_argument(
            "--errors-only", action="store_true", help="Only show errors, suppress warnings"
        )
        parser.add_argument(
            "--json",
            action="store_true",
            help="Output results as JSON (for programmatic use)",
        )
        return parser

    args = _build_parser().parse_args()

    validator = ArchitectureValidator(args.config, verbose=args.verbose)

    # Dispatch on whichever target option was supplied (default: cwd).
    if args.file:
        validator.validate_file(args.file)
    elif args.folder:
        if not args.folder.is_dir():
            print(f"❌ Not a directory: {args.folder}")
            sys.exit(1)
        validator.validate_all(args.folder)
    elif args.object:
        validator.validate_object(args.object)
    else:
        validator.validate_all(Path.cwd())

    # Both reporters return the process exit code.
    sys.exit(validator.print_json() if args.json else validator.print_report())
|
||
|
||
|
||
# Allow direct invocation as a script (see module docstring for usage).
if __name__ == "__main__":
    main()
|