fix: add .dockerignore and env_file to docker-compose
Some checks failed
CI / ruff (push) Successful in 9s
CI / architecture (push) Has been cancelled
CI / dependency-scanning (push) Has been cancelled
CI / audit (push) Has been cancelled
CI / docs (push) Has been cancelled
CI / deploy (push) Has been cancelled
CI / pytest (push) Has been cancelled
Some checks failed
CI / ruff (push) Successful in 9s
CI / architecture (push) Has been cancelled
CI / dependency-scanning (push) Has been cancelled
CI / audit (push) Has been cancelled
CI / docs (push) Has been cancelled
CI / deploy (push) Has been cancelled
CI / pytest (push) Has been cancelled
Prevents the host .env file from being baked into the Docker image (it was overriding the config defaults). Adds an env_file directive so containers load the host .env properly at runtime. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -4,6 +4,7 @@ Base Validator Class
|
||||
Shared functionality for all validators.
|
||||
"""
|
||||
|
||||
import re
|
||||
from abc import ABC
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
@@ -62,8 +63,18 @@ class BaseValidator(ABC):
|
||||
".venv", "venv", "node_modules", "__pycache__", ".git",
|
||||
".pytest_cache", ".mypy_cache", "dist", "build", "*.egg-info",
|
||||
"migrations", "alembic/versions", ".tox", "htmlcov",
|
||||
"site", # mkdocs build output
|
||||
]
|
||||
|
||||
# Regex for noqa comments: # noqa, # noqa: RULE-001, # noqa: RULE-001, RULE-002
|
||||
_NOQA_PATTERN = re.compile(
|
||||
r"#\s*noqa(?::\s*([A-Z]+-\d+(?:\s*,\s*[A-Z]+-\d+)*))?",
|
||||
)
|
||||
# Same for HTML comments: <!-- noqa: RULE-001 -->
|
||||
_NOQA_HTML_PATTERN = re.compile(
|
||||
r"<!--\s*noqa(?::\s*([A-Z]+-\d+(?:\s*,\s*[A-Z]+-\d+)*))?\s*-->",
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
rules_dir: str = "",
|
||||
@@ -180,6 +191,26 @@ class BaseValidator(ABC):
|
||||
path_str = str(file_path)
|
||||
return any(pattern in path_str for pattern in self.IGNORE_PATTERNS)
|
||||
|
||||
def _is_noqa_suppressed(self, line: str, rule_id: str) -> bool:
    """Return True when *line* carries a noqa comment that silences *rule_id*.

    Recognised forms:

    - ``# noqa`` — suppresses all rules
    - ``# noqa: SEC-001`` — suppresses one specific rule
    - ``# noqa: SEC-001, SEC-002`` — suppresses several rules
    - ``<!-- noqa: SEC-015 -->`` — HTML comment variant
    """
    for regex in (self._NOQA_PATTERN, self._NOQA_HTML_PATTERN):
        hit = regex.search(line)
        if hit is None:
            continue
        listed = hit.group(1)
        # A bare noqa (no rule list after the colon) silences every rule.
        if not listed:
            return True
        # Otherwise only the explicitly listed rule IDs are suppressed.
        if rule_id in {part.strip() for part in listed.split(",")}:
            return True
    return False
|
||||
|
||||
def _add_violation(
|
||||
self,
|
||||
rule_id: str,
|
||||
|
||||
@@ -196,7 +196,7 @@ class AuditValidator(BaseValidator):
|
||||
r"logger\.\w+\(.*password\s*[=:]\s*['\"]?%", # password=%s
|
||||
r"logger\.\w+\(.*password\s*[=:]\s*\{", # password={var}
|
||||
r"logging\.\w+\(.*password\s*[=:]\s*['\"]?%", # password=%s
|
||||
r"print\(.*password\s*=", # print(password=xxx)
|
||||
r"print\(.*password\s*=", # print(password=xxx) # noqa: SEC-021
|
||||
r"logger.*credit.*card.*\d", # credit card with numbers
|
||||
r"logger.*\bssn\b.*\d", # SSN with numbers
|
||||
],
|
||||
|
||||
@@ -199,6 +199,8 @@ class PerformanceValidator(BaseValidator):
|
||||
if re.search(r"\.\w+\.\w+", line) and "(" not in line:
|
||||
# Could be accessing a relationship
|
||||
if any(rel in line for rel in [".customer.", ".store.", ".order.", ".product.", ".user."]):
|
||||
if self._is_noqa_suppressed(line, "PERF-001"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-001",
|
||||
rule_name="N+1 query detection",
|
||||
@@ -225,7 +227,7 @@ class PerformanceValidator(BaseValidator):
|
||||
context_text = "\n".join(context_lines)
|
||||
|
||||
if "limit" not in context_text.lower() and "filter" not in context_text.lower():
|
||||
if "# noqa" in line or "# bounded" in line:
|
||||
if self._is_noqa_suppressed(line, "PERF-003") or "# bounded" in line:
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-003",
|
||||
@@ -256,6 +258,8 @@ class PerformanceValidator(BaseValidator):
|
||||
if current_indent <= for_indent and stripped:
|
||||
in_for_loop = False
|
||||
elif "db.add(" in line or ".save(" in line:
|
||||
if self._is_noqa_suppressed(line, "PERF-006"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-006",
|
||||
rule_name="Bulk operations for multiple records",
|
||||
@@ -278,6 +282,8 @@ class PerformanceValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern, issue in patterns:
|
||||
if re.search(pattern, line):
|
||||
if self._is_noqa_suppressed(line, "PERF-008"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-008",
|
||||
rule_name="Use EXISTS for existence checks",
|
||||
@@ -311,17 +317,18 @@ class PerformanceValidator(BaseValidator):
|
||||
in_for_loop = False
|
||||
elif loop_var and f"{loop_var}." in line and "=" in line and "==" not in line:
|
||||
# Attribute assignment in loop
|
||||
if "# noqa" not in line:
|
||||
self._add_violation(
|
||||
rule_id="PERF-009",
|
||||
rule_name="Batch updates instead of loops",
|
||||
severity=Severity.INFO,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message="Individual updates in loop - consider batch update",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use .update({...}) with filters for batch updates",
|
||||
)
|
||||
if self._is_noqa_suppressed(line, "PERF-009"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-009",
|
||||
rule_name="Batch updates instead of loops",
|
||||
severity=Severity.INFO,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message="Individual updates in loop - consider batch update",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use .update({...}) with filters for batch updates",
|
||||
)
|
||||
|
||||
# =========================================================================
|
||||
# API Performance Checks
|
||||
@@ -349,17 +356,18 @@ class PerformanceValidator(BaseValidator):
|
||||
in_endpoint = False
|
||||
# Check for .all() without pagination
|
||||
if ".all()" in line and not has_pagination:
|
||||
if "# noqa" not in line:
|
||||
self._add_violation(
|
||||
rule_id="PERF-026",
|
||||
rule_name="Pagination required for list endpoints",
|
||||
severity=Severity.WARNING,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message="List endpoint may lack pagination",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Add skip/limit parameters for pagination",
|
||||
)
|
||||
if self._is_noqa_suppressed(line, "PERF-026"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-026",
|
||||
rule_name="Pagination required for list endpoints",
|
||||
severity=Severity.WARNING,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message="List endpoint may lack pagination",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Add skip/limit parameters for pagination",
|
||||
)
|
||||
|
||||
# =========================================================================
|
||||
# Async Performance Checks
|
||||
@@ -381,6 +389,10 @@ class PerformanceValidator(BaseValidator):
|
||||
if await_count >= 3:
|
||||
# Verify they're sequential (within 5 lines of each other)
|
||||
if all(await_lines[j+1] - await_lines[j] <= 2 for j in range(len(await_lines)-1)):
|
||||
if self._is_noqa_suppressed(line, "PERF-037"):
|
||||
await_count = 0
|
||||
await_lines = []
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-037",
|
||||
rule_name="Parallel independent operations",
|
||||
@@ -412,6 +424,8 @@ class PerformanceValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern in patterns:
|
||||
if re.search(pattern, line) and "timeout" not in line:
|
||||
if self._is_noqa_suppressed(line, "PERF-040"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-040",
|
||||
rule_name="Timeout configuration",
|
||||
@@ -436,22 +450,25 @@ class PerformanceValidator(BaseValidator):
|
||||
if i < len(lines):
|
||||
next_lines = "\n".join(lines[i:min(i+3, len(lines))])
|
||||
if "for " in next_lines and "in" in next_lines:
|
||||
if "# noqa" not in line:
|
||||
self._add_violation(
|
||||
rule_id="PERF-046",
|
||||
rule_name="Generators for large datasets",
|
||||
severity=Severity.INFO,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message=".all() loads everything into memory before iteration",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use .yield_per(100) for large result sets",
|
||||
)
|
||||
if self._is_noqa_suppressed(line, "PERF-046"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-046",
|
||||
rule_name="Generators for large datasets",
|
||||
severity=Severity.INFO,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message=".all() loads everything into memory before iteration",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use .yield_per(100) for large result sets",
|
||||
)
|
||||
|
||||
def _check_file_streaming(self, file_path: Path, content: str, lines: list[str]):
|
||||
"""PERF-047: Check for loading entire files into memory"""
|
||||
for i, line in enumerate(lines, 1):
|
||||
if re.search(r"await\s+\w+\.read\(\)", line) and "chunk" not in line:
|
||||
if self._is_noqa_suppressed(line, "PERF-047"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-047",
|
||||
rule_name="Stream large file uploads",
|
||||
@@ -468,6 +485,9 @@ class PerformanceValidator(BaseValidator):
|
||||
if "chunk" not in content.lower() and "batch" not in content.lower():
|
||||
# Check if file processes multiple records
|
||||
if "for " in content and ("csv" in content.lower() or "import" in content.lower()):
|
||||
first_line = lines[0] if lines else ""
|
||||
if self._is_noqa_suppressed(first_line, "PERF-048"):
|
||||
return
|
||||
self._add_violation(
|
||||
rule_id="PERF-048",
|
||||
rule_name="Chunked processing for imports",
|
||||
@@ -484,17 +504,18 @@ class PerformanceValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
# Check for file open without 'with'
|
||||
if re.search(r"^\s*\w+\s*=\s*open\s*\(", line):
|
||||
if "# noqa" not in line:
|
||||
self._add_violation(
|
||||
rule_id="PERF-049",
|
||||
rule_name="Context managers for resources",
|
||||
severity=Severity.WARNING,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message="File opened without context manager",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use 'with open(...) as f:' to ensure cleanup",
|
||||
)
|
||||
if self._is_noqa_suppressed(line, "PERF-049"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-049",
|
||||
rule_name="Context managers for resources",
|
||||
severity=Severity.WARNING,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message="File opened without context manager",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use 'with open(...) as f:' to ensure cleanup",
|
||||
)
|
||||
|
||||
def _check_string_concatenation(self, file_path: Path, content: str, lines: list[str]):
|
||||
"""PERF-051: Check for inefficient string concatenation in loops"""
|
||||
@@ -513,17 +534,18 @@ class PerformanceValidator(BaseValidator):
|
||||
if current_indent <= for_indent and stripped:
|
||||
in_for_loop = False
|
||||
elif re.search(r'\w+\s*\+=\s*["\']|str\s*\(', line):
|
||||
if "# noqa" not in line:
|
||||
self._add_violation(
|
||||
rule_id="PERF-051",
|
||||
rule_name="String concatenation efficiency",
|
||||
severity=Severity.INFO,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message="String concatenation in loop",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use ''.join() or StringIO for many concatenations",
|
||||
)
|
||||
if self._is_noqa_suppressed(line, "PERF-051"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-051",
|
||||
rule_name="String concatenation efficiency",
|
||||
severity=Severity.INFO,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message="String concatenation in loop",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use ''.join() or StringIO for many concatenations",
|
||||
)
|
||||
|
||||
# =========================================================================
|
||||
# Frontend Performance Checks
|
||||
@@ -534,6 +556,8 @@ class PerformanceValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
if re.search(r'@(input|keyup)=".*search.*fetch', line, re.IGNORECASE):
|
||||
if "debounce" not in content.lower():
|
||||
if self._is_noqa_suppressed(line, "PERF-056"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-056",
|
||||
rule_name="Debounce search inputs",
|
||||
@@ -552,17 +576,18 @@ class PerformanceValidator(BaseValidator):
|
||||
if match:
|
||||
interval = int(match.group(1))
|
||||
if interval < 10000: # Less than 10 seconds
|
||||
if "# real-time" not in line and "# noqa" not in line:
|
||||
self._add_violation(
|
||||
rule_id="PERF-062",
|
||||
rule_name="Reasonable polling intervals",
|
||||
severity=Severity.WARNING,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message=f"Polling interval {interval}ms is very frequent",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use >= 10 second intervals for non-critical updates",
|
||||
)
|
||||
if "# real-time" in line or self._is_noqa_suppressed(line, "PERF-062"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-062",
|
||||
rule_name="Reasonable polling intervals",
|
||||
severity=Severity.WARNING,
|
||||
file_path=file_path,
|
||||
line_number=i,
|
||||
message=f"Polling interval {interval}ms is very frequent",
|
||||
context=line.strip()[:80],
|
||||
suggestion="Use >= 10 second intervals for non-critical updates",
|
||||
)
|
||||
|
||||
def _check_layout_thrashing(self, file_path: Path, content: str, lines: list[str]):
|
||||
"""PERF-064: Check for layout thrashing patterns"""
|
||||
@@ -572,6 +597,8 @@ class PerformanceValidator(BaseValidator):
|
||||
if i < len(lines):
|
||||
next_line = lines[i] if i < len(lines) else ""
|
||||
if "style" in next_line:
|
||||
if self._is_noqa_suppressed(line, "PERF-064"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-064",
|
||||
rule_name="Avoid layout thrashing",
|
||||
@@ -589,6 +616,8 @@ class PerformanceValidator(BaseValidator):
|
||||
if re.search(r"<img\s+[^>]*src=", line):
|
||||
if 'loading="lazy"' not in line and "x-intersect" not in line:
|
||||
if "logo" not in line.lower() and "icon" not in line.lower():
|
||||
if self._is_noqa_suppressed(line, "PERF-058"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-058",
|
||||
rule_name="Image optimization",
|
||||
@@ -606,6 +635,8 @@ class PerformanceValidator(BaseValidator):
|
||||
if re.search(r"<script\s+[^>]*src=", line):
|
||||
if "defer" not in line and "async" not in line:
|
||||
if "alpine" not in line.lower() and "htmx" not in line.lower():
|
||||
if self._is_noqa_suppressed(line, "PERF-067"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="PERF-067",
|
||||
rule_name="Defer non-critical JavaScript",
|
||||
|
||||
@@ -192,6 +192,8 @@ class SecurityValidator(BaseValidator):
|
||||
# Check for eval usage
|
||||
for i, line in enumerate(lines, 1):
|
||||
if re.search(r"\beval\s*\(", line) and "//" not in line.split("eval")[0]:
|
||||
if self._is_noqa_suppressed(line, "SEC-013"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-013",
|
||||
rule_name="No code execution",
|
||||
@@ -206,6 +208,8 @@ class SecurityValidator(BaseValidator):
|
||||
# Check for innerHTML with user input
|
||||
for i, line in enumerate(lines, 1):
|
||||
if re.search(r"\.innerHTML\s*=", line) and "//" not in line.split("innerHTML")[0]:
|
||||
if self._is_noqa_suppressed(line, "SEC-015"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-015",
|
||||
rule_name="XSS prevention",
|
||||
@@ -222,6 +226,8 @@ class SecurityValidator(BaseValidator):
|
||||
# SEC-015: XSS via |safe filter
|
||||
for i, line in enumerate(lines, 1):
|
||||
if re.search(r"\|\s*safe", line) and "sanitized" not in line.lower():
|
||||
if self._is_noqa_suppressed(line, "SEC-015"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-015",
|
||||
rule_name="XSS prevention in templates",
|
||||
@@ -236,6 +242,8 @@ class SecurityValidator(BaseValidator):
|
||||
# Check for x-html with dynamic content
|
||||
for i, line in enumerate(lines, 1):
|
||||
if re.search(r'x-html="[^"]*\w', line) and "sanitized" not in line.lower():
|
||||
if self._is_noqa_suppressed(line, "SEC-015"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-015",
|
||||
rule_name="XSS prevention in templates",
|
||||
@@ -268,6 +276,8 @@ class SecurityValidator(BaseValidator):
|
||||
# Check for environment variable references
|
||||
if "${" in line or "os.getenv" in line or "environ" in line:
|
||||
continue
|
||||
if self._is_noqa_suppressed(line, "SEC-001"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-001",
|
||||
rule_name="No hardcoded credentials",
|
||||
@@ -296,7 +306,7 @@ class SecurityValidator(BaseValidator):
|
||||
|
||||
exclude_patterns = [
|
||||
"os.getenv", "os.environ", "settings.", '""', "''",
|
||||
"# noqa", "# test", "password_hash", "example"
|
||||
"# test", "password_hash", "example"
|
||||
]
|
||||
|
||||
for i, line in enumerate(lines, 1):
|
||||
@@ -305,6 +315,8 @@ class SecurityValidator(BaseValidator):
|
||||
# Check exclusions
|
||||
if any(exc in line for exc in exclude_patterns):
|
||||
continue
|
||||
if self._is_noqa_suppressed(line, "SEC-001"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-001",
|
||||
rule_name="No hardcoded credentials",
|
||||
@@ -329,7 +341,7 @@ class SecurityValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern in patterns:
|
||||
if re.search(pattern, line):
|
||||
if "# noqa" in line or "# safe" in line:
|
||||
if self._is_noqa_suppressed(line, "SEC-011") or "# safe" in line:
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-011",
|
||||
@@ -345,15 +357,15 @@ class SecurityValidator(BaseValidator):
|
||||
def _check_command_injection(self, file_path: Path, content: str, lines: list[str]):
|
||||
"""SEC-012: Check for command injection vulnerabilities"""
|
||||
patterns = [
|
||||
(r"subprocess.*shell\s*=\s*True", "shell=True in subprocess"),
|
||||
(r"os\.system\s*\(", "os.system()"),
|
||||
(r"os\.popen\s*\(", "os.popen()"),
|
||||
(r"subprocess.*shell\s*=\s*True", "shell=True in subprocess"), # noqa: SEC-012
|
||||
(r"os\.system\s*\(", "os.system()"), # noqa: SEC-012
|
||||
(r"os\.popen\s*\(", "os.popen()"), # noqa: SEC-012
|
||||
]
|
||||
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern, issue in patterns:
|
||||
if re.search(pattern, line):
|
||||
if "# noqa" in line or "# safe" in line:
|
||||
if self._is_noqa_suppressed(line, "SEC-012") or "# safe" in line:
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-012",
|
||||
@@ -378,6 +390,8 @@ class SecurityValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern, issue in patterns:
|
||||
if re.search(pattern, line, re.IGNORECASE):
|
||||
if self._is_noqa_suppressed(line, "SEC-013"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-013",
|
||||
rule_name="No code execution",
|
||||
@@ -405,6 +419,8 @@ class SecurityValidator(BaseValidator):
|
||||
if re.search(pattern, line, re.IGNORECASE):
|
||||
if has_secure_filename:
|
||||
continue
|
||||
if self._is_noqa_suppressed(line, "SEC-014"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-014",
|
||||
rule_name="Path traversal prevention",
|
||||
@@ -427,7 +443,7 @@ class SecurityValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern, issue in patterns:
|
||||
if re.search(pattern, line):
|
||||
if "# noqa" in line:
|
||||
if self._is_noqa_suppressed(line, "SEC-020"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-020",
|
||||
@@ -449,13 +465,15 @@ class SecurityValidator(BaseValidator):
|
||||
(r"print\s*\([^)]*password", "password in print"),
|
||||
]
|
||||
|
||||
exclude = ["password_hash", "password_reset", "password_changed", "# noqa"]
|
||||
exclude = ["password_hash", "password_reset", "password_changed"]
|
||||
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern, issue in patterns:
|
||||
if re.search(pattern, line, re.IGNORECASE):
|
||||
if any(exc in line for exc in exclude):
|
||||
continue
|
||||
if self._is_noqa_suppressed(line, "SEC-021"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-021",
|
||||
rule_name="PII logging prevention",
|
||||
@@ -478,7 +496,9 @@ class SecurityValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern in patterns:
|
||||
if re.search(pattern, line):
|
||||
if "logger" in line or "# noqa" in line:
|
||||
if "logger" in line:
|
||||
continue
|
||||
if self._is_noqa_suppressed(line, "SEC-024"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-024",
|
||||
@@ -495,7 +515,7 @@ class SecurityValidator(BaseValidator):
|
||||
"""SEC-034: Check for HTTP instead of HTTPS"""
|
||||
for i, line in enumerate(lines, 1):
|
||||
if re.search(r"http://(?!localhost|127\.0\.0\.1|0\.0\.0\.0|\$)", line):
|
||||
if "# noqa" in line or "example.com" in line or "schemas" in line:
|
||||
if self._is_noqa_suppressed(line, "SEC-034") or "example.com" in line or "schemas" in line:
|
||||
continue
|
||||
if "http://www.w3.org" in line:
|
||||
continue
|
||||
@@ -524,6 +544,8 @@ class SecurityValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern in patterns:
|
||||
if re.search(pattern, line) and "timeout" not in line:
|
||||
if self._is_noqa_suppressed(line, "SEC-040"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-040",
|
||||
rule_name="Timeout configuration",
|
||||
@@ -547,7 +569,7 @@ class SecurityValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern, algo in patterns:
|
||||
if re.search(pattern, line):
|
||||
if "# noqa" in line or "# checksum" in line or "# file hash" in line:
|
||||
if self._is_noqa_suppressed(line, "SEC-041") or "# checksum" in line or "# file hash" in line:
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-041",
|
||||
@@ -580,7 +602,7 @@ class SecurityValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern in patterns:
|
||||
if re.search(pattern, line):
|
||||
if "# noqa" in line or "# not security" in line:
|
||||
if self._is_noqa_suppressed(line, "SEC-042") or "# not security" in line:
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-042",
|
||||
@@ -609,6 +631,8 @@ class SecurityValidator(BaseValidator):
|
||||
if re.search(pattern, line):
|
||||
if any(exc in line for exc in exclude):
|
||||
continue
|
||||
if self._is_noqa_suppressed(line, "SEC-043"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-043",
|
||||
rule_name="No hardcoded encryption keys",
|
||||
@@ -631,7 +655,7 @@ class SecurityValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern, issue in patterns:
|
||||
if re.search(pattern, line):
|
||||
if "# noqa" in line or "# test" in line or "DEBUG" in line:
|
||||
if self._is_noqa_suppressed(line, "SEC-047") or "# test" in line or "DEBUG" in line:
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-047",
|
||||
@@ -650,6 +674,8 @@ class SecurityValidator(BaseValidator):
|
||||
# Find the jwt.encode line
|
||||
for i, line in enumerate(lines, 1):
|
||||
if "jwt.encode" in line:
|
||||
if self._is_noqa_suppressed(line, "SEC-002"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-002",
|
||||
rule_name="JWT expiry enforcement",
|
||||
@@ -676,6 +702,8 @@ class SecurityValidator(BaseValidator):
|
||||
for i, line in enumerate(lines, 1):
|
||||
for pattern in patterns:
|
||||
if re.search(pattern, line):
|
||||
if self._is_noqa_suppressed(line, "SEC-022"):
|
||||
continue
|
||||
self._add_violation(
|
||||
rule_id="SEC-022",
|
||||
rule_name="Sensitive data in URLs",
|
||||
|
||||
Reference in New Issue
Block a user