feat: add Sentry, Cloudflare R2, and Cloudflare CDN integrations
Production quick wins for improved observability and scalability: Sentry Error Tracking: - Add sentry-sdk[fastapi] dependency - Initialize Sentry in main.py with FastAPI/SQLAlchemy integrations - Add Celery integration for background task error tracking - Feature-flagged via SENTRY_DSN (disabled when empty) Cloudflare R2 Storage: - Add boto3 dependency for S3-compatible API - Create storage_service.py with StorageBackend abstraction - LocalStorageBackend for development (default) - R2StorageBackend for production cloud storage - Feature-flagged via STORAGE_BACKEND setting CloudFlare CDN/Proxy: - Create middleware/cloudflare.py for CF header handling - Extract real client IP from CF-Connecting-IP - Support CF-IPCountry for geo features - Feature-flagged via CLOUDFLARE_ENABLED setting Documentation: - Add docs/deployment/cloudflare.md setup guide - Update infrastructure.md with dev vs prod requirements - Add enterprise upgrade checklist for scaling beyond 1000 users - Update installation.md with new environment variables All features are optional and disabled by default for development. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -7,16 +7,33 @@ It includes:
|
||||
- Task routing to separate queues (default, long_running, scheduled)
|
||||
- Celery Beat schedule for periodic tasks
|
||||
- Task retry policies
|
||||
- Sentry integration for error tracking
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import sentry_sdk
|
||||
from celery import Celery
|
||||
from celery.schedules import crontab
|
||||
from sentry_sdk.integrations.celery import CeleryIntegration
|
||||
|
||||
# Redis URL from environment or default
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")

# =============================================================================
# SENTRY INITIALIZATION FOR CELERY WORKERS
# =============================================================================
# Celery workers run in separate processes, so Sentry must be initialized here too
# Feature-flagged: when SENTRY_DSN is unset/empty, Sentry stays disabled (dev default).
SENTRY_DSN = os.getenv("SENTRY_DSN")
if SENTRY_DSN:
    sentry_sdk.init(
        dsn=SENTRY_DSN,
        # Tags every event with the deployment environment (development/staging/production).
        environment=os.getenv("SENTRY_ENVIRONMENT", "development"),
        # Fraction of transactions sampled for performance tracing; defaults to 10%.
        traces_sample_rate=float(os.getenv("SENTRY_TRACES_SAMPLE_RATE", "0.1")),
        # CeleryIntegration captures task failures/retries from worker processes.
        integrations=[CeleryIntegration()],
        # NOTE(review): send_default_pii=True ships user-identifying data (IPs,
        # usernames) to Sentry — confirm this is acceptable under the privacy policy.
        send_default_pii=True,
    )
|
||||
|
||||
# Create Celery application
|
||||
celery_app = Celery(
|
||||
"wizamart",
|
||||
|
||||
@@ -187,6 +187,28 @@ class Settings(BaseSettings):
|
||||
    # URL where the Flower (Celery monitoring) dashboard is reachable.
    flower_url: str = "http://localhost:5555"
    # Basic-auth password for the Flower dashboard.
    flower_password: str = "changeme"  # CHANGE IN PRODUCTION!

    # =============================================================================
    # SENTRY ERROR TRACKING
    # =============================================================================
    # Leaving sentry_dsn as None disables Sentry entirely (development default).
    sentry_dsn: str | None = None  # Set to enable Sentry
    sentry_environment: str = "development"  # development, staging, production
    sentry_traces_sample_rate: float = 0.1  # 10% of transactions for performance monitoring

    # =============================================================================
    # CLOUDFLARE R2 STORAGE
    # =============================================================================
    # "local" keeps uploads on disk; "r2" requires the three credentials below.
    storage_backend: str = "local"  # "local" or "r2"
    r2_account_id: str | None = None
    r2_access_key_id: str | None = None
    r2_secret_access_key: str | None = None
    # Bucket name used by the R2 backend.
    r2_bucket_name: str = "wizamart-media"
    r2_public_url: str | None = None  # Custom domain for public access (e.g., https://media.yoursite.com)

    # =============================================================================
    # CLOUDFLARE CDN / PROXY
    # =============================================================================
    # NOTE(review): presumably gates middleware that trusts CF-Connecting-IP /
    # CF-IPCountry headers — only enable when actually behind the Cloudflare
    # proxy, since those headers are client-spoofable otherwise. Verify in
    # middleware/cloudflare.py.
    cloudflare_enabled: bool = False  # Set to True when using CloudFlare proxy

    # Pydantic settings config: load values from the project's .env file.
    model_config = {"env_file": ".env"}
|
||||
|
||||
|
||||
|
||||
295
app/services/storage_service.py
Normal file
295
app/services/storage_service.py
Normal file
@@ -0,0 +1,295 @@
|
||||
# app/services/storage_service.py
|
||||
"""
|
||||
Storage abstraction service for file uploads.
|
||||
|
||||
Provides a unified interface for file storage with support for:
|
||||
- Local filesystem (default, development)
|
||||
- Cloudflare R2 (production, S3-compatible)
|
||||
|
||||
Usage:
|
||||
from app.services.storage_service import get_storage_backend
|
||||
|
||||
storage = get_storage_backend()
|
||||
url = await storage.upload("path/to/file.jpg", file_bytes, "image/jpeg")
|
||||
await storage.delete("path/to/file.jpg")
|
||||
"""
|
||||
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StorageBackend(ABC):
    """Interface every storage backend must implement.

    Concrete backends (local disk, Cloudflare R2, ...) expose the same four
    operations so calling code never needs to know where files actually live.
    """

    @abstractmethod
    async def upload(self, file_path: str, content: bytes, content_type: str) -> str:
        """Store *content* at *file_path* and return its public URL.

        Args:
            file_path: Relative destination path for the file.
            content: Raw file bytes to store.
            content_type: MIME type of the file (e.g. "image/jpeg").

        Returns:
            Public URL from which the stored file can be fetched.
        """
        ...

    @abstractmethod
    async def delete(self, file_path: str) -> bool:
        """Remove the file stored at *file_path*.

        Args:
            file_path: Relative path of the file to remove.

        Returns:
            True when a file was removed, False when nothing existed there.
        """
        ...

    @abstractmethod
    def get_url(self, file_path: str) -> str:
        """Return the public URL for *file_path* without touching storage.

        Args:
            file_path: Relative path of the file.

        Returns:
            Public URL under which the file is (or would be) served.
        """
        ...

    @abstractmethod
    async def exists(self, file_path: str) -> bool:
        """Report whether a file is present at *file_path*.

        Args:
            file_path: Relative path of the file.

        Returns:
            True when the file exists in this backend.
        """
        ...
|
||||
|
||||
|
||||
class LocalStorageBackend(StorageBackend):
    """Filesystem-backed storage — the default backend for development."""

    def __init__(self, base_dir: str = "uploads"):
        """Create the backend and ensure its root directory exists.

        Args:
            base_dir: Root directory for uploads, relative to the project root.
        """
        self.base_dir = Path(base_dir)
        self.base_dir.mkdir(parents=True, exist_ok=True)
        logger.info(f"LocalStorageBackend initialized with base_dir: {self.base_dir}")

    async def upload(self, file_path: str, content: bytes, content_type: str) -> str:
        """Write *content* under base_dir and return its /uploads URL.

        The *content_type* argument is accepted for interface parity but is
        not needed on the local filesystem.
        """
        destination = self.base_dir / file_path
        # Create any missing intermediate directories before writing.
        destination.parent.mkdir(parents=True, exist_ok=True)
        destination.write_bytes(content)
        logger.debug(f"Uploaded to local: {file_path} ({len(content)} bytes)")
        return self.get_url(file_path)

    async def delete(self, file_path: str) -> bool:
        """Delete the file if present, then prune now-empty parent dirs."""
        target = self.base_dir / file_path
        if not target.exists():
            return False
        target.unlink()
        logger.debug(f"Deleted from local: {file_path}")
        # Don't leave empty directory skeletons behind.
        self._cleanup_empty_dirs(target.parent)
        return True

    def get_url(self, file_path: str) -> str:
        """Local files are served from the application's /uploads static mount."""
        return f"/uploads/{file_path}"

    async def exists(self, file_path: str) -> bool:
        """Return True when the file is present under base_dir."""
        return (self.base_dir / file_path).exists()

    def _cleanup_empty_dirs(self, dir_path: Path) -> None:
        """Walk upward from *dir_path* toward base_dir, removing empty dirs."""
        try:
            current = dir_path
            while current != self.base_dir and current.exists():
                if any(current.iterdir()):
                    break  # stop at the first non-empty ancestor
                current.rmdir()
                current = current.parent
        except OSError:
            # Best-effort cleanup: a concurrent write or permission issue
            # should never fail the delete that triggered it.
            pass
|
||||
|
||||
|
||||
class R2StorageBackend(StorageBackend):
    """Cloudflare R2 backend, driven through boto3's S3-compatible API."""

    def __init__(self):
        """Build a boto3 S3 client pointed at the account's R2 endpoint.

        Raises:
            ValueError: If any of the required R2 credential settings is missing.
        """
        # Imported lazily so boto3 is only required when R2 is actually used.
        import boto3
        from botocore.config import Config

        credentials = (
            settings.r2_account_id,
            settings.r2_access_key_id,
            settings.r2_secret_access_key,
        )
        if not all(credentials):
            raise ValueError(
                "R2 storage requires R2_ACCOUNT_ID, R2_ACCESS_KEY_ID, "
                "and R2_SECRET_ACCESS_KEY environment variables"
            )

        # Account-scoped R2 endpoint (S3-compatible).
        endpoint_url = f"https://{settings.r2_account_id}.r2.cloudflarestorage.com"

        self.client = boto3.client(
            "s3",
            endpoint_url=endpoint_url,
            aws_access_key_id=settings.r2_access_key_id,
            aws_secret_access_key=settings.r2_secret_access_key,
            config=Config(
                signature_version="s3v4",
                retries={"max_attempts": 3, "mode": "adaptive"},
            ),
        )

        self.bucket_name = settings.r2_bucket_name
        self.public_url = settings.r2_public_url

        logger.info(
            f"R2StorageBackend initialized: bucket={self.bucket_name}, "
            f"public_url={self.public_url or 'default'}"
        )

    async def upload(self, file_path: str, content: bytes, content_type: str) -> str:
        """Put the object into the R2 bucket and return its public URL.

        NOTE(review): put_object is a blocking network call inside an async
        method — consider offloading to a worker thread for large uploads.
        """
        try:
            self.client.put_object(
                Bucket=self.bucket_name,
                Key=file_path,
                Body=content,
                ContentType=content_type,
            )
            logger.debug(f"Uploaded to R2: {file_path} ({len(content)} bytes)")
            return self.get_url(file_path)
        except Exception as e:
            # Log for observability, then let the caller handle the failure.
            logger.error(f"R2 upload failed for {file_path}: {e}")
            raise

    async def delete(self, file_path: str) -> bool:
        """Delete the object; returns False when absent or on any error."""
        try:
            # HEAD first so we can report whether anything was actually removed
            # (S3-style deletes succeed silently on missing keys).
            if not await self.exists(file_path):
                return False
            self.client.delete_object(Bucket=self.bucket_name, Key=file_path)
            logger.debug(f"Deleted from R2: {file_path}")
            return True
        except Exception as e:
            logger.error(f"R2 delete failed for {file_path}: {e}")
            return False

    def get_url(self, file_path: str) -> str:
        """Public URL for the object, preferring the configured custom domain."""
        if self.public_url:
            return f"{self.public_url.rstrip('/')}/{file_path}"
        # Default r2.dev URL pattern; the bucket must have public access enabled.
        return f"https://{self.bucket_name}.{settings.r2_account_id}.r2.dev/{file_path}"

    async def exists(self, file_path: str) -> bool:
        """HEAD the object: 404 means absent, any other error propagates."""
        try:
            self.client.head_object(Bucket=self.bucket_name, Key=file_path)
        except self.client.exceptions.ClientError as e:
            if e.response.get("Error", {}).get("Code") == "404":
                return False
            raise
        return True
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# STORAGE BACKEND FACTORY
|
||||
# =============================================================================
|
||||
|
||||
# Process-wide singleton so every caller shares one backend instance.
_storage_backend: StorageBackend | None = None


def get_storage_backend() -> StorageBackend:
    """Return the shared storage backend, creating it on first use.

    The backend class is chosen by the STORAGE_BACKEND setting
    ("local" or "r2", case-insensitive).

    Returns:
        The cached StorageBackend instance.

    Raises:
        ValueError: If STORAGE_BACKEND names an unknown backend.
    """
    global _storage_backend

    if _storage_backend is None:
        choice = settings.storage_backend.lower()
        if choice == "r2":
            _storage_backend = R2StorageBackend()
        elif choice == "local":
            _storage_backend = LocalStorageBackend()
        else:
            raise ValueError(f"Unknown storage backend: {choice}")

    return _storage_backend
|
||||
|
||||
|
||||
# Clears the module-level singleton so the next get_storage_backend() call
# re-reads settings and builds a fresh backend (e.g. after a test monkeypatch).
def reset_storage_backend() -> None:
    """Reset the storage backend (useful for testing)."""
    global _storage_backend
    _storage_backend = None


# =============================================================================
# PUBLIC API
# =============================================================================
# Explicit export list: these are the only names intended for external use.
__all__ = [
    "StorageBackend",
    "LocalStorageBackend",
    "R2StorageBackend",
    "get_storage_backend",
    "reset_storage_backend",
]
|
||||
Reference in New Issue
Block a user