refactor: migrate templates and static files to self-contained modules

Templates Migration:
- Migrate admin templates to modules (tenancy, billing, monitoring, marketplace, etc.)
- Migrate vendor templates to modules (tenancy, billing, orders, messaging, etc.)
- Migrate storefront templates to modules (catalog, customers, orders, cart, checkout, cms)
- Migrate public templates to modules (billing, marketplace, cms)
- Keep shared templates in app/templates/ (base.html, errors/, partials/, macros/)
- Migrate letzshop partials to marketplace module

Static Files Migration:
- Migrate admin JS to modules: tenancy (23 files), core (5 files), monitoring (1 file)
- Migrate vendor JS to modules: tenancy (4 files), core (2 files)
- Migrate shared JS: vendor-selector.js to core, media-picker.js to cms
- Migrate storefront JS: storefront-layout.js to core
- Keep framework JS in static/ (api-client, utils, money, icons, log-config, lib/)
- Update all template references to use module_static paths

Naming Consistency:
- Rename static/platform/ to static/public/
- Rename app/templates/platform/ to app/templates/public/
- Update all extends and static references

Documentation:
- Update module-system.md with shared templates documentation
- Update frontend-structure.md with new module JS organization

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-02-01 14:34:16 +01:00
parent 843703258f
commit 4e28d91a78
542 changed files with 11603 additions and 9037 deletions

View File

@@ -1,20 +1,21 @@
# app/tasks/dispatcher.py
"""
Task dispatcher with feature flag for gradual Celery migration.
Task dispatcher for Celery background tasks.
This module provides a unified interface for dispatching background tasks.
Based on the USE_CELERY setting, tasks are either:
- Sent to Celery for persistent, reliable execution
- Run via FastAPI BackgroundTasks (fire-and-forget)
This module provides a unified interface for dispatching background tasks
to Celery workers. All tasks are dispatched to their canonical locations
in the respective modules.
This allows for gradual rollout and instant rollback.
Module task locations:
- Marketplace: app.modules.marketplace.tasks
- Billing: app.modules.billing.tasks
- Dev-Tools: app.modules.dev_tools.tasks
- Monitoring: app.modules.monitoring.tasks
"""
import logging
from typing import Any
from fastapi import BackgroundTasks
from app.core.config import settings
logger = logging.getLogger(__name__)
@@ -22,14 +23,13 @@ logger = logging.getLogger(__name__)
class TaskDispatcher:
"""
Dispatches tasks to either Celery or FastAPI BackgroundTasks.
Dispatches tasks to Celery workers.
Usage:
from app.tasks.dispatcher import task_dispatcher
# In an API endpoint:
task_id = task_dispatcher.dispatch_marketplace_import(
background_tasks=background_tasks,
job_id=job.id,
url=url,
marketplace=marketplace,
@@ -42,21 +42,27 @@ class TaskDispatcher:
"""Check if Celery is enabled."""
return settings.use_celery
def _require_celery(self, task_name: str) -> None:
    """Raise a RuntimeError if Celery is not enabled.

    Guard called at the top of every dispatch_* method now that all
    tasks are Celery-only (the BackgroundTasks fallback was removed).

    Args:
        task_name: Human-readable task description, interpolated into
            the error message only.

    Raises:
        RuntimeError: If ``settings.use_celery`` is false, with a hint
            to set USE_CELERY=true in the environment.
    """
    if not self.use_celery:
        raise RuntimeError(
            f"Celery is required for {task_name}. "
            f"Set USE_CELERY=true in environment."
        )
def dispatch_marketplace_import(
self,
background_tasks: BackgroundTasks,
job_id: int,
url: str,
marketplace: str,
vendor_id: int,
batch_size: int = 1000,
language: str = "en",
) -> str | None:
) -> str:
"""
Dispatch marketplace import task.
Args:
background_tasks: FastAPI BackgroundTasks instance
job_id: ID of the MarketplaceImportJob record
url: URL to the CSV file
marketplace: Name of the marketplace
@@ -65,151 +71,100 @@ class TaskDispatcher:
language: Language code for translations
Returns:
str | None: Celery task ID if using Celery, None otherwise
str: Celery task ID
"""
if self.use_celery:
from app.tasks.celery_tasks.marketplace import process_marketplace_import
self._require_celery("marketplace import")
from app.modules.marketplace.tasks import process_marketplace_import
task = process_marketplace_import.delay(
job_id=job_id,
url=url,
marketplace=marketplace,
vendor_id=vendor_id,
batch_size=batch_size,
language=language,
)
logger.info(f"Dispatched marketplace import to Celery: task_id={task.id}")
return task.id
else:
from app.tasks.background_tasks import process_marketplace_import
background_tasks.add_task(
process_marketplace_import,
job_id=job_id,
url=url,
marketplace=marketplace,
vendor_id=vendor_id,
batch_size=batch_size,
language=language,
)
logger.info("Dispatched marketplace import to BackgroundTasks")
return None
task = process_marketplace_import.delay(
job_id=job_id,
url=url,
marketplace=marketplace,
vendor_id=vendor_id,
batch_size=batch_size,
language=language,
)
logger.info(f"Dispatched marketplace import to Celery: task_id={task.id}")
return task.id
def dispatch_historical_import(
self,
background_tasks: BackgroundTasks,
job_id: int,
vendor_id: int,
) -> str | None:
) -> str:
"""
Dispatch Letzshop historical import task.
Args:
background_tasks: FastAPI BackgroundTasks instance
job_id: ID of the LetzshopHistoricalImportJob record
vendor_id: ID of the vendor
Returns:
str | None: Celery task ID if using Celery, None otherwise
str: Celery task ID
"""
if self.use_celery:
from app.tasks.celery_tasks.letzshop import process_historical_import
self._require_celery("historical import")
from app.modules.marketplace.tasks import process_historical_import
task = process_historical_import.delay(job_id=job_id, vendor_id=vendor_id)
logger.info(f"Dispatched historical import to Celery: task_id={task.id}")
return task.id
else:
from app.tasks.letzshop_tasks import process_historical_import
background_tasks.add_task(
process_historical_import,
job_id=job_id,
vendor_id=vendor_id,
)
logger.info("Dispatched historical import to BackgroundTasks")
return None
task = process_historical_import.delay(job_id=job_id, vendor_id=vendor_id)
logger.info(f"Dispatched historical import to Celery: task_id={task.id}")
return task.id
def dispatch_code_quality_scan(
self,
background_tasks: BackgroundTasks,
scan_id: int,
) -> str | None:
) -> str:
"""
Dispatch code quality scan task.
Args:
background_tasks: FastAPI BackgroundTasks instance
scan_id: ID of the ArchitectureScan record
Returns:
str | None: Celery task ID if using Celery, None otherwise
str: Celery task ID
"""
if self.use_celery:
from app.tasks.celery_tasks.code_quality import execute_code_quality_scan
self._require_celery("code quality scan")
from app.modules.dev_tools.tasks import execute_code_quality_scan
task = execute_code_quality_scan.delay(scan_id=scan_id)
logger.info(f"Dispatched code quality scan to Celery: task_id={task.id}")
return task.id
else:
from app.tasks.code_quality_tasks import execute_code_quality_scan
background_tasks.add_task(execute_code_quality_scan, scan_id=scan_id)
logger.info("Dispatched code quality scan to BackgroundTasks")
return None
task = execute_code_quality_scan.delay(scan_id=scan_id)
logger.info(f"Dispatched code quality scan to Celery: task_id={task.id}")
return task.id
def dispatch_test_run(
self,
background_tasks: BackgroundTasks,
run_id: int,
test_path: str = "tests",
extra_args: list[str] | None = None,
) -> str | None:
) -> str:
"""
Dispatch test run task.
Args:
background_tasks: FastAPI BackgroundTasks instance
run_id: ID of the TestRun record
test_path: Path to tests
extra_args: Additional pytest arguments
Returns:
str | None: Celery task ID if using Celery, None otherwise
str: Celery task ID
"""
if self.use_celery:
from app.tasks.celery_tasks.test_runner import execute_test_run
self._require_celery("test run")
from app.modules.dev_tools.tasks import execute_test_run
task = execute_test_run.delay(
run_id=run_id,
test_path=test_path,
extra_args=extra_args,
)
logger.info(f"Dispatched test run to Celery: task_id={task.id}")
return task.id
else:
from app.tasks.test_runner_tasks import execute_test_run
background_tasks.add_task(
execute_test_run,
run_id=run_id,
test_path=test_path,
extra_args=extra_args,
)
logger.info("Dispatched test run to BackgroundTasks")
return None
task = execute_test_run.delay(
run_id=run_id,
test_path=test_path,
extra_args=extra_args,
)
logger.info(f"Dispatched test run to Celery: task_id={task.id}")
return task.id
def dispatch_product_export(
self,
vendor_id: int,
triggered_by: str,
include_inactive: bool = False,
) -> str | None:
) -> str:
"""
Dispatch product export task (Celery only).
This task is only available via Celery as it's designed for
asynchronous batch exports. For synchronous exports, use
the export service directly.
Dispatch product export task.
Args:
vendor_id: ID of the vendor to export
@@ -217,24 +172,32 @@ class TaskDispatcher:
include_inactive: Whether to include inactive products
Returns:
str | None: Celery task ID if using Celery, None otherwise
str: Celery task ID
"""
if self.use_celery:
from app.tasks.celery_tasks.export import export_vendor_products_to_folder
self._require_celery("product export")
from app.modules.marketplace.tasks import export_vendor_products_to_folder
task = export_vendor_products_to_folder.delay(
vendor_id=vendor_id,
triggered_by=triggered_by,
include_inactive=include_inactive,
)
logger.info(f"Dispatched product export to Celery: task_id={task.id}")
return task.id
else:
logger.warning(
"Product export task requires Celery. "
"Use letzshop_export_service directly for synchronous export."
)
return None
task = export_vendor_products_to_folder.delay(
vendor_id=vendor_id,
triggered_by=triggered_by,
include_inactive=include_inactive,
)
logger.info(f"Dispatched product export to Celery: task_id={task.id}")
return task.id
def dispatch_capacity_snapshot(self) -> str:
    """
    Dispatch capacity snapshot capture task.

    Takes no task arguments; the work happens entirely on the
    Celery worker side.

    Returns:
        str: Celery task ID

    Raises:
        RuntimeError: If Celery is not enabled (USE_CELERY unset).
    """
    self._require_celery("capacity snapshot")

    # Function-local import: the monitoring module is only loaded when
    # a snapshot is actually dispatched.
    from app.modules.monitoring.tasks import capture_capacity_snapshot

    task = capture_capacity_snapshot.delay()
    logger.info(f"Dispatched capacity snapshot to Celery: task_id={task.id}")
    return task.id
def get_task_status(self, task_id: str) -> dict[str, Any]:
"""