# app/modules/dev_tools/routes/api/admin_translations.py
|
|
"""
|
|
Translation Editor API endpoints.
|
|
|
|
Browse, search, and edit UI translation JSON files across all modules.
|
|
All endpoints require super-admin authentication.
|
|
"""
|
|
|
|
import json
|
|
import logging
|
|
import re
|
|
from pathlib import Path
|
|
from typing import Literal
|
|
|
|
from fastapi import APIRouter, Depends, Query
|
|
from fastapi.responses import JSONResponse
|
|
from pydantic import BaseModel, Field
|
|
|
|
from app.api.deps import UserContext, get_current_super_admin_api
|
|
from app.utils.i18n import (
|
|
SUPPORTED_LANGUAGES,
|
|
clear_translation_cache,
|
|
get_locales_path,
|
|
get_module_locale_dirs,
|
|
)
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
router = APIRouter(prefix="/translations", tags=["translations"])
|
|
|
|
# Variable placeholder pattern: {name}
|
|
VARIABLE_PATTERN = re.compile(r"\{(\w+)\}")
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Helpers
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
def _flatten_json(data: dict, prefix: str = "") -> dict[str, str]:
|
|
"""Flatten nested dict to dot-notation paths."""
|
|
result = {}
|
|
for key, value in data.items():
|
|
path = f"{prefix}.{key}" if prefix else key
|
|
if isinstance(value, dict):
|
|
result.update(_flatten_json(value, path))
|
|
else:
|
|
result[path] = value
|
|
return result
|
|
|
|
|
|
def _set_nested_value(data: dict, path: str, value: str) -> None:
|
|
"""Set a value in a nested dict using dot-notation path."""
|
|
keys = path.split(".")
|
|
current = data
|
|
for key in keys[:-1]:
|
|
if key not in current or not isinstance(current[key], dict):
|
|
current[key] = {}
|
|
current = current[key]
|
|
current[keys[-1]] = value
|
|
|
|
|
|
def _remove_nested_value(data: dict, path: str) -> None:
|
|
"""Remove a value from a nested dict using dot-notation path."""
|
|
keys = path.split(".")
|
|
current = data
|
|
for key in keys[:-1]:
|
|
if key not in current or not isinstance(current[key], dict):
|
|
return
|
|
current = current[key]
|
|
current.pop(keys[-1], None)
|
|
|
|
|
|
def _load_json(file_path: Path) -> dict:
|
|
"""Load JSON file, return empty dict if missing."""
|
|
if not file_path.exists():
|
|
return {}
|
|
try:
|
|
with open(file_path, encoding="utf-8") as f:
|
|
return json.load(f)
|
|
except (json.JSONDecodeError, OSError) as e:
|
|
logger.error(f"Error loading {file_path}: {e}")
|
|
return {}
|
|
|
|
|
|
def _save_json(file_path: Path, data: dict) -> None:
|
|
"""Write dict to JSON file with pretty formatting."""
|
|
file_path.parent.mkdir(parents=True, exist_ok=True)
|
|
with open(file_path, "w", encoding="utf-8") as f:
|
|
json.dump(data, f, indent=4, ensure_ascii=False)
|
|
f.write("\n")
|
|
|
|
|
|
def _get_module_file_path(module: str, language: str) -> Path | None:
    """Resolve the locale JSON file for (module, language).

    ``"shared"`` maps to the global locales directory; any other module
    code is looked up among the registered module locale dirs. Returns
    None when the module is unknown.
    """
    if module == "shared":
        return get_locales_path() / f"{language}.json"
    return next(
        (
            locales_dir / f"{language}.json"
            for mod_code, locales_dir in get_module_locale_dirs()
            if mod_code == module
        ),
        None,
    )
|
|
|
|
|
|
def _extract_variables(text: str) -> list[str]:
|
|
"""Extract {variable} placeholders from a string."""
|
|
return VARIABLE_PATTERN.findall(text)
|
|
|
|
|
|
def _collect_module_keys(
    locales_dir: Path,
    module: str,
    all_keys: dict[tuple[str, str], dict[str, str | None]],
) -> set[str]:
    """Merge one module's per-language flat keys into *all_keys*.

    For every language file under *locales_dir*, flatten it and record each
    value under ``(module, dot.path)``; languages without a value stay None.

    Returns:
        The union of dot-paths seen for this module across all languages.
    """
    paths: set[str] = set()
    for lang in SUPPORTED_LANGUAGES:
        flat = _flatten_json(_load_json(locales_dir / f"{lang}.json"))
        paths.update(flat)
        for path, value in flat.items():
            key_id = (module, path)
            if key_id not in all_keys:
                all_keys[key_id] = {la: None for la in SUPPORTED_LANGUAGES}
            all_keys[key_id][lang] = value
    return paths


def _build_key_entry(module: str, path: str, values: dict[str, str | None]) -> dict:
    """Build the API entry for one key, detecting ``{placeholder}`` variables.

    ``variables`` is included only when at least one language value contains
    a placeholder; order of first appearance is preserved, duplicates dropped.
    """
    variables: list[str] = []
    for val in values.values():
        if val and isinstance(val, str):
            for name in _extract_variables(val):
                if name not in variables:
                    variables.append(name)
    entry = {
        "module": module,
        "path": path,
        "values": values,
        "has_variables": bool(variables),
    }
    if variables:
        entry["variables"] = variables
    return entry


def _load_all_keys() -> tuple[list[dict], list[dict], dict]:
    """
    Load all translation keys across all modules and languages.

    Returns:
        (modules_list, keys_list, stats_dict) where stats_dict carries the
        total key count and per-language missing-translation counts.
    """
    all_keys: dict[tuple[str, str], dict[str, str | None]] = {}
    modules_info = []

    # Global translations (static/locales/) — "shared" is always listed,
    # even when it has no keys.
    shared_paths = _collect_module_keys(get_locales_path(), "shared", all_keys)
    modules_info.append({
        "code": "shared",
        "label": "Shared",
        "key_count": len(shared_paths),
    })

    # Per-module translations; modules without any keys are skipped.
    for mod_code, locales_dir in get_module_locale_dirs():
        mod_paths = _collect_module_keys(locales_dir, mod_code, all_keys)
        if mod_paths:
            modules_info.append({
                "code": mod_code,
                "label": mod_code.replace("_", " ").replace("-", " ").title(),
                "key_count": len(mod_paths),
            })

    # Build the sorted keys list and per-language missing counts.
    keys_list = []
    missing_counts = {lang: 0 for lang in SUPPORTED_LANGUAGES}
    for (module, path), values in sorted(all_keys.items()):
        keys_list.append(_build_key_entry(module, path, values))
        for lang in SUPPORTED_LANGUAGES:
            if values[lang] is None:
                missing_counts[lang] += 1

    stats = {
        "total_keys": len(keys_list),
        "missing": missing_counts,
    }

    return modules_info, keys_list, stats
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Schemas
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class UpdateKeyRequest(BaseModel):
    # Payload for PUT /translations/key.
    # "shared" targets the global locales dir; anything else is a module code.
    module: str = Field(..., min_length=1)
    # Dot-notation key path inside the JSON file, e.g. "buttons.save".
    path: str = Field(..., min_length=1)
    # NOTE(review): hard-coded language codes — presumably must mirror
    # SUPPORTED_LANGUAGES; confirm they stay in sync when a language is added.
    language: Literal["en", "fr", "de", "lb"]
    # New translation value; the endpoint treats "" as a delete request.
    value: str = Field(...)
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Endpoints
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
@router.get("/keys")
async def get_translation_keys(
    ctx: UserContext = Depends(get_current_super_admin_api),
):
    """Return every translation key across all modules, plus summary stats."""
    modules, keys, stats = _load_all_keys()
    payload = {
        "modules": modules,
        "languages": SUPPORTED_LANGUAGES,
        "keys": keys,
        "stats": stats,
    }
    return payload
|
|
|
|
|
|
def _describe_key(module: str, path: str, values: dict[str, str | None]) -> dict:
    """Build the API representation of one key, with ``{placeholder}`` detection."""
    variables: list[str] = []
    for val in values.values():
        if val and isinstance(val, str):
            for name in _extract_variables(val):
                if name not in variables:
                    variables.append(name)
    entry = {
        "module": module,
        "path": path,
        "values": values,
        "has_variables": bool(variables),
    }
    if variables:
        entry["variables"] = variables
    return entry


@router.put("/key")
async def update_translation_key(
    body: UpdateKeyRequest,
    ctx: UserContext = Depends(get_current_super_admin_api),
):
    """Update (or delete, when the value is empty) a single translation value.

    Writes the change to the module's locale JSON file, clears the
    translation cache, and returns the refreshed entry across all languages.
    Returns 404 when the module is unknown.
    """
    file_path = _get_module_file_path(body.module, body.language)
    if file_path is None:
        return JSONResponse(
            status_code=404,
            content={"detail": f"Module '{body.module}' not found"},
        )
    data = _load_json(file_path)

    if body.value == "":
        # Empty string means delete the key
        _remove_nested_value(data, body.path)
    else:
        _set_nested_value(data, body.path, body.value)

    _save_json(file_path, data)
    clear_translation_cache()

    # Re-read every language so the response reflects the on-disk state.
    values: dict[str, str | None] = {}
    for lang in SUPPORTED_LANGUAGES:
        lang_path = _get_module_file_path(body.module, lang)
        if lang_path:
            values[lang] = _flatten_json(_load_json(lang_path)).get(body.path)
        else:
            values[lang] = None

    return _describe_key(body.module, body.path, values)
|
|
|
|
|
|
@router.get("/missing")
async def get_missing_translations(
    module: str | None = Query(None),
    ctx: UserContext = Depends(get_current_super_admin_api),
):
    """List keys that lack a translation in at least one language.

    Optionally filtered to a single module; the stats block still covers
    the full key set (it is computed before filtering).
    """
    _, keys, stats = _load_all_keys()

    missing_keys = [
        entry for entry in keys
        if any(value is None for value in entry["values"].values())
    ]

    if module:
        missing_keys = [
            entry for entry in missing_keys if entry["module"] == module
        ]

    return {"keys": missing_keys, "stats": stats}
|