#!/usr/bin/env bash
# scripts/backup.sh — Automated PostgreSQL backup for Orion and Gitea
#
# Usage:
#   bash scripts/backup.sh            # Local backup only
#   bash scripts/backup.sh --upload   # Local backup + sync to Cloudflare R2
#
# Cron / systemd timer: runs daily at 03:00
# On Sundays: copies daily backup to weekly/
# Retention: 7 daily, 4 weekly

set -euo pipefail

# =============================================================================
# Configuration
# =============================================================================

BACKUP_ROOT="${HOME}/backups"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
DAY_OF_WEEK=$(date +%u)  # 1=Monday, 7=Sunday

# Orion DB settings (from docker-compose.yml)
ORION_CONTAINER="orion-db-1"
ORION_DB="orion_db"
ORION_USER="orion_user"

# Gitea DB settings (from ~/gitea/docker-compose.yml)
GITEA_CONTAINER="gitea-db"
GITEA_DB="gitea"
GITEA_USER="gitea"

# R2 settings (loaded from .env if available)
ORION_APP_DIR="${HOME}/apps/orion"
if [ -f "${ORION_APP_DIR}/.env" ]; then
  R2_ACCOUNT_ID=$(grep -s '^R2_ACCOUNT_ID=' "${ORION_APP_DIR}/.env" | cut -d= -f2- || true)
  R2_BACKUP_BUCKET=$(grep -s '^R2_BACKUP_BUCKET=' "${ORION_APP_DIR}/.env" | cut -d= -f2- || true)
fi
R2_BACKUP_BUCKET="${R2_BACKUP_BUCKET:-orion-backups}"
# FIX: ${R2_ACCOUNT_ID:-} — without the default, `set -u` aborts the whole
# script here whenever .env is missing, breaking local-only backups too.
# upload_to_r2 still validates the value before any upload is attempted.
R2_ENDPOINT="https://${R2_ACCOUNT_ID:-}.r2.cloudflarestorage.com"

# Retention
DAILY_KEEP=7
WEEKLY_KEEP=4

# =============================================================================
# Functions
# =============================================================================

# Timestamped logger; all progress messages go through here.
log() {
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
}

#######################################
# Dump one PostgreSQL database from a docker container and gzip it.
# Arguments:
#   $1 - container name
#   $2 - database name
#   $3 - database user
#   $4 - destination directory (created if missing)
#   $5 - output filename (e.g. orion_20240101_030000.sql.gz)
# Returns: 0 on success, 1 on dump failure.
#######################################
backup_database() {
  local container="$1"
  local db_name="$2"
  local db_user="$3"
  local target_dir="$4"
  local filename="$5"

  mkdir -p "${target_dir}"
  log "Backing up ${db_name} from ${container}..."

  # FIX: the original referenced $(unknown) instead of ${filename}, so the
  # dump never landed at the intended path. Also: dump to a .tmp file first
  # and rename only on success, so a failed pg_dump never leaves a partial
  # .sql.gz behind (the R2 sync below already excludes "*.tmp").
  # pipefail ensures a pg_dump failure is seen even though gzip succeeds.
  local tmp_file="${target_dir}/${filename}.tmp"
  if docker exec "${container}" pg_dump -U "${db_user}" "${db_name}" | gzip > "${tmp_file}"; then
    mv -f -- "${tmp_file}" "${target_dir}/${filename}"
    local size
    size=$(du -h "${target_dir}/${filename}" | cut -f1)
    log "  OK: ${filename} (${size})"
  else
    rm -f -- "${tmp_file}"
    log "  FAILED: ${db_name} backup"
    return 1
  fi
}

#######################################
# Delete *.sql.gz backups older than a cutoff.
# Arguments:
#   $1 - directory to rotate
#   $2 - cutoff in days (find -mtime +N → strictly older than N days)
#######################################
rotate_backups() {
  local dir="$1"
  local keep_days="$2"

  if [ -d "${dir}" ]; then
    local count
    count=$(find "${dir}" -name "*.sql.gz" -mtime +"${keep_days}" 2>/dev/null | wc -l)
    if [ "${count}" -gt 0 ]; then
      find "${dir}" -name "*.sql.gz" -mtime +"${keep_days}" -delete
      log "  Rotated: removed ${count} old backups from ${dir}"
    fi
  fi
}

#######################################
# Mirror ${BACKUP_ROOT} to the R2 bucket via the aws CLI ("r2" profile).
# --delete propagates local rotation to the remote copy; *.tmp in-progress
# dump files are excluded.
# Returns: 0 on success, 1 if R2_ACCOUNT_ID is not configured.
#######################################
upload_to_r2() {
  if [ -z "${R2_ACCOUNT_ID:-}" ]; then
    log "ERROR: R2_ACCOUNT_ID not set. Cannot upload."
    return 1
  fi

  log "Syncing backups to R2 bucket: ${R2_BACKUP_BUCKET}..."
  aws s3 sync "${BACKUP_ROOT}/" "s3://${R2_BACKUP_BUCKET}/" \
    --endpoint-url "${R2_ENDPOINT}" \
    --profile r2 \
    --delete \
    --exclude "*.tmp"
  log "  OK: R2 sync complete"
}

# =============================================================================
# Main
# =============================================================================

UPLOAD=false
if [ "${1:-}" = "--upload" ]; then
  UPLOAD=true
fi

log "=== Orion Backup Started ==="

# Ensure backup directories exist
mkdir -p "${BACKUP_ROOT}/orion/"{daily,weekly}
mkdir -p "${BACKUP_ROOT}/gitea/"{daily,weekly}

# --- Daily backups ---
# A failed dump is counted, not fatal: the other DB should still be backed up.
ERRORS=0

backup_database "${ORION_CONTAINER}" "${ORION_DB}" "${ORION_USER}" \
  "${BACKUP_ROOT}/orion/daily" "orion_${TIMESTAMP}.sql.gz" || ERRORS=$((ERRORS + 1))

backup_database "${GITEA_CONTAINER}" "${GITEA_DB}" "${GITEA_USER}" \
  "${BACKUP_ROOT}/gitea/daily" "gitea_${TIMESTAMP}.sql.gz" || ERRORS=$((ERRORS + 1))

# --- Weekly copies (Sunday) ---
# Best-effort on purpose: if the daily dump failed above there is nothing to
# copy, and that failure was already counted in ERRORS.
if [ "${DAY_OF_WEEK}" -eq 7 ]; then
  log "Sunday: copying to weekly/"
  cp -f "${BACKUP_ROOT}/orion/daily/orion_${TIMESTAMP}.sql.gz" \
    "${BACKUP_ROOT}/orion/weekly/" 2>/dev/null || true
  cp -f "${BACKUP_ROOT}/gitea/daily/gitea_${TIMESTAMP}.sql.gz" \
    "${BACKUP_ROOT}/gitea/weekly/" 2>/dev/null || true
fi

# --- Rotation ---
log "Rotating old backups..."
rotate_backups "${BACKUP_ROOT}/orion/daily" "${DAILY_KEEP}"
rotate_backups "${BACKUP_ROOT}/gitea/daily" "${DAILY_KEEP}"
# Weekly retention is expressed in days: keep WEEKLY_KEEP weeks' worth.
rotate_backups "${BACKUP_ROOT}/orion/weekly" $((WEEKLY_KEEP * 7))
rotate_backups "${BACKUP_ROOT}/gitea/weekly" $((WEEKLY_KEEP * 7))

# --- Optional R2 upload ---
if [ "${UPLOAD}" = true ]; then
  upload_to_r2 || ERRORS=$((ERRORS + 1))
fi

# --- Summary ---
# Exit non-zero on any error so cron/systemd can alert on failure.
if [ "${ERRORS}" -eq 0 ]; then
  log "=== Backup completed successfully ==="
else
  log "=== Backup completed with ${ERRORS} error(s) ==="
  exit 1
fi