From 9f9644a9aee27d2da01469b4aa19645a6cbeb827 Mon Sep 17 00:00:00 2001
From: Maor Rozenfeld <49363375+maor-rozenfeld@users.noreply.github.com>
Date: Wed, 7 Jan 2026 21:15:57 +0100
Subject: [PATCH] Add automated PostgreSQL backup service

- Add postgres-backup service to docker-compose.yml
- Configure daily backups with 7-day retention by default
- Include backup script for automated database dumps
- Update README with backup configuration and restore instructions
- Add configurable backup schedule and retention via env vars

Fixes CI-135.
---
 deploy/docker-compose/.env.defaults      |   4 +
 deploy/docker-compose/README.md          |  64 +++++++++++++
 deploy/docker-compose/docker-compose.yml |  19 ++++
 deploy/docker-compose/postgres-backup.sh | 120 ++++++++++++++++++++++++
 4 files changed, 207 insertions(+)
 create mode 100755 deploy/docker-compose/postgres-backup.sh

diff --git a/deploy/docker-compose/.env.defaults b/deploy/docker-compose/.env.defaults
index 41646537c5..83009c7bef 100644
--- a/deploy/docker-compose/.env.defaults
+++ b/deploy/docker-compose/.env.defaults
@@ -68,6 +68,10 @@ OPS_POSTGRES_PORT=5432
 OPS_POSTGRES_USERNAME=postgres
 OPS_OPENOPS_TABLES_DB_HOST=postgres
 
+# Database Backups
+BACKUP_RETENTION_DAYS=7
+BACKUP_SCHEDULE=0 2 * * *
+
 # ---------------------------------------------------------
 # Tables
 # ---------------------------------------------------------
diff --git a/deploy/docker-compose/README.md b/deploy/docker-compose/README.md
index ee1898ac42..e811c73fa2 100644
--- a/deploy/docker-compose/README.md
+++ b/deploy/docker-compose/README.md
@@ -28,3 +28,67 @@ However, it is possible to share your local session with the platform for local
 To do this, you need to set two environment variables:
 - `OPS_ENABLE_HOST_SESSION=true`: enables sharing of the host session with the platform container.
 - `HOST_CLOUDSDK_CONFIG=/root/.config/gcloud`: defines the path to the host machine's Google Cloud configuration folder that will be shared with the platform container
+
+
+# Database Backups
+
+PostgreSQL backups are performed automatically by the `postgres-backup` service defined in `docker-compose.yml`:
+
+## Backup Configuration
+
+- **Schedule**: Backups run daily at 2 AM by default (configurable via the `BACKUP_SCHEDULE` env var, using cron syntax)
+- **Retention**: Backups are kept for 7 days by default (configurable via the `BACKUP_RETENTION_DAYS` env var)
+- **Initial Backup**: A backup is performed immediately when the service starts
+- **Location**: Backups are stored in the `postgres_backups` Docker volume, mounted at `/backups` inside the backup container
+
+## Backup Types
+
+The service performs two types of backups:
+1. **Database backup**: Individual application database (`backup_YYYYMMDD_HHMMSS.sql.gz`)
+2. **Full cluster backup**: All databases, including analytics (`backup_all_YYYYMMDD_HHMMSS.sql.gz`)
+
+## Customizing Backup Schedule
+
+Edit your `.env` file to customize backup behavior:
+
+```bash
+# Run backups every 6 hours
+BACKUP_SCHEDULE=0 */6 * * *
+
+# Keep backups for 30 days
+BACKUP_RETENTION_DAYS=30
+```
+
+## Restoring from Backup
+
+To restore a backup (replace `<postgres-backup-container>` and `<postgres-container>` with the actual container names from `docker ps`):
+
+```bash
+# List available backups
+docker exec -it <postgres-backup-container> ls -lh /backups
+
+# Restore a compressed backup into the openops database
+docker exec -i <postgres-backup-container> \
+  bash -c 'gunzip -c /backups/backup_20260107_020000.sql.gz | PGPASSWORD="$POSTGRES_PASSWORD" psql -h postgres -U postgres -d openops'
+
+# Or restore a plain SQL dump that was copied to the host and decompressed first
+docker exec -i <postgres-container> \
+  psql -U postgres -d openops < backup_20260107_020000.sql
+```
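+
+A full cluster dump produced by `pg_dumpall` is restored with `psql` connected to the `postgres` maintenance database, so that roles and all databases are recreated (ideally into an empty cluster). A minimal sketch, assuming the same backup container as above:
+
+```bash
+# Restore a full cluster backup (roles + all databases)
+docker exec -i <postgres-backup-container> \
+  bash -c 'gunzip -c /backups/backup_all_20260107_020000.sql.gz | PGPASSWORD="$POSTGRES_PASSWORD" psql -h postgres -U postgres -d postgres'
+```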
+
+## Accessing Backups
+
+Backups can be copied from the backup container to your host:
+
+```bash
+docker cp <postgres-backup-container>:/backups ./postgres-backups
+```
+
diff --git a/deploy/docker-compose/docker-compose.yml b/deploy/docker-compose/docker-compose.yml
index f2c2701475..24223e203e 100644
--- a/deploy/docker-compose/docker-compose.yml
+++ b/deploy/docker-compose/docker-compose.yml
@@ -141,9 +141,28 @@ services:
       interval: 5s
       timeout: 3s
       retries: 3
+  postgres-backup:
+    image: 'postgres:14.4'
+    restart: unless-stopped
+    environment:
+      POSTGRES_USER: ${OPS_POSTGRES_USERNAME}
+      POSTGRES_PASSWORD: ${OPS_POSTGRES_PASSWORD}
+      POSTGRES_DB: ${OPS_POSTGRES_DATABASE}
+      POSTGRES_HOST: postgres
+      POSTGRES_PORT: 5432
+      BACKUP_RETENTION_DAYS: ${BACKUP_RETENTION_DAYS:-7}
+      BACKUP_SCHEDULE: ${BACKUP_SCHEDULE:-0 2 * * *}
+    volumes:
+      - postgres_backups:/backups
+      - ./postgres-backup.sh:/usr/local/bin/backup.sh:ro
+    entrypoint: ['/bin/bash', '/usr/local/bin/backup.sh']
+    depends_on:
+      postgres:
+        condition: service_healthy
 volumes:
   openops_azure_cli_data:
   openops_gcloud_cli_data:
   openops_tables_data:
   postgres_data:
+  postgres_backups:
   redis_data:
diff --git a/deploy/docker-compose/postgres-backup.sh b/deploy/docker-compose/postgres-backup.sh
new file mode 100755
index 0000000000..bc9caa7570
--- /dev/null
+++ b/deploy/docker-compose/postgres-backup.sh
@@ -0,0 +1,120 @@
+#!/bin/bash
+set -e
+
+BACKUP_DIR="/backups"
+RETENTION_DAYS="${BACKUP_RETENTION_DAYS:-7}"
+SCHEDULE="${BACKUP_SCHEDULE:-0 2 * * *}"
+
+echo "PostgreSQL Backup Service Started"
+echo "Backup directory: $BACKUP_DIR"
+echo "Retention period: $RETENTION_DAYS days"
+echo "Backup schedule: $SCHEDULE"
+
+# Install cron for scheduled backups (skipped when it is already present)
+command -v cron > /dev/null 2>&1 || (apt-get update -qq && apt-get install -y -qq cron > /dev/null 2>&1)
+
+perform_backup() {
+  local timestamp=$(date +%Y%m%d_%H%M%S)
+  local backup_file="${BACKUP_DIR}/backup_${timestamp}.sql.gz"
+
+  echo "[$(date '+%Y-%m-%d %H:%M:%S')] Starting backup to $backup_file"
+
+  # Perform the backup; only the dump on stdout is compressed, --verbose messages stay on stderr
+  PGPASSWORD="$POSTGRES_PASSWORD" pg_dump \
+    -h "$POSTGRES_HOST" \
+    -p "$POSTGRES_PORT" \
+    -U "$POSTGRES_USER" \
+    -d "$POSTGRES_DB" \
+    --verbose \
+    | gzip > "$backup_file"
+
+  if [ ${PIPESTATUS[0]} -eq 0 ]; then
+    local size=$(du -h "$backup_file" | cut -f1)
+    echo "[$(date '+%Y-%m-%d %H:%M:%S')] Backup completed successfully: $backup_file ($size)"
+
+    # Cleanup old backups
+    echo "[$(date '+%Y-%m-%d %H:%M:%S')] Cleaning up backups older than $RETENTION_DAYS days"
+    find "$BACKUP_DIR" -name "backup_*.sql.gz" -type f -mtime +$RETENTION_DAYS -delete
+
+    local remaining=$(find "$BACKUP_DIR" -name "backup_*.sql.gz" -type f | wc -l)
+    echo "[$(date '+%Y-%m-%d %H:%M:%S')] Retained backups: $remaining"
+  else
+    echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Backup failed" >&2
+    rm -f "$backup_file"
+    return 1
+  fi
+}
+
+# Back up all databases (including analytics)
+backup_all_databases() {
+  local timestamp=$(date +%Y%m%d_%H%M%S)
+  local backup_file="${BACKUP_DIR}/backup_all_${timestamp}.sql.gz"
+
+  echo "[$(date '+%Y-%m-%d %H:%M:%S')] Starting full cluster backup to $backup_file"
+
+  PGPASSWORD="$POSTGRES_PASSWORD" pg_dumpall \
+    -h "$POSTGRES_HOST" \
+    -p "$POSTGRES_PORT" \
+    -U "$POSTGRES_USER" \
+    --verbose \
+    | gzip > "$backup_file"
+
+  if [ ${PIPESTATUS[0]} -eq 0 ]; then
+    local size=$(du -h "$backup_file" | cut -f1)
+    echo "[$(date '+%Y-%m-%d %H:%M:%S')] Full backup completed successfully: $backup_file ($size)"
+
+    # Cleanup old full backups
+    find "$BACKUP_DIR" -name "backup_all_*.sql.gz" -type f -mtime +$RETENTION_DAYS -delete
+  else
+    echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Full backup failed" >&2
+    rm -f "$backup_file"
+    return 1
+  fi
+}
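+
+# When cron re-invokes this script with the --cron flag (see the /etc/cron.d entry
+# written further down), run a single backup cycle and exit. This keeps the scheduled
+# job self-contained instead of depending on exported shell functions.
+if [ "${1:-}" = "--cron" ]; then
+  perform_backup
+  backup_all_databases
+  exit 0
+fi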
+
+# Wait for PostgreSQL to be ready
+echo "Waiting for PostgreSQL to be ready..."
+until PGPASSWORD="$POSTGRES_PASSWORD" pg_isready -h "$POSTGRES_HOST" -p "$POSTGRES_PORT" -U "$POSTGRES_USER"; do
+  sleep 2
+done
+echo "PostgreSQL is ready"
+
+# Create backup directory if it doesn't exist
+mkdir -p "$BACKUP_DIR"
+
+# Perform initial backup
+echo "Performing initial backup..."
+perform_backup
+backup_all_databases
+
+# Write connection settings to a file the scheduled job can source (values are single-quoted for safe sourcing)
+{
+  echo "export BACKUP_RETENTION_DAYS='$RETENTION_DAYS'"
+  echo "export POSTGRES_HOST='$POSTGRES_HOST'"
+  echo "export POSTGRES_PORT='$POSTGRES_PORT'"
+  echo "export POSTGRES_USER='$POSTGRES_USER'"
+  echo "export POSTGRES_PASSWORD='$POSTGRES_PASSWORD'"
+  echo "export POSTGRES_DB='$POSTGRES_DB'"
+} > /etc/postgres-backup.env
+chmod 0600 /etc/postgres-backup.env
+
+# Setup cron job: re-run this script in single-run mode on the configured schedule
+echo "$SCHEDULE root . /etc/postgres-backup.env && /bin/bash /usr/local/bin/backup.sh --cron >> /var/log/backup.log 2>&1" > /etc/cron.d/postgres-backup
+chmod 0644 /etc/cron.d/postgres-backup
+
+# Start cron in foreground
+echo "Starting cron scheduler..."
+touch /var/log/backup.log
+cron && tail -f /var/log/backup.log
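+
+# Note: a one-off backup can be triggered manually from the host, for example:
+#   docker compose exec postgres-backup /bin/bash /usr/local/bin/backup.sh --cron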