Skip to content

Instantly share code, notes, and snippets.

@yorickdewid
Created March 1, 2025 18:40
Show Gist options
  • Save yorickdewid/a143cd97a7d48122c40f6dcd1d96e587 to your computer and use it in GitHub Desktop.
PostgreSQL backup and upload script
#!/bin/bash
# PostgreSQL backup and S3 upload script.
# Requires: pg_dump (PostgreSQL client tools) and s3cmd on PATH.
# Credentials: supply the database password via PGPASSWORD or ~/.pgpass.
set -eo pipefail # Exit on error; a pipeline fails if any stage fails (e.g. du | cut below)

# log MESSAGE — print a timestamped line for consistent output.
log() {
  printf '[%s] %s\n' "$(date +'%Y-%m-%d %H:%M:%S')" "$1"
}
# --- Configuration ---------------------------------------------------------
# All values may be overridden from the environment, or edited in place.
# An empty DB_PORT would previously produce a broken '--port=' flag for
# pg_dump; host and port now fall back to the PostgreSQL defaults.
DB_HOST="${DB_HOST:-localhost}"
DB_PORT="${DB_PORT:-5432}"
DB_NAME="${DB_NAME:-}"                 # database to dump (required)
DB_USER="${DB_USER:-}"                 # role to connect as (required)
BACKUP_DIR="${BACKUP_DIR:-pg_backup}"  # local staging directory
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
BACKUP_FILE="${BACKUP_DIR}/${DB_NAME}_${TIMESTAMP}.bak"
RETENTION_DAYS=7                       # keep local backups/logs this many days
S3_BUCKET="${S3_BUCKET:-}"             # destination bucket (required)
S3_PREFIX="db-backup/${DB_NAME}"
LOG_FILE="${BACKUP_DIR}/backup_${TIMESTAMP}.log"

# Create the backup directory if it doesn't exist
mkdir -p "${BACKUP_DIR}"

# Mirror everything written to stdout/stderr into the per-run log file
exec > >(tee -a "${LOG_FILE}") 2>&1
log "Starting backup of database ${DB_NAME} with user ${DB_USER}"

# Verify required client tools are on PATH before doing any real work.
if ! command -v pg_dump > /dev/null 2>&1; then
  log "Error: pg_dump command not found. Please install PostgreSQL client tools."
  exit 1
fi
if ! command -v s3cmd > /dev/null 2>&1; then
  log "Error: s3cmd command not found. Please install s3cmd."
  exit 1
fi

# Password resolution order: PGPASSWORD env var, then ~/.pgpass, then an
# interactive prompt from pg_dump itself.
if [ -z "${PGPASSWORD}" ]; then
  log "PGPASSWORD environment variable not set. Using ~/.pgpass or will prompt for password."
fi
# Create the backup with explicit error handling.
#
# The command is run directly as the 'if' condition: with 'set -e' active,
# the old pattern "pg_dump ...; if [ $? -eq 0 ]" could never reach its
# failure branch (the script would exit before the $? check).
log "Creating backup file: ${BACKUP_FILE}"
if pg_dump \
  --host="${DB_HOST}" \
  --port="${DB_PORT}" \
  --username="${DB_USER}" \
  --format=custom \
  --compress=9 \
  --file="${BACKUP_FILE}" \
  --exclude-table=public \
  "${DB_NAME}"; then
  # NOTE(review): --exclude-table matches a *table* named "public"; if the
  # intent was to skip the public schema, use --exclude-schema=public — confirm.
  BACKUP_SIZE=$(du -h "${BACKUP_FILE}" | cut -f1)
  log "Database backup completed successfully: ${BACKUP_FILE} (Size: ${BACKUP_SIZE})"
else
  log "Error creating backup"
  exit 1
fi
# Upload the dump to S3 with a private ACL. s3cmd runs as the 'if'
# condition: under 'set -e' a separate '$?' check after the command is
# unreachable on failure.
log "Uploading backup to S3: s3://${S3_BUCKET}/${S3_PREFIX}/"
S3_TARGET="s3://${S3_BUCKET}/${S3_PREFIX}/${DB_NAME}_${TIMESTAMP}.bak"
if s3cmd put "${BACKUP_FILE}" "${S3_TARGET}" --acl-private; then
  log "Backup uploaded to S3: ${S3_TARGET}"
else
  log "S3 upload failed!"
  exit 1
fi
# Prune local backups and logs older than the retention window.
# RETENTION_DAYS <= 0 disables pruning entirely.
if [ "${RETENTION_DAYS}" -gt 0 ]; then
  log "Cleaning up local backups older than ${RETENTION_DAYS} days"
  find "${BACKUP_DIR}" -name "${DB_NAME}_*.bak" -type f -mtime +"${RETENTION_DAYS}" -delete
  find "${BACKUP_DIR}" -name "backup_*.log" -type f -mtime +"${RETENTION_DAYS}" -delete
fi

# Remove the dump we just uploaded; S3 now holds the authoritative copy.
log "Removing local backup file: ${BACKUP_FILE}"
rm -f -- "${BACKUP_FILE}"

log "Backup process completed successfully"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment