Back up a MongoDB database, then sync the entire persistent data folder to S3. Includes instructions.
#!/bin/bash
# Back up MongoDB and sync to S3
#
# Written by Aaron Martins
# [email protected]
# Feb 2017
#
# Note: Currently, only a local passwordless mongod on port 27017 is supported.
#
# ## Requirements:
# docker
# tar
# mongodb (specifically mongodump)
#
# ## Installation:
# 1. Install Docker (reminder: apt-get docker is NOT the correct way to install it).
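#    One common approach is Docker's convenience script (a sketch; double-check
#    the current Docker install docs before piping a script to a shell):
#      curl -fsSL https://get.docker.com -o get-docker.sh
#      sh get-docker.sh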
#
# 2. Add these env vars to ~/.profile or ~/.bash_profile:
# export AWS_ACCESS_KEY_ID=""
# export AWS_SECRET_ACCESS_KEY=""
# export AWS_DEFAULT_REGION="us-east-1"
#
# ### Cron:
# `crontab -e`
# add (for every 4 hours):
# ```
# # Loads /root/.profile into this context, which contains essential env vars. Then runs backup.sh and sends output to a custom log.
# 0 */4 * * * . /root/.profile; /PATH/TO/backup.sh "mydatabase" "mybucket/backups" >> /home/log/backup-cron.log 2>&1
# ```
#
# ## Use:
# `./backup.sh "mongoDatabaseName" "s3bucket/s3folder"`
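#
# ## Restore:
# A rough sketch, not handled by this script (assumes a local mongod and the
# same database name; each archive contains the dump/ folder from mongodump):
#   tar -xzf mydatabase_2017-02-13T03:43:00.tar.gz
#   mongorestore --db mydatabase dump/mydatabase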
#
if [ $# -ne 2 ] ; then
  echo 'Usage: ./backup.sh DB_NAME S3_BUCKET'
  exit 1
fi
if [ -z "${AWS_ACCESS_KEY_ID}" ] \
  || [ -z "${AWS_SECRET_ACCESS_KEY}" ] \
  || [ -z "${AWS_DEFAULT_REGION}" ] ;
then
  echo "Please set environment vars:
AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY
AWS_DEFAULT_REGION"
  exit 1
fi
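# Fail fast if a required host-side tool is missing (a small optional guard;
# assumes docker, tar, and mongodump are all expected on PATH).
for tool in docker tar mongodump; do
  if ! command -v "$tool" >/dev/null 2>&1 ; then
    echo "Required tool not found on PATH: $tool"
    exit 1
  fi
done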
# Resolve the full directory name of this script, no matter where it is called from.
PWDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
dt=$(date '+%Y-%m-%dT%H:%M:%S');
persistent_data="$PWDIR/persistent_data";
path="$persistent_data/db_backups"
file="$1_$dt.tar.gz"
fullFilePath="$path/$file"
mkdir -p "$path"
echo "Backing $1 up to $fullFilePath"
# Dump into a scratch directory so the archive only contains the dump/ folder.
mkdir -p tmp
cd tmp || exit 1
# TODO use docker to mongodump for more portability
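# One possible shape for that (a sketch only; assumes the official "mongo"
# image, which ships mongodump, and host networking so the container can reach
# the local mongod on port 27017):
# docker run --rm --net=host -v "$(pwd):/out" mongo:3.4 \
#   mongodump --quiet --db "$1" --out /out/dump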
mongodump --quiet --db "$1"
tar -czf "$fullFilePath" dump
cd ..
rm -rf tmp
# Point the "latest" symlink at the newest archive (-f so a missing link is not an error).
rm -f "$path/latest"
ln -s "$fullFilePath" "$path/latest"
echo "Backing up to AWS, s3://$2"
# Sync the whole persistent_data folder (mounted at /project in the container) to S3.
docker run --rm -t $(tty &>/dev/null && echo "-i") \
  -e "AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}" \
  -e "AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}" \
  -e "AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION}" \
  -v "$persistent_data:/project" \
  mesosphere/aws-cli \
  s3 sync \
  /project \
  "s3://$2" \
  --storage-class REDUCED_REDUNDANCY \
  --only-show-errors
echo "...done trying to back up to AWS."