diff --git a/README.md b/README.md index 3e997d2..1216c29 100644 --- a/README.md +++ b/README.md @@ -48,10 +48,14 @@ RESTORE=false `dockup` will use your AWS credentials to create a new bucket with name as per the environment variable `S3_BUCKET_NAME`, or if not defined, using the default name `docker-backups.example.com`. The paths in `PATHS_TO_BACKUP` will be tarballed, gzipped, time-stamped and uploaded to the S3 bucket. +If you want `dockup` to run as a cron task, you can set the environment variable `CRON_TIME` to the desired frequency, for example `CRON_TIME=0 0 * * *` to back up every day at midnight. + +For more complex backup tasks, such as dumping a database, you can optionally define the environment variables `BEFORE_BACKUP_CMD` and `AFTER_BACKUP_CMD`. ## Restore -To restore your data simply set the `RESTORE` environment variable to `true` - this will restore the latest backup from S3 to your volume. +To restore your data simply set the `RESTORE` environment variable to `true` - this will restore the latest backup from S3 to your volume. If you want to restore a specific backup instead of the last one, you can also set the environment variable `LAST_BACKUP` to the desired tarball name. +For more complex restore operations, you can define a command to be run once the tarball has been downloaded and extracted using the environment variable `AFTER_RESTORE_CMD`. 
## A note on Buckets diff --git a/backup.sh b/backup.sh index 20e221c..5ad4c27 100755 --- a/backup.sh +++ b/backup.sh @@ -1,17 +1,23 @@ #!/bin/bash export PATH=$PATH:/usr/bin:/usr/local/bin:/bin + # Get timestamp : ${BACKUP_SUFFIX:=.$(date +"%Y-%m-%d-%H-%M-%S")} readonly tarball=$BACKUP_NAME$BACKUP_SUFFIX.tar.gz +# If a pre-backup command is defined, run it before creating the tarball +if [ -n "$BEFORE_BACKUP_CMD" ]; then + eval "$BEFORE_BACKUP_CMD" || exit +fi + # Create a gzip compressed tarball with the volume(s) tar czf $tarball $BACKUP_TAR_OPTION $PATHS_TO_BACKUP # Create bucket, if it doesn't already exist -BUCKET_EXIST=$(aws s3 ls | grep $S3_BUCKET_NAME | wc -l) +BUCKET_EXIST=$(aws s3 --region $AWS_DEFAULT_REGION ls | grep $S3_BUCKET_NAME | wc -l) if [ $BUCKET_EXIST -eq 0 ]; then - aws s3 mb s3://$S3_BUCKET_NAME + aws s3 --region $AWS_DEFAULT_REGION mb s3://$S3_BUCKET_NAME fi # Upload the backup to S3 with timestamp @@ -19,3 +25,8 @@ aws s3 --region $AWS_DEFAULT_REGION cp $tarball s3://$S3_BUCKET_NAME/$tarball # Clean up rm $tarball + +# If a post-backup command is defined (e.g. for cleanup) +if [ -n "$AFTER_BACKUP_CMD" ]; then + eval "$AFTER_BACKUP_CMD" +fi \ No newline at end of file diff --git a/restore.sh b/restore.sh index 35ce640..eced2c9 100755 --- a/restore.sh +++ b/restore.sh @@ -1,10 +1,17 @@ #!/bin/bash -# Find last backup file -: ${LAST_BACKUP:=$(aws s3 ls s3://$S3_BUCKET_NAME | awk -F " " '{print $4}' | grep ^$BACKUP_NAME | sort -r | head -n1)} +if [ ! 
-n "${LAST_BACKUP}" ]; then + # Find last backup file + : ${LAST_BACKUP:=$(aws s3 --region $AWS_DEFAULT_REGION ls s3://$S3_BUCKET_NAME | awk -F " " '{print $4}' | grep ^$BACKUP_NAME | sort -r | head -n1)} +fi # Download backup from S3 -aws s3 cp s3://$S3_BUCKET_NAME/$LAST_BACKUP $LAST_BACKUP +aws s3 --region $AWS_DEFAULT_REGION cp s3://$S3_BUCKET_NAME/$LAST_BACKUP $LAST_BACKUP || (echo "Failed to download tarball from S3"; exit) # Extract backup -tar xzf $LAST_BACKUP $RESTORE_TAR_OPTION +tar xzf $LAST_BACKUP $RESTORE_TAR_OPTION || exit + +# If a post-extraction command is defined, run it +if [ -n "$AFTER_RESTORE_CMD" ]; then + eval "$AFTER_RESTORE_CMD" || exit +fi diff --git a/run.sh b/run.sh index 8af6650..0deab98 100755 --- a/run.sh +++ b/run.sh @@ -3,12 +3,13 @@ if [[ "$RESTORE" == "true" ]]; then ./restore.sh else - ./backup.sh -fi - -if [ -n "$CRON_TIME" ]; then - echo "${CRON_TIME} /backup.sh >> /dockup.log 2>&1" > /crontab.conf - crontab /crontab.conf - echo "=> Running dockup backups as a cronjob for ${CRON_TIME}" - exec cron -f -fi + if [ -n "$CRON_TIME" ]; then + env | grep -v 'affinity:container' | sed -e 's/^\([^=]*\)=\(.*\)/export \1="\2"/' > /env.conf # Save current environment + echo "${CRON_TIME} . /env.conf && /backup.sh >> /dockup.log 2>&1" > /crontab.conf + crontab /crontab.conf + echo "=> Running dockup backups as a cronjob for ${CRON_TIME}" + exec cron -f + else + ./backup.sh + fi +fi \ No newline at end of file