#!/usr/bin/env bash
#
# Back up a PostgreSQL database and upload the backup to S3.
# After the backup is done, the script can optionally send a notification to a Telegram chat or a private URL.
set -euo pipefail
IFS=$'\n\t'
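# Required environment (a summary of the variables referenced below, for orientation):
#   POSTGRES_HOST, POSTGRES_PORT, POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_DB,
#   POSTGRES_EXTRA_OPTS, S3_ENDPOINT, S3_BUCKET, S3_ALIAS, S3_ACCESS_KEY,
#   S3_SECRET_KEY, S3_OBJECT_PATH
# Example invocation by the scheduler (illustrative):
#   ./backup.sh daily/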
# Normalize the optional path argument from the scheduler: default to the
# current directory when it is absent, otherwise ensure a trailing slash
if [[ -z "${1:-}" ]]; then
    backup_path=""
elif [[ ! "$1" == */ ]]; then
    backup_path="$1/"
else
    backup_path="$1"
fi
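# e.g. "daily" becomes "daily/"; "weekly/" is kept as-is (illustrative values)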
export PGPASSWORD="${POSTGRES_PASSWORD}"
if [[ -n "${backup_path}" ]]; then
    mkdir --parents "${backup_path}"
    cd "${backup_path}"
fi
echo "Starting ${1:-} backup in $(pwd)"
# Dump the database (a logical dump via pg_dump, not a physical base backup)
echo "Backing up [${POSTGRES_USER}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}] to \
[${S3_ENDPOINT}], extra opts - [${POSTGRES_EXTRA_OPTS}]."
# POSTGRES_EXTRA_OPTS is intentionally unquoted so multiple options split into
# separate arguments
# shellcheck disable=SC2086
pg_dump --username="${POSTGRES_USER}" \
        --host="${POSTGRES_HOST}" \
        --port="${POSTGRES_PORT}" \
        --dbname="${POSTGRES_DB}" \
        ${POSTGRES_EXTRA_OPTS} \
        > "${POSTGRES_DB}.sql"
# Work out the archive name and its S3 object path
if [[ ${S3_OBJECT_PATH} != "**None**" ]]; then
    ARCHIVE_FILE_NAME=$(basename "${S3_OBJECT_PATH}")
    relative_s3_object_path="${S3_OBJECT_PATH}"
else
    # The timestamp becomes the directory name in the bucket: yyyy-mm-dd_HH:MM:SS
    timestamp="$(date +%F_%T)"
    ARCHIVE_FILE_NAME="${POSTGRES_DB}.tar.gz"
    relative_s3_object_path="${S3_BUCKET}/${backup_path}${timestamp}/${ARCHIVE_FILE_NAME}"
fi
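# e.g. relative_s3_object_path = "backups/daily/2024-05-01_04:00:00/mydb.tar.gz"
# (illustrative bucket, path, and database names)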
FULL_S3_DIR_PATH="${S3_ENDPOINT}/${relative_s3_object_path}"
# Compress the dump into a gzipped tarball
tar --create \
    --gzip \
    --verbose \
    --file "${ARCHIVE_FILE_NAME}" \
    "${POSTGRES_DB}.sql"
# Record the human-readable archive size
ARCHIVE_FILE_SIZE="$(ls -lh "${ARCHIVE_FILE_NAME}" | awk '{print $5}')"
echo "Created ${ARCHIVE_FILE_NAME} with file size: ${ARCHIVE_FILE_SIZE}"
# Register an alias for the S3 endpoint with the MinIO client (mcli)
mcli alias set "${S3_ALIAS}" "${S3_ENDPOINT}" "${S3_ACCESS_KEY}" "${S3_SECRET_KEY}"
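# The alias stores the endpoint and credentials in the client's local config,
# so later mcli commands can reference the store via "${S3_ALIAS}/..." without
# repeating credentials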
echo "Starting to copy ${ARCHIVE_FILE_NAME} to ${FULL_S3_DIR_PATH}..."
# Copying backup to S3
mcli cp "${ARCHIVE_FILE_NAME}" "${S3_ALIAS}"/"${relative_s3_object_path}"
# Clean up the local working directory
echo "Maid is here... Doing cleaning..."
if [[ -n "${backup_path}" ]]; then
    cd ..
    rm --recursive --force "${backup_path}"
fi
# Send a notification (hourly backups are excluded so the chat is not spammed)
if [[ ! ${backup_path} =~ ^hourly.? ]]; then
    echo "Starting notification routine..."
    # Determine which backup schedule applies
    if [[ ${backup_path} =~ ^daily.? ]]; then
        BACKUP_SCHEDULE="-=DAILY=-"
    elif [[ ${backup_path} =~ ^weekly.? ]]; then
        BACKUP_SCHEDULE="-=WEEKLY=-"
    else
        BACKUP_SCHEDULE="-=UNCERTAIN SCHEDULE=-"
    fi
    # Export values for the notification hook scripts
    export ARCHIVE_FILE_NAME
    export ARCHIVE_FILE_SIZE
    export FULL_S3_DIR_PATH
    export BACKUP_SCHEDULE
    # Run every hook in /hooks in lexical order; pass the path as a positional
    # argument so unusual characters in file names cannot break the sh -c command
    find /hooks -type f -name '*.sh' -print0 | \
        sort -z | \
        xargs -0 -I {} sh -c 'echo "Running: $1" && "$1"' _ {}
fi
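# Example hook (an illustrative sketch; the file name and the TG_BOT_TOKEN and
# TG_CHAT_ID variables are assumptions, not part of this repository):
#   /hooks/10-telegram.sh
#     #!/usr/bin/env sh
#     curl -s "https://api.telegram.org/bot${TG_BOT_TOKEN}/sendMessage" \
#          -d chat_id="${TG_CHAT_ID}" \
#          -d text="${BACKUP_SCHEDULE} ${ARCHIVE_FILE_NAME} (${ARCHIVE_FILE_SIZE}) uploaded to ${FULL_S3_DIR_PATH}"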