Skip to content

Commit b52dbd3

Browse files
authored
Merge pull request #125 from kartoza/monitor
capture pg_dump success or failure state and add monitor option
2 parents 8b85dea + f3359f7 commit b52dbd3

File tree

2 files changed

+86
-30
lines changed

2 files changed

+86
-30
lines changed

README.md

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -158,6 +158,8 @@ For a typical usage of this look at the [docker-compose-s3.yml](https://github.c
158158
The image supports mounting the following configs:
159159
* `s3cfg` when backing up to the `S3` backend
160160
* backup-cron for any custom configuration you need to specify in the file.
161+
* `backup_monitoring.sh` - For any custom monitoring state on database dump completion or failure
162+
e.g. add webhook/callback support for backup completion notifications
161163

162164
An environment variable `${EXTRA_CONFIG_DIR}` controls the location of the folder.
163165

@@ -170,6 +172,21 @@ run the following:
170172
```
171173
Where `s3cfg` is located in `/data`
172174

175+
If you need to run e.g. a webhook, you can implement your own custom hook logic:
176+
```
177+
-e EXTRA_CONFIG_DIR=/settings
178+
-v /data:/settings
179+
```
180+
181+
Where `backup_monitoring.sh` is located in `/data` or in
182+
Kubernetes, where you can mount this file as a ConfigMap.
183+
184+
or you can simply set the environment variable
185+
```bash
186+
MONITORING_ENDPOINT_COMMAND="""curl -D - -X POST -G 'https://appsignal-endpoint.net/check_ins/heartbeats' -d 'api_key=YOUR-APP-LEVEL-API-KEY' -d 'identifier=YOUR-CHECK-IN-IDENTIFIER'"""
187+
```
188+
to monitor success or failure of the backup.
189+
173190
## Restoring
174191

175192
The image provides a simple restore script. There are two ways to restore files based on the

scripts/backups.sh

Lines changed: 69 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,22 @@ function s3_config() {
2323

2424
}
2525

26+
#######################################
# Run the user-supplied monitoring hook after a backup attempt.
# Precedence:
#   1. MONITORING_ENDPOINT_COMMAND env var, evaluated as a shell command line.
#   2. ${EXTRA_CONFIG_DIR}/backup_monitoring.sh, installed into
#      /backup-scripts and executed.
#   3. Otherwise a notice is printed and nothing runs.
# Globals:   MONITORING_ENDPOINT_COMMAND (read), EXTRA_CONFIG_DIR (read)
# Returns:   exit status of the hook, or 0 when no hook is configured.
#######################################
function backup_monitoring() {
  # ${VAR:-} keeps both checks safe if the script ever runs with `set -u`
  # and neither variable is defined.
  if [[ -n "${MONITORING_ENDPOINT_COMMAND:-}" ]]; then
    # NOTE(review): eval executes operator-supplied text as shell code; this
    # is intentional (the variable holds a full command line), so it must
    # only ever come from trusted deployment configuration.
    eval "${MONITORING_ENDPOINT_COMMAND}"
  elif [[ -f "${EXTRA_CONFIG_DIR:-}/backup_monitoring.sh" ]]; then
    # Install (or refresh) the mounted hook script, then run it.
    cp -f "${EXTRA_CONFIG_DIR}/backup_monitoring.sh" /backup-scripts/backup_monitoring.sh
    chmod 0755 /backup-scripts/backup_monitoring.sh
    /bin/bash /backup-scripts/backup_monitoring.sh
  else
    echo "No monitoring command or script found."
  fi
}
40+
41+
2642
# Cleanup S3 bucket
2743
function clean_s3bucket() {
2844
S3_BUCKET="$1"
@@ -81,12 +97,14 @@ function dump_tables() {
8197
pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d "${DATABASE}" -t "${DB_TABLE}" | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out "${FILENAME}"
8298
if [[ $? -ne 0 ]];then
8399
echo -e "Backup of \e[0;32m ${DB_TABLE} \033[0m from DATABASE \e[0;32m ${DATABASE} \033[0m failed" >> ${CONSOLE_LOGGING_OUTPUT}
100+
backup_monitoring
84101
fi
85102
else
86103
# Plain backup
87104
pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d "${DATABASE}" -t "${DB_TABLE}" > "${FILENAME}"
88105
if [[ $? -ne 0 ]];then
89106
echo -e "Backup of \e[0;32m ${DB_TABLE} \033[0m from DATABASE \e[0;32m ${DATABASE} \033[0m failed" >> ${CONSOLE_LOGGING_OUTPUT}
107+
backup_monitoring
90108
fi
91109
fi
92110

@@ -96,54 +114,75 @@ function dump_tables() {
96114
done
97115
}
98116

99-
100117
#######################################
# Dump every database listed in ${DBLIST} to a file, optionally encrypting
# the dump with openssl and optionally pushing the result to S3. The
# monitoring hook (backup_monitoring) fires after every dump attempt,
# on success and on failure.
# Globals (read):    DBLIST, ARCHIVE_FILENAME, MYBACKUPDIR, MYBASEDIR,
#                    DUMPPREFIX, MYDATE, POSTGRES_PASS, PG_CONN_PARAMETERS,
#                    DUMP_ARGS, DB_DUMP_ENCRYPTION,
#                    DB_DUMP_ENCRYPTION_PASS_PHRASE, CONSOLE_LOGGING_OUTPUT,
#                    STORAGE_BACKEND, BUCKET
# Globals (written): FILENAME, PGPASSWORD (both exported)
# Arguments:         $1 - optional extra command run after a dump
#                         (e.g. the S3 push command)
#######################################
function backup_db() {
  # ${1:-} keeps this safe when no argument is passed (and under `set -u`).
  EXTRA_PARAMS=''
  if [ -n "${1:-}" ]; then
    EXTRA_PARAMS=$1
  fi

  # NOTE(review): the per-table path (DB_TABLES / dump_tables) is no longer
  # invoked from here — confirm dropping it was intentional.
  for DB in ${DBLIST}; do
    if [ -z "${ARCHIVE_FILENAME:-}" ]; then
      export FILENAME="${MYBACKUPDIR}/${DUMPPREFIX}_${DB}.${MYDATE}.dmp"
    else
      export FILENAME="${MYBASEDIR}/${ARCHIVE_FILENAME}.${DB}.dmp"
    fi

    export PGPASSWORD="${POSTGRES_PASS}"
    echo "[$(date)] Starting backup of ${DB}" >> "${CONSOLE_LOGGING_OUTPUT}"

    if [[ "${DB_DUMP_ENCRYPTION}" =~ ^([Tt][Rr][Uu][Ee])$ ]]; then
      # Run the pipeline in a subshell with pipefail so a pg_dump failure is
      # detected, without leaking `pipefail` into the rest of the script:
      # the previous global `set -o pipefail` was never undone on the
      # `continue` failure path.
      if (
        set -o pipefail
        pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d "${DB}" \
          | openssl enc -aes-256-cbc \
              -pass pass:"${DB_DUMP_ENCRYPTION_PASS_PHRASE}" \
              -pbkdf2 -iter 10000 -md sha256 \
              -out "${FILENAME}"
      ); then
        echo "[$(date)] Backup SUCCESS (encrypted) for ${DB}, saved to ${FILENAME}" \
          >> "${CONSOLE_LOGGING_OUTPUT}"
        backup_monitoring
      else
        echo "[$(date)] ERROR: Backup FAILED (encrypted) for ${DB}" \
          >> "${CONSOLE_LOGGING_OUTPUT}"
        backup_monitoring
        # Remove the partial/empty dump so it cannot be mistaken for a backup.
        rm -f "${FILENAME}"
        continue
      fi
    else
      if pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d "${DB}" > "${FILENAME}"; then
        echo "[$(date)] Backup SUCCESS for ${DB}, saved to ${FILENAME}" \
          >> "${CONSOLE_LOGGING_OUTPUT}"
        backup_monitoring
      else
        echo "[$(date)] ERROR: Backup FAILED for ${DB}" \
          >> "${CONSOLE_LOGGING_OUTPUT}"
        backup_monitoring
        rm -f "${FILENAME}"
        continue
      fi
    fi

    # Successful dumps are optionally compressed and shipped to S3.
    if [[ "${STORAGE_BACKEND}" == "S3" ]]; then
      gzip "${FILENAME}"
      echo "[$(date)] Uploading ${FILENAME}.gz to s3://${BUCKET}/" \
        >> "${CONSOLE_LOGGING_OUTPUT}"
      ${EXTRA_PARAMS}
      rm -f ${MYBACKUPDIR}/*.dmp.gz
    fi
  done
}
146184

185+
147186
function remove_files() {
148187
TIME_MINUTES=$((REMOVE_BEFORE * 24 * 60))
149188
MIN_SAVED_FILE=${MIN_SAVED_FILE:-0}

0 commit comments

Comments
 (0)