6 files changed: +123, -11 lines
Dockerfile:
-FROM tiangolo/uwsgi-nginx-flask:python3.6-alpine3.7
+FROM tiangolo/uwsgi-nginx-flask:python3.8-alpine

-ENV STATIC_URL /static
-ENV STATIC_PATH /var/www/app/static
+COPY . /app

-COPY ./requirements.txt /var/www/requirements.txt
+COPY docker/.aws /root/.aws
+COPY docker/sqlitebackup.sh /usr/local/bin/
+COPY docker/supervisor.d/ /etc/supervisor.d/
+COPY docker/prestart.sh /app/.

-COPY . /app
-RUN pip install -r /var/www/requirements.txt
+RUN chmod 755 /usr/local/bin/sq*
+
+RUN apk add bash sqlite
+RUN pip3 install awscli awscli_plugin_endpoint
+
+RUN cd /usr/local/bin && wget https://raw.githubusercontent.com/jacobtomlinson/docker-sqlite-to-s3/master/sqlite-to-s3.sh && chmod 755 sqlite*

-EXPOSE 8080
+RUN aws configure set plugins.endpoint awscli_plugin_endpoint

-ENTRYPOINT [ "python" ]
-CMD [ "/app/server.py" ]
+RUN pip install -r /app/requirements.txt
+VOLUME /app/data/
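Taken together, the new Dockerfile bakes the AWS CLI, the backup scripts, and their supervisord entry into the image, and declares /app/data as a volume. A minimal way to build and smoke-test the result (image tag, container name, and volume name are placeholders, not part of this change):

    $ docker build -t counter-app .
    $ docker run -d --name counter \
          -e AWS_ACCESS_KEY_ID=... \
          -e AWS_SECRET_ACCESS_KEY=... \
          -v counter-data:/app/data \
          -p 8080:80 counter-app

The base image serves the Flask app through nginx on port 80, so the host port mapping is arbitrary.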
Python configuration module:
 import os

 # Location of database
-DATABASE_FILE_PATH = os.path.abspath(os.getenv('DATABASE_FILE_PATH', 'data.db'))
+DATABASE_FILE_PATH = os.path.abspath(os.getenv('DATABASE_FILE_PATH', 'data/data.db'))

 # Amount of time before another view by the same user will count
 COOKIE_TIMEOUT = 60 * 5
 ...
 # Whitelist of URL patterns to track
 # Any URL will be allowed if list is empty
 URL_WHITELIST_RE = [
-]
+]
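Moving the default to data/data.db puts the database inside the /app/data volume declared above, while the DATABASE_FILE_PATH environment variable still wins when set. A quick sanity check, assuming the module is importable as config (the module name is an assumption of this sketch):

    $ DATABASE_FILE_PATH=/tmp/test.db python3 -c "import config; print(config.DATABASE_FILE_PATH)"
    /tmp/test.db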
docker/.aws (AWS CLI configuration for the Wasabi endpoint):
+[profile wasabi]
+region = eu-central-1
+s3 =
+    endpoint_url = https://s3.eu-central-1.wasabisys.com
+s3api =
+    endpoint_url = https://s3.eu-central-1.wasabisys.com
+[plugins]
+endpoint = awscli_plugin_endpoint
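Because awscli_plugin_endpoint rewrites the endpoint per profile, s3 and s3api calls made under the wasabi profile go to Wasabi instead of AWS. A manual check against the bucket the scripts default to (the bucket name sqlite comes from the scripts below):

    $ aws --profile wasabi s3 ls s3://sqlite/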
docker/prestart.sh:
+#!/usr/bin/env bash
+
+# Defaults, overridable via environment
+: ${DATABASE_FILE_PATH:="/app/data/data.db"}
+: ${S3_BUCKET:="sqlite"}
+
+set -e -o pipefail
+
+PROGNAME=$(basename "$0")
+
+# sqlite-to-s3.sh reads DATABASE_PATH and S3_BUCKET
+export DATABASE_PATH=$DATABASE_FILE_PATH S3_BUCKET
+
+err() {
+  echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): ERROR: $@" >&2
+}
+
+status() {
+  echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): $@"
+}
+
+# Restore the database from S3 only when no local copy exists yet
+if [[ ! -z $AWS_ACCESS_KEY_ID ]] && [[ ! -z $AWS_SECRET_ACCESS_KEY ]]; then
+  status "==> AWS CREDS DETECTED"
+  if [[ ! -f $DATABASE_PATH ]]; then
+    /usr/local/bin/sqlite-to-s3.sh restore
+  else
+    status "LOCAL DB FOUND at $DATABASE_PATH!"
+  fi
+fi
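The script originally set `set -e -o pipefail` twice and referenced $PROGNAME without defining it; the version above fixes both. The tiangolo/uwsgi-nginx-flask base image executes /app/prestart.sh before starting the application server, so the restore runs once per container start and is skipped whenever a local database already exists. The restore path can also be exercised by hand (container name reused from the earlier sketch; credentials elided):

    $ docker exec -e AWS_ACCESS_KEY_ID=... -e AWS_SECRET_ACCESS_KEY=... \
          counter /app/prestart.sh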
docker/sqlitebackup.sh:
+#!/usr/bin/env bash
+
+set -eo pipefail
+
+shopt -s nullglob dotglob
+
+PROGNAME=$(basename "$0")
+
+# Provide an option to override values via env variables
+: ${BKPINTERVAL:="60"}
+: ${LOCK_FD:="200"}
+: ${LOCK_FILE:="/var/lock/${PROGNAME}.lock"}
+: ${S3_BUCKET:="sqlite"}
+: ${DATABASE_FILE_PATH:="/app/data/data.db"}
+
+# sqlite-to-s3.sh reads DATABASE_PATH and S3_BUCKET
+export S3_BUCKET DATABASE_PATH=$DATABASE_FILE_PATH
+
+err() {
+  echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): ERROR: $@" >&2
+}
+
+status() {
+  echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): $@"
+}
+
+# Hold an exclusive lock on a dedicated fd so only one instance runs
+lock() {
+  eval "exec $LOCK_FD>$LOCK_FILE"
+  flock -n $LOCK_FD || { err "Cannot acquire lock on ${LOCK_FILE}"; exit 1; }
+}
+
+cleanup() {
+  shopt -u nullglob dotglob
+}
+
+finish() {
+  local exit_status="${1:-$?}"
+  if [[ "$exit_status" -eq 0 ]]; then
+    status "DONE (exit code: ${exit_status})"
+  else
+    err "exit code: ${exit_status}"
+  fi
+  cleanup
+  exit $exit_status
+}
+
+trap finish SIGHUP SIGINT SIGQUIT SIGTERM ERR
+
+lock
+
+status "Initial delay 30s ..."
+sleep 30
+
+# Back up on a fixed interval, skipping when no credentials are present
+while :; do
+  status "Starting backup"
+  if [[ ! -z $AWS_ACCESS_KEY_ID ]] && [[ ! -z $AWS_SECRET_ACCESS_KEY ]]; then
+    /usr/local/bin/sqlite-to-s3.sh backup
+  else
+    status "==> NO AWS credentials, backup skipped!"
+  fi
+  status "DONE."
+  status "Next backup in $BKPINTERVAL seconds..."
+  sleep "$BKPINTERVAL"
+done
+
+finish
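Two small fixes over the submitted version: "aquire" is corrected to "acquire", and the `exit 1` after a failed flock now runs in a brace group rather than a subshell, where it could not actually terminate the script. The flock on a dedicated file descriptor is what guarantees a single running instance: a second copy fails to take the lock and exits immediately instead of queueing behind the first. Since everything is parameterized through environment variables, the loop can also be run outside supervisord with a short interval for testing (credentials elided):

    $ BKPINTERVAL=10 AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... \
          /usr/local/bin/sqlitebackup.sh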
docker/supervisor.d/ program definition:
+[program:sqlbackup]
+command=/usr/local/bin/sqlitebackup.sh -r
+autostart=true
+autorestart=true
+stderr_logfile=/dev/stderr
+stdout_logfile=/dev/stdout
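With autorestart=true, supervisord restarts the loop if it ever dies. Note that when logging straight to /dev/stdout and /dev/stderr, supervisord generally also needs stdout_logfile_maxbytes=0 and stderr_logfile_maxbytes=0, since those devices cannot be rotated. The program can be checked from inside the running container (container name from the earlier sketch):

    $ docker exec -it counter supervisorctl status sqlbackup
    $ docker exec -it counter supervisorctl tail -f sqlbackup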