Commit b31da0b

Merge branch 'master' into develop
2 parents be490b3 + 028966d commit b31da0b

File tree: 10 files changed, +201 -15 lines changed

.gitattributes

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+# Declare files that will always have LF line endings on checkout.
+*.* text eol=lf

Dockerfile

Lines changed: 4 additions & 0 deletions
@@ -36,6 +36,7 @@ RUN set -ex && \
     postgresql \
     postgresql-client \
     pv \
+    py3-cryptography \
     redis \
     sqlite \
     xz \
@@ -68,6 +69,9 @@ RUN set -ex && \
     make && \
     make install && \
     \
+    apk add gcc build-base libressl-dev libffi-dev python3-dev py3-pip && \
+    pip3 install blobxfer && \
+    \
     ### Cleanup
     apk del .db-backup-build-deps && \
     rm -rf /usr/src/* && \
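The new `apk add`/`pip3 install blobxfer` steps provide the `blobxfer` CLI used by the new `BACKUP_LOCATION=blobxfer` option. A quick way to confirm the tool is present in a built image is sketched below; it assumes the `tiredofit/db-backup` tag referenced in the examples and that the image's default entrypoint can be overridden this way.

```bash
# Sketch only: invoke the blobxfer CLI installed by the pip3 step above.
# Using `upload --help` avoids needing any Azure credentials.
docker run --rm --entrypoint blobxfer tiredofit/db-backup upload --help
```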

README.md

Lines changed: 20 additions & 3 deletions
@@ -14,7 +14,7 @@ This will build a container for backing up multiple types of DB Servers
 
 Currently backs up CouchDB, InfluxDB, MySQL, MongoDB, Postgres, Redis servers.
 
-* dump to local filesystem or backup to S3 Compatible services
+* dump to local filesystem or backup to S3 Compatible services and Azure
 * select database user and password
 * backup all databases, single, or multiple databases
 * backup all to seperate files or one singular file
@@ -102,6 +102,7 @@ Images are built primarily for `amd64` architecture, and may also include builds
 * Set various [environment variables](#environment-variables) to understand the capabilities of this image.
 * Map [persistent storage](#data-volumes) for access to configuration and data files for backup.
 * Make [networking ports](#networking) available for public access if necessary
+
 ### Persistent Storage
 
 The following directories are used for configuration and can be mapped for persistent storage.
@@ -189,11 +190,11 @@ Your Organization will be mapped to `DB_USER` and your root token will need to b
 If `BACKUP_LOCATION` = `S3` then the following options are used.
 
 | Parameter             | Description                                                                                 | Default |
-| --------------------- | ------------------------------------------------------------------------------------------- | ------- |
+|-----------------------|-------------------------------------------------------------------------------------------|---------|
 | `S3_BUCKET`           | S3 Bucket name e.g. `mybucket`                                                              |         |
 | `S3_KEY_ID`           | S3 Key ID                                                                                   |         |
 | `S3_KEY_SECRET`       | S3 Key Secret                                                                               |         |
-| `S3_PATH`             | S3 Pathname to save to e.g. '`backup`'                                                      |         |
+| `S3_PATH`             | S3 Pathname to save to (must end in a trailing slash e.g. '`backup/`')                      |         |
 | `S3_REGION`           | Define region in which bucket is defined. Example: `ap-northeast-2`                         |         |
 | `S3_HOST`             | Hostname (and port) of S3-compatible service, e.g. `minio:8080`. Defaults to AWS.           |         |
 | `S3_PROTOCOL`         | Protocol to connect to `S3_HOST`. Either `http` or `https`. Defaults to `https`.            | `https` |
@@ -202,6 +203,22 @@ If `BACKUP_LOCATION` = `S3` then the following options are used.
 | _*OR*_                |                                                                                             |         |
 | `S3_CERT_SKIP_VERIFY` | Skip verifying self signed certificates when connecting                                     | `TRUE`  |
 
+#### Upload to an Azure storage account with `blobxfer`
+
+Backup files can be uploaded to Azure File share storage with [blobxfer](https://github.com/Azure/blobxfer).
+
+
+If `BACKUP_LOCATION` = `blobxfer` then the following options are used.
+
+| Parameter                      | Description                                 | Default             |
+| ------------------------------ | ------------------------------------------- | ------------------- |
+| `BLOBXFER_STORAGE_ACCOUNT`     | Microsoft Azure Cloud storage account name. |                     |
+| `BLOBXFER_STORAGE_ACCOUNT_KEY` | Microsoft Azure Cloud storage account key.  |                     |
+| `BLOBXFER_REMOTE_PATH`         | Remote Azure path                           | `/docker-db-backup` |
+
+> This service uploads files from the backup target directory `DB_DUMP_TARGET`.
+> If a cleanup configuration is defined in `DB_CLEANUP_TIME`, the remote directory on the Azure storage is cleaned up automatically as well.
+
 ## Maintenance
 
 ### Shell Access
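The `BLOBXFER_*` parameters added to the README above combine with the existing `DB_*` settings; a minimal sketch of a one-off run follows. Every concrete value (storage account, key, database credentials, paths) is a placeholder, not something defined by this commit.

```bash
# Minimal sketch: back up a MariaDB database and upload it to Azure with blobxfer.
# All values below are placeholders; only the variable names come from the README.
docker run -d --name db-backup \
  -e DB_TYPE=mariadb \
  -e DB_HOST=example-db-host \
  -e DB_NAME=example \
  -e DB_USER=example \
  -e DB_PASS=examplepassword \
  -e BACKUP_LOCATION=blobxfer \
  -e BLOBXFER_STORAGE_ACCOUNT=mystorageaccount \
  -e BLOBXFER_STORAGE_ACCOUNT_KEY=mystorageaccountkey \
  -e BLOBXFER_REMOTE_PATH=/docker-db-backup \
  -v "$(pwd)/backups:/backup" \
  tiredofit/db-backup
```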

examples/.gitignore

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+# See http://help.github.com/ignore-files/ for more about ignoring files.
+
+# Example container mounted folders
+**/backups/
+**/db/
Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
+#
+# Example for Microsoft SQL Server
+# upload with blobxfer to azure storage
+#
+
+version: '2'
+
+networks:
+  example-mssql-blobxfer-net:
+    name: example-mssql-blobxfer-net
+
+services:
+  example-mssql-s3-db:
+    hostname: example-db-host
+    image: mcr.microsoft.com/mssql/server:2019-latest
+    container_name: example-mssql-s3-db
+    restart: unless-stopped
+    ports:
+      - "127.0.0.1:11433:1433"
+    networks:
+      example-mssql-blobxfer-net:
+    volumes:
+      - ./tmp/backups:/tmp/backups # shared tmp backup directory
+    environment:
+      ACCEPT_EULA: Y
+      MSSQL_SA_PASSWORD: 5hQa0utRFBpIY3yhoIyE
+      MSSQL_PID: Express
+
+  example-mssql-blobxfer-db-backup:
+    container_name: example-mssql-blobxfer-db-backup
+    # to build and use an image from the current source, run in a terminal:
+    #   docker build -t tiredofit/db-backup-mssql-blobxfer .
+    # or use the published image instead of the locally built one:
+    # image: tiredofit/db-backup
+    image: tiredofit/db-backup-mssql-blobxfer
+    links:
+      - example-mssql-s3-db
+    volumes:
+      - ./backups:/backup
+      - ./tmp/backups:/tmp/backups # shared tmp backup directory
+      #- ./post-script.sh:/assets/custom-scripts/post-script.sh
+    environment:
+      # - DEBUG_MODE=TRUE
+      - DB_TYPE=mssql
+      - DB_HOST=example-db-host
+      # - DB_PORT=1488
+      # - DB_NAME=ALL # [ALL] does not work on SQL Server.
+      # create a database named `test1` manually first
+      - DB_NAME=test1
+      - DB_USER=sa
+      - DB_PASS=5hQa0utRFBpIY3yhoIyE
+      - DB_DUMP_FREQ=1 # backup every minute
+      # - DB_DUMP_BEGIN=0000 # backup starts immediately
+      - DB_CLEANUP_TIME=3 # clean up backups older than 3 minutes
+      - ENABLE_CHECKSUM=TRUE
+      - CHECKSUM=SHA1
+      - COMPRESSION=GZ
+      - SPLIT_DB=FALSE
+      - CONTAINER_ENABLE_MONITORING=FALSE
+      # === blobxfer (Azure) ===
+      - BACKUP_LOCATION=blobxfer
+      # add your Azure storage account name here
+      - BLOBXFER_STORAGE_ACCOUNT={TODO Add Storage Name}
+      # add your Azure storage account key here
+      - BLOBXFER_STORAGE_ACCOUNT_KEY={TODO Add Key}
+      - BLOBXFER_REMOTE_PATH=docker-db-backup
+    restart: always
+    networks:
+      example-mssql-blobxfer-net:

examples/mssql/docker-compose.yml

Lines changed: 61 additions & 0 deletions
@@ -0,0 +1,61 @@
+#
+# Example for Microsoft SQL Server
+#
+
+version: '2'
+
+networks:
+  example-mssql-net:
+    name: example-mssql-net
+
+services:
+  example-mssql-db:
+    hostname: example-db-host
+    image: mcr.microsoft.com/mssql/server:2019-latest
+    container_name: example-mssql-db
+    restart: unless-stopped
+    ports:
+      - "127.0.0.1:11433:1433"
+    networks:
+      example-mssql-net:
+    volumes:
+      - ./tmp/backups:/tmp/backups # shared tmp backup directory
+    environment:
+      ACCEPT_EULA: Y
+      MSSQL_SA_PASSWORD: 5hQa0utRFBpIY3yhoIyE
+      MSSQL_PID: Express
+
+  example-mssql-db-backup:
+    container_name: example-mssql-db-backup
+    # to build and use an image from the current source, run in a terminal:
+    #   docker build -t tiredofit/db-backup-mssql .
+    # or use the published image instead of the locally built one:
+    # image: tiredofit/db-backup
+    image: tiredofit/db-backup-mssql
+    links:
+      - example-mssql-db
+    volumes:
+      - ./backups:/backup
+      - ./tmp/backups:/tmp/backups # shared tmp backup directory
+      #- ./post-script.sh:/assets/custom-scripts/post-script.sh
+    environment:
+      # - DEBUG_MODE=TRUE
+      - DB_TYPE=mssql
+      - DB_HOST=example-db-host
+      # - DB_PORT=1488
+      # - DB_NAME=ALL # [ALL] does not work on SQL Server.
+      # create a database named `test1` manually first
+      - DB_NAME=test1
+      - DB_USER=sa
+      - DB_PASS=5hQa0utRFBpIY3yhoIyE
+      - DB_DUMP_FREQ=1 # backup every minute
+      # - DB_DUMP_BEGIN=0000 # backup starts immediately
+      - DB_CLEANUP_TIME=5 # clean up backups older than 5 minutes
+      - ENABLE_CHECKSUM=FALSE
+      - CHECKSUM=SHA1
+      - COMPRESSION=GZ
+      - SPLIT_DB=FALSE
+      - CONTAINER_ENABLE_MONITORING=FALSE
+    restart: always
+    networks:
+      example-mssql-net:

@@ -1,9 +1,16 @@
 version: '2'
 
+networks:
+  example-db-network:
+    name: example-db-network
+
 services:
   example-db:
+    hostname: example-db-host
     container_name: example-db
     image: mariadb:latest
+    ports:
+      - 13306:3306
     volumes:
       - ./db:/var/lib/mysql
     environment:
@@ -12,6 +19,8 @@ services:
       - MYSQL_USER=example
       - MYSQL_PASSWORD=examplepassword
     restart: always
+    networks:
+      - example-db-network
 
   example-db-backup:
     container_name: example-db-backup
@@ -22,17 +31,21 @@ services:
       - ./backups:/backup
       #- ./post-script.sh:/assets/custom-scripts/post-script.sh
     environment:
+      # - DEBUG_MODE=TRUE
      - DB_TYPE=mariadb
-      - DB_HOST=example-db
+      - DB_HOST=example-db-host
       - DB_NAME=example
       - DB_USER=example
-      - DB_PASS="examplepassword"
-      - DB_DUMP_FREQ=1440
-      - DB_DUMP_BEGIN=0000
-      - DB_CLEANUP_TIME=8640
+      - DB_PASS=examplepassword
+      - DB_DUMP_FREQ=1 # backup every minute
+      # - DB_DUMP_BEGIN=0000 # backup starts immediately
+      - DB_CLEANUP_TIME=5 # clean up backups older than 5 minutes
       - CHECKSUM=SHA1
-      - COMPRESSION=ZSTD
+      - COMPRESSION=GZ
       - SPLIT_DB=FALSE
+      - CONTAINER_ENABLE_MONITORING=FALSE
     restart: always
+    networks:
+      - example-db-network
 
 
install/assets/defaults/10-db-backup

(file mode changed: 100755 → 100644)
Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,7 @@
 #!/command/with-contenv bash
 
 BACKUP_LOCATION=${BACKUP_LOCATION:-"FILESYSTEM"}
+BLOBXFER_REMOTE_PATH=${BLOBXFER_REMOTE_PATH:-"/docker-db-backup"}
 CHECKSUM=${CHECKSUM:-"MD5"}
 COMPRESSION=${COMPRESSION:-"ZSTD"}
 COMPRESSION_LEVEL=${COMPRESSION_LEVEL:-"3"}
@@ -22,5 +23,4 @@ SCRIPT_LOCATION_POST=${SCRIPT_LOCATION_POST:-"/assets/scripts/post/"}
 SIZE_VALUE=${SIZE_VALUE:-"bytes"}
 SKIP_AVAILABILITY_CHECK=${SKIP_AVAILABILITY_CHECK:-"FALSE"}
 SPLIT_DB=${SPLIT_DB:-"TRUE"}
-TEMP_LOCATION=${TEMP_LOCATION:-"/tmp/backups"}
-
+TEMP_LOCATION=${TEMP_LOCATION:-"/tmp/backups"}

install/assets/functions/10-db-backup

(file mode changed: 100755 → 100644)
Lines changed: 19 additions & 4 deletions
@@ -175,7 +175,7 @@ backup_mssql() {
     compression
     pre_dbbackup "${DB_NAME}"
     print_notice "Dumping MSSQL database: '${DB_NAME}'"
-    silent /opt/mssql-tools/bin/sqlcmd -E -C -S ${DB_HOST}\,${DB_PORT} -U ${DB_USER} -P ${DB_PASS} Q "BACKUP DATABASE \[${DB_NAME}\] TO DISK = N'${TEMP_LOCATION}/${target}' WITH NOFORMAT, NOINIT, NAME = '${DB_NAME}-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10"
+    silent /opt/mssql-tools18/bin/sqlcmd -C -S ${DB_HOST}\,${DB_PORT} -U ${DB_USER} -P ${DB_PASS} -Q "BACKUP DATABASE [${DB_NAME}] TO DISK = N'${TEMP_LOCATION}/${target}' WITH NOFORMAT, NOINIT, NAME = '${DB_NAME}-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10"
     exit_code=$?
     check_exit_code $target
     generate_checksum
@@ -460,22 +460,25 @@ cleanup_old_data() {
     if [ -n "${DB_CLEANUP_TIME}" ]; then
         if [ "${master_exit_code}" != 1 ]; then
             case "${BACKUP_LOCATION,,}" in
-                "file" | "filesystem" )
+                "file" | "filesystem" | "blobxfer" )
                     print_info "Cleaning up old backups"
                     mkdir -p "${DB_DUMP_TARGET}"
                     find "${DB_DUMP_TARGET}"/ -mmin +"${DB_CLEANUP_TIME}" -iname "*" -exec rm {} \;
+
+                    print_info "Cleaning up old backups on Azure storage with blobxfer"
+                    blobxfer upload --mode file --remote-path ${BLOBXFER_REMOTE_PATH} --local-path ${DB_DUMP_TARGET} --delete --delete-only
                 ;;
                 "s3" | "minio" )
                     print_info "Cleaning up old backups"
-                    aws ${PARAM_AWS_ENDPOINT_URL} s3 ls s3://${S3_BUCKET}/${S3_PATH} ${s3_ssl} ${s3_ca_cert} ${S3_EXTRA_OPTS} | grep " DIR " -v | while read -r s3_file; do
+                    aws ${PARAM_AWS_ENDPOINT_URL} s3 ls s3://${S3_BUCKET}/${S3_PATH} ${s3_ssl} ${s3_ca_cert} ${S3_EXTRA_OPTS} | grep " DIR " -v | grep " PRE " -v | while read -r s3_file; do
                         s3_createdate=$(echo $s3_file | awk {'print $1" "$2'})
                         s3_createdate=$(date -d "$s3_createdate" "+%s")
                        s3_olderthan=$(echo $(( $(date +%s)-${DB_CLEANUP_TIME}*60 )))
                         if [[ $s3_createdate -le $s3_olderthan ]] ; then
                             s3_filename=$(echo $s3_file | awk {'print $4'})
                             if [ "$s3_filename" != "" ] ; then
                                 print_debug "Deleting $s3_filename"
-                                silent aws ${PARAM_AWS_ENDPOINT_URL} s3 rm s3://${S3_BUCKET}/${S3_PATH}/${s3_filename} ${s3_ssl} ${s3_ca_cert} ${S3_EXTRA_OPTS}
+                                silent aws ${PARAM_AWS_ENDPOINT_URL} s3 rm s3://${S3_BUCKET}/${S3_PATH}${s3_filename} ${s3_ssl} ${s3_ca_cert} ${S3_EXTRA_OPTS}
                             fi
                         fi
 
@@ -639,6 +642,18 @@ move_dbbackup() {
                silent aws ${PARAM_AWS_ENDPOINT_URL} s3 cp ${TEMP_LOCATION}/*.${checksum_extension} s3://${S3_BUCKET}/${S3_PATH}/ ${s3_ssl} ${s3_ca_cert} ${S3_EXTRA_OPTS}
            fi
 
+            rm -rf "${TEMP_LOCATION}"/*."${checksum_extension}"
+            rm -rf "${TEMP_LOCATION}"/"${target}"
+        ;;
+        "blobxfer" )
+            print_info "Moving backup to Azure storage with blobxfer"
+
+            mkdir -p "${DB_DUMP_TARGET}"
+            mv "${TEMP_LOCATION}"/*."${checksum_extension}" "${DB_DUMP_TARGET}"/
+            mv "${TEMP_LOCATION}"/"${target}" "${DB_DUMP_TARGET}"/"${target}"
+
+            blobxfer upload --mode file --remote-path ${BLOBXFER_REMOTE_PATH} --local-path ${DB_DUMP_TARGET}
+
             rm -rf "${TEMP_LOCATION}"/*."${checksum_extension}"
             rm -rf "${TEMP_LOCATION}"/"${target}"
         ;;
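Taken together, the new `blobxfer` code path stages the finished dump and checksum from `TEMP_LOCATION` into `DB_DUMP_TARGET`, mirrors that directory to the Azure remote path, and, when `DB_CLEANUP_TIME` is set, prunes stale local files and re-syncs with `--delete --delete-only` so the remote copies are removed too. A condensed sketch of that sequence (not the actual functions above; credentials are taken from the `BLOBXFER_*` variables):

```bash
# Condensed sketch of the blobxfer flow added in this commit.

# 1. Stage the dump and its checksum where the filesystem backup path expects them.
mkdir -p "${DB_DUMP_TARGET}"
mv "${TEMP_LOCATION}"/*."${checksum_extension}" "${DB_DUMP_TARGET}"/
mv "${TEMP_LOCATION}/${target}" "${DB_DUMP_TARGET}/${target}"

# 2. Mirror the local backup directory to the Azure remote path.
blobxfer upload --mode file \
  --remote-path "${BLOBXFER_REMOTE_PATH}" \
  --local-path "${DB_DUMP_TARGET}"

# 3. On cleanup runs, delete stale local files, then let blobxfer delete the
#    matching remote files (--delete-only skips re-uploading anything).
find "${DB_DUMP_TARGET}"/ -mmin +"${DB_CLEANUP_TIME}" -exec rm {} \;
blobxfer upload --mode file \
  --remote-path "${BLOBXFER_REMOTE_PATH}" \
  --local-path "${DB_DUMP_TARGET}" \
  --delete --delete-only
```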
