Commit 48bea7a

Merge pull request #11 from claudioaltamura/master
chg: added AUTH for redis
2 parents: fcafa17 + c3179d5
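
The practical effect is that a password-protected Redis instance can now be backed up by supplying the existing DB_PASS variable to the container. A minimal sketch of such a run; DB_PASS and DB_PORT are the variables visible in this diff, while the image name and the DB_TYPE/DB_HOST variable names are assumptions about the surrounding project:

```bash
# Hypothetical invocation: the image name and the DB_TYPE/DB_HOST variable names
# are assumed; only DB_PASS and DB_PORT appear in the diff itself.
docker run -d --name db-backup \
  -e DB_TYPE=redis \
  -e DB_HOST=redis \
  -e DB_PORT=6379 \
  -e DB_PASS=mysecretpassword \
  your-org/db-backup
```

With DB_PASS unset, REDIS_PASS_STR stays empty and the redis-cli calls behave exactly as before the change.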

File tree

1 file changed: +13 −12 lines

  • install/etc/s6/services/10-db-backup/run


install/etc/s6/services/10-db-backup/run

Lines changed: 13 additions & 12 deletions
@@ -84,6 +84,7 @@ fi
     "redis" | "REDIS" )
         DBTYPE=redis
         DBPORT=${DB_PORT:-6379}
+        [[ ( -n "${DB_PASS}" ) ]] && REDIS_PASS_STR=" -a ${DBPASS}"
         ;;
     "rethink" | "RETHINK" )
         DBTYPE=rethink
@@ -103,9 +104,9 @@ function backup_couch() {
 }
 
 function backup_mysql() {
-    if [ "$SPLIT_DB" = "TRUE" ] || [ "$SPLIT_DB" = "true" ]; then
+    if [ "$SPLIT_DB" = "TRUE" ] || [ "$SPLIT_DB" = "true" ]; then
         DATABASES=`mysql -h $DBHOST -u$DBUSER -p$DBPASS --batch -e "SHOW DATABASES;" | grep -v Database|grep -v schema`
-
+
         for db in $DATABASES; do
             if [[ "$db" != "information_schema" ]] && [[ "$db" != _* ]] ; then
                 echo "** [db-backup] Dumping database: $db"
@@ -137,7 +138,7 @@ function backup_mongo() {
     mongodump --out ${TMPDIR}/${TARGET} --host ${DBHOST} --port ${DBPORT} ${MONGO_USER_STR}${MONGO_PASS_STR}${MONGO_DB_STR} ${EXTRA_OPTS}
     cd ${TMPDIR}
     tar cf ${TARGET}.tar ${TARGET}/*
-    TARGET=${TARGET}.tar
+    TARGET=${TARGET}.tar
     generate_md5
     compression
     move_backup
@@ -166,13 +167,13 @@ function backup_pgsql() {
 
 function backup_redis() {
     TARGET=redis_${db}_${DBHOST}_${now}.rdb
-    echo bgsave | redis-cli -h ${DBHOST} -p ${DBPORT} --rdb ${TMPDIR}/${TARGET}
+    echo bgsave | redis-cli -h ${DBHOST} -p ${DBPORT} ${REDIS_PASS_STR} --rdb ${TMPDIR}/${TARGET}
     echo "** [db-backup] Dumping Redis - Flushing Redis Cache First"
     sleep 10
     try=5
     while [ $try -gt 0 ] ; do
-        saved=$(echo 'info Persistence' | redis-cli -h ${DBHOST} -p ${DBPORT} | awk '/rdb_bgsave_in_progress:0/{print "saved"}')
-        ok=$(echo 'info Persistence' | redis-cli -h ${DBHOST} -p ${DBPORT} | awk '/rdb_last_bgsave_status:ok/{print "ok"}')
+        saved=$(echo 'info Persistence' | redis-cli -h ${DBHOST} -p ${DBPORT} ${REDIS_PASS_STR} | awk '/rdb_bgsave_in_progress:0/{print "saved"}')
+        ok=$(echo 'info Persistence' | redis-cli -h ${DBHOST} -p ${DBPORT} ${REDIS_PASS_STR} | awk '/rdb_last_bgsave_status:ok/{print "ok"}')
         if [[ "$saved" = "saved" ]] && [[ "$ok" = "ok" ]]; then
             echo "** [db-backup] Redis Backup Complete"
         fi
@@ -186,7 +187,7 @@ function backup_redis() {
 }
 
 function backup_rethink() {
-    TARGET=rethink_${db}_${DBHOST}_${now}.tar.gz
+    TARGET=rethink_${db}_${DBHOST}_${now}.tar.gz
     echo "** [db-backup] Dumping rethink Database: $db"
     rethinkdb dump -f ${TMPDIR}/${TARGET} -c ${DBHOST}:${DBPORT} ${RETHINK_PASS_STR} ${RETHINK_DB_STR}
     move_backup
@@ -212,7 +213,7 @@ function compression() {
 }
 
 function generate_md5() {
-    if [ "$MD5" = "TRUE" ] || [ "$MD5" = "true" ] ; then
+    if [ "$MD5" = "TRUE" ] || [ "$MD5" = "true" ] ; then
         cd $TMPDIR
         md5sum ${TARGET} > ${TARGET}.md5
     fi
@@ -225,13 +226,13 @@ function move_backup() {
 }
 
 
-### Container Startup
+### Container Startup
 echo '** [db-backup] Initialized at at '$(date)
 
 ### Wait for Next time to start backup
 current_time=$(date +"%s")
 today=$(date +"%Y%m%d")
-
+
 if [[ $DB_DUMP_BEGIN =~ ^\+(.*)$ ]]; then
     waittime=$(( ${BASH_REMATCH[1]} * 60 ))
 else
@@ -279,12 +280,12 @@ echo '** [db-backup] Initialized at at '$(date)
     ;;
 esac
 
-### Zabbix
+### Zabbix
 if [ "$ENABLE_ZABBIX" = "TRUE" ] || [ "$ENABLE_ZABBIX" = "true" ]; then
     zabbix_sender -c /etc/zabbix/zabbix_agentd.conf -k dbbackup.size -o `stat -c%s ${DB_DUMP_TARGET}/${TARGET}`
     zabbix_sender -c /etc/zabbix/zabbix_agentd.conf -k dbbackup.datetime -o `date -r ${DB_DUMP_TARGET}/${TARGET} +'%s'`
 fi
-
+
 ### Automatic Cleanup
 if [[ -n "$DB_CLEANUP_TIME" ]]; then
     find $DB_DUMP_TARGET/ -mmin +$DB_CLEANUP_TIME -iname "$DBTYPE_$DBNAME_*.*" -exec rm {} \;
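
The change to backup_redis amounts to building the optional AUTH argument once and threading it through both the RDB dump and the BGSAVE status polling. A self-contained sketch of that pattern outside the container, reusing the DB_HOST/DB_PORT/DB_PASS names from the diff (the output path and retry interval are illustrative):

```bash
#!/usr/bin/env bash
# Sketch of the AUTH-aware Redis backup pattern this commit introduces.
# DB_HOST/DB_PORT/DB_PASS mirror the diff; the /tmp output path is illustrative.
DB_HOST=${DB_HOST:-localhost}
DB_PORT=${DB_PORT:-6379}

# Build the optional "-a <password>" argument only when a password is set,
# just as the diff does with REDIS_PASS_STR.
REDIS_PASS_STR=""
[[ -n "${DB_PASS}" ]] && REDIS_PASS_STR=" -a ${DB_PASS}"

TARGET="redis_${DB_HOST}_$(date +%Y%m%d%H%M%S).rdb"

# Request an RDB snapshot; --rdb writes the dump to the given file.
redis-cli -h "${DB_HOST}" -p "${DB_PORT}" ${REDIS_PASS_STR} --rdb "/tmp/${TARGET}"

# Poll INFO Persistence until no background save is running and the last one
# succeeded, mirroring the loop in backup_redis.
tries=5
while (( tries > 0 )); do
    info=$(redis-cli -h "${DB_HOST}" -p "${DB_PORT}" ${REDIS_PASS_STR} info persistence)
    if grep -q 'rdb_bgsave_in_progress:0' <<< "${info}" && \
       grep -q 'rdb_last_bgsave_status:ok' <<< "${info}"; then
        echo "Redis backup complete: /tmp/${TARGET}"
        break
    fi
    sleep 5
    (( tries-- ))
done
```

Threading a single pre-built argument string through every redis-cli call keeps the no-password case identical to the old behaviour, since REDIS_PASS_STR simply expands to nothing.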
