This repository was archived by the owner on Aug 16, 2021. It is now read-only.

Commit 5478317

Use ebs volume with backup

1 parent 1ef4355

File tree: 1 file changed (+95 −3 lines)


nancy_run.sh

Lines changed: 95 additions & 3 deletions
@@ -19,6 +19,24 @@ EBS_SIZE_MULTIPLIER=5
 POSTGRES_VERSION_DEFAULT=10
 AWS_BLOCK_DURATION=0
 
+#######################################
+# Attach ebs drive with db backup
+# Globals:
+#   DOCKER_MACHINE, AWS_REGION, DB_EBS_VOLUME_ID
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function attach_db_ebs_drive() {
+  docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/backup\""
+  instance_id=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id)
+  attachResult=$(aws --region=$AWS_REGION ec2 attach-volume --device /dev/xvdc --volume-id $DB_EBS_VOLUME_ID --instance-id $instance_id)
+  sleep 10
+  docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdc /home/backup
+  docker-machine ssh $DOCKER_MACHINE "sudo df -h /dev/xvdc"
+}
+
 #######################################
 # Print a help
 # Globals:
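
Note: attach_db_ebs_drive waits a fixed `sleep 10` before mounting the device. A hedged alternative (not part of this commit) would be to poll the volume state with the AWS CLI waiter, reusing the same AWS_REGION and DB_EBS_VOLUME_ID globals:

    # Sketch only: wait until the EBS volume reports "in-use" instead of sleeping
    aws --region=$AWS_REGION ec2 wait volume-in-use --volume-ids $DB_EBS_VOLUME_ID
    docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdc /home/backup
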
@@ -139,6 +157,18 @@ function help() {
     - dump in \"custom\" format, made with 'pg_dump -Fc ..' ('*.pgdmp'),
     * sequence of SQL commands specified as in a form of plain text.
 
+  \033[1m--db-ebs-volume-id\033[22m (string)
+
+  ID of the Amazon EBS volume with a backup of the database.
+
+  The following files are expected in the root of the volume:
+    - base.tar.gz
+    - pg_xlog.tar.gz
+
+  They are created as a result of executing:
+    'pg_basebackup -U postgres -z -ZX -P -Ft -D /ebs-db-vol-root'
+  where X is any compression level.
+
   \033[1m--commands-after-container-init\033[22m (string)
 
   Shell commands to be executed after the container initialization. Can be used
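
For instance, with compression level 5 the command from the help text above becomes the following. A minimal sketch, assuming the volume is already mounted at /ebs-db-vol-root as described there:

    # Sketch only: concrete instance of the command from the help text (X = 5)
    pg_basebackup -U postgres -z -Z5 -P -Ft -D /ebs-db-vol-root
    ls /ebs-db-vol-root   # expected per the help text: base.tar.gz pg_xlog.tar.gz
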
@@ -492,7 +522,7 @@ function check_cli_parameters() {
   [[ ! -z ${WORKLOAD_REAL+x} ]] && let workloads_count=$workloads_count+1
   [[ ! -z ${WORKLOAD_CUSTOM_SQL+x} ]] && let workloads_count=$workloads_count+1
 
-  if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]]; then
+  if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]] && [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then
     err "ERROR: The object (database) is not defined."
     exit 1
   fi
@@ -1006,6 +1036,8 @@ while [ $# -gt 0 ]; do
       AWS_ZONE="$2"; shift 2 ;;
     --aws-block-duration )
       AWS_BLOCK_DURATION=$2; shift 2 ;;
+    --db-ebs-volume-id )
+      DB_EBS_VOLUME_ID=$2; shift 2 ;;
 
     --s3cfg-path )
       S3_CFG_PATH="$2"; shift 2 ;;
@@ -1084,6 +1116,10 @@ elif [[ "$RUN_ON" == "aws" ]]; then
   msg " To connect docker machine use:"
   msg " docker-machine ssh $DOCKER_MACHINE"
 
+  if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
+    attach_db_ebs_drive;
+  fi
+
   docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\""
   if [[ "${AWS_EC2_TYPE:0:2}" == "i3" ]]; then
     msg "Using high-speed NVMe SSD disks"
@@ -1099,9 +1135,10 @@ elif [[ "$RUN_ON" == "aws" ]]; then
   CONTAINER_HASH=$( \
     docker `docker-machine config $DOCKER_MACHINE` run \
       --name="pg_nancy_${CURRENT_TS}" \
+      --privileged \
       -v /home/ubuntu:/machine_home \
       -v /home/storage:/storage \
-      -v /home/basedump:/basedump \
+      -v /home/backup:/backup \
       -dit "postgresmen/postgres-with-stuff:pg${PG_VERSION}"
   )
   DOCKER_CONFIG=$(docker-machine config $DOCKER_MACHINE)
@@ -1117,6 +1154,47 @@ MACHINE_HOME="/machine_home/nancy_${CONTAINER_HASH}"
 alias docker_exec='docker $DOCKER_CONFIG exec -i ${CONTAINER_HASH} '
 CPU_CNT=$(docker_exec bash -c "cat /proc/cpuinfo | grep processor | wc -l") # for execute in docker
 
+#######################################
+# Extract the database backup from the attached ebs volume into the data directory
+# Globals:
+#   PG_VERSION
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function cp_db_ebs_backup() {
+  # Here we assume that postgres is stopped
+  msg "Restore (cp) database backup."
+  docker_exec bash -c "rm -rf /var/lib/postgresql/9.6/main/*"
+
+  OP_START_TIME=$(date +%s);
+  docker_exec bash -c "rm -rf /var/lib/postgresql/$PG_VERSION/main/*"
+  docker_exec bash -c "tar -C /storage/postgresql/$PG_VERSION/main/ -xzvf /backup/base.tar.gz"
+  docker_exec bash -c "tar -C /storage/postgresql/$PG_VERSION/main/pg_xlog -xzvf /backup/pg_xlog.tar.gz"
+  END_TIME=$(date +%s);
+  DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
+  msg "Backup copied in $DURATION."
+
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main"
+  docker_exec bash -c "localedef -f UTF-8 -i en_US en_US.UTF-8"
+}
+
+#######################################
+# Detach the drive with the database backup
+# Globals:
+#   DOCKER_MACHINE, DB_EBS_VOLUME_ID
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function dettach_db_ebs_drive() {
+  docker_exec bash -c "umount /backup"
+  docker-machine ssh $DOCKER_MACHINE sudo umount /home/backup
+  dettachResult=$(aws --region=$AWS_REGION ec2 detach-volume --volume-id $DB_EBS_VOLUME_ID)
+}
+
 docker_exec bash -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME"
 if [[ "$RUN_ON" == "aws" ]]; then
   docker-machine ssh $DOCKER_MACHINE "sudo chmod a+w /home/storage"
@@ -1128,6 +1206,12 @@ if [[ "$RUN_ON" == "aws" ]]; then
   sleep 2 # wait for postgres stopped
   docker_exec bash -c "sudo mv /var/lib/postgresql /storage/"
   docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql"
+
+  if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
+    cp_db_ebs_backup;
+    dettach_db_ebs_drive
+  fi
+
   docker_exec bash -c "sudo /etc/init.d/postgresql start"
   sleep 2 # wait for postgres started
 fi
@@ -1480,6 +1564,12 @@ function collect_results() {
   msg "Time taken to generate and collect artifacts: $DURATION."
 }
 
+if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'"
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database $DB_NAME rename to test;'"
+  DB_NAME=test
+fi
+
 [ ! -z ${S3_CFG_PATH+x} ] && copy_file $S3_CFG_PATH \
   && docker_exec cp $MACHINE_HOME/.s3cfg /root/.s3cfg
 [ ! -z ${DB_DUMP+x} ] && copy_file $DB_DUMP
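
The rename above assumes no other sessions are connected to $DB_NAME; ALTER DATABASE ... RENAME fails while the database has active connections. A hedged pre-step (not part of this commit) could terminate them first:

    # Sketch only: drop other connections to $DB_NAME before renaming it
    docker_exec bash -c "psql -U postgres -c \"select pg_terminate_backend(pid) from pg_stat_activity where datname = '$DB_NAME' and pid <> pg_backend_pid();\""
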
@@ -1496,7 +1586,9 @@ sleep 2 # wait for postgres up&running
 
 apply_commands_after_container_init
 apply_sql_before_db_restore
-restore_dump
+if [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then
+  restore_dump;
+fi
 apply_sql_after_db_restore
 docker_exec bash -c "psql -U postgres $DB_NAME -b -c 'create extension if not exists pg_stat_statements;' $VERBOSE_OUTPUT_REDIRECT"
 apply_ddl_do_code
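
Taken together, the commit lets Nancy restore the database from an EBS backup volume instead of a dump. A hypothetical invocation; the option names other than --db-ebs-volume-id are inferred from variables visible in this diff (RUN_ON, AWS_EC2_TYPE, WORKLOAD_CUSTOM_SQL) and all values are placeholders:

    # Sketch only: run Nancy against a database restored from an EBS backup volume
    ./nancy_run.sh \
      --run-on aws \
      --aws-ec2-type i3.large \
      --db-ebs-volume-id vol-0123456789abcdef0 \
      --workload-custom-sql "select 1;"
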
