@@ -19,6 +19,24 @@ EBS_SIZE_MULTIPLIER=5
 POSTGRES_VERSION_DEFAULT=10
 AWS_BLOCK_DURATION=0
 
+#######################################
+# Attach the EBS drive with the database backup
+# Globals:
+#   DOCKER_MACHINE, AWS_REGION, DB_EBS_VOLUME_ID
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function attach_db_ebs_drive() {
+  docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/backup\""
+  instance_id=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id)
+  attachResult=$(aws --region=$AWS_REGION ec2 attach-volume --device /dev/xvdc --volume-id $DB_EBS_VOLUME_ID --instance-id $instance_id)
+  sleep 10 # give the volume time to attach before mounting
+  docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdc /home/backup
+  docker-machine ssh $DOCKER_MACHINE "sudo df -h /dev/xvdc"
+}
+
 #######################################
 # Print a help
 # Globals:
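
Note: the fixed `sleep 10` in attach_db_ebs_drive can lose the race on slow attachments. A sturdier wait would use the AWS CLI's standard waiter; a sketch, using the same globals as the patch (not part of the change):

# Sketch: block until the volume reaches the attached ("in-use") state
# instead of sleeping a fixed 10 seconds.
aws --region=$AWS_REGION ec2 wait volume-in-use --volume-ids $DB_EBS_VOLUME_ID
docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdc /home/backup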
@@ -139,6 +157,18 @@ function help() {
       - dump in \"custom\" format, made with 'pg_dump -Fc ..' ('*.pgdmp'),
     * sequence of SQL commands specified as in a form of plain text.
 
+  \033[1m--db-ebs-volume-id\033[22m (string)
+
+    ID of the Amazon EBS volume with the database backup.
+
+    The root of the volume is expected to contain:
+      - base.tar.gz
+      - pg_xlog.tar.gz
+
+    These files are created as the result of executing:
+      'pg_basebackup -U postgres -z -ZX -P -Ft -D /ebs-db-vol-root'
+    where X is any compression level.
+
   \033[1m--commands-after-container-init\033[22m (string)
 
     Shell commands to be executed after the container initialization. Can be used
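
For reference, a volume with the expected layout can be prepared roughly as follows. This is a sketch: the device name, mount point, and compression level 5 (standing in for X) are illustrative, and -X stream is assumed so WAL ends up in its own pg_xlog.tar.gz:

# Sketch: produce base.tar.gz and pg_xlog.tar.gz at the root of a fresh volume.
sudo mkfs.ext4 /dev/xvdc
sudo mkdir -p /ebs-db-vol-root
sudo mount /dev/xvdc /ebs-db-vol-root
pg_basebackup -U postgres -z -Z5 -P -Ft -X stream -D /ebs-db-vol-root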
@@ -492,7 +522,7 @@ function check_cli_parameters() {
   [[ ! -z ${WORKLOAD_REAL+x} ]] && let workloads_count=$workloads_count+1
   [[ ! -z ${WORKLOAD_CUSTOM_SQL+x} ]] && let workloads_count=$workloads_count+1
 
-  if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]]; then
+  if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]] && [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then
     err "ERROR: The object (database) is not defined."
     exit 1
   fi
@@ -1006,6 +1036,8 @@ while [ $# -gt 0 ]; do
       AWS_ZONE="$2"; shift 2 ;;
     --aws-block-duration )
       AWS_BLOCK_DURATION=$2; shift 2 ;;
+    --db-ebs-volume-id )
+      DB_EBS_VOLUME_ID=$2; shift 2 ;;
 
     --s3cfg-path )
       S3_CFG_PATH="$2"; shift 2 ;;
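
With the option wired into the parser, a run sourcing the database from a backup volume would look something like this. The script name, the volume id, and every flag other than --db-ebs-volume-id are placeholders:

# Hypothetical invocation: take the database from an EBS backup volume
# instead of a dump file.
./nancy_run.sh --run-on aws --db-ebs-volume-id vol-0123456789abcdef0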
@@ -1084,6 +1116,10 @@ elif [[ "$RUN_ON" == "aws" ]]; then
   msg "To connect docker machine use:"
   msg "  docker-machine ssh $DOCKER_MACHINE"
 
+  if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
+    attach_db_ebs_drive
+  fi
+
   docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\""
   if [[ "${AWS_EC2_TYPE:0:2}" == "i3" ]]; then
     msg "Using high-speed NVMe SSD disks"
@@ -1099,9 +1135,10 @@ elif [[ "$RUN_ON" == "aws" ]]; then
   CONTAINER_HASH=$( \
     docker `docker-machine config $DOCKER_MACHINE` run \
       --name="pg_nancy_${CURRENT_TS}" \
+      --privileged \
       -v /home/ubuntu:/machine_home \
       -v /home/storage:/storage \
-      -v /home/basedump:/basedump \
+      -v /home/backup:/backup \
       -dit "postgresmen/postgres-with-stuff:pg${PG_VERSION}"
   )
   DOCKER_CONFIG=$(docker-machine config $DOCKER_MACHINE)
@@ -1117,6 +1154,47 @@ MACHINE_HOME="/machine_home/nancy_${CONTAINER_HASH}"
 alias docker_exec='docker $DOCKER_CONFIG exec -i ${CONTAINER_HASH} '
 CPU_CNT=$(docker_exec bash -c "cat /proc/cpuinfo | grep processor | wc -l") # for execute in docker
 
+#######################################
+# Extract the database backup from the attached EBS volume into the data directory
+# Globals:
+#   PG_VERSION
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function cp_db_ebs_backup() {
+  # Assumes postgres is already stopped at this point.
+  msg "Restoring database backup from the EBS volume."
+
+  OP_START_TIME=$(date +%s)
+  docker_exec bash -c "rm -rf /var/lib/postgresql/$PG_VERSION/main/*"
+  docker_exec bash -c "tar -C /storage/postgresql/$PG_VERSION/main/ -xzvf /backup/base.tar.gz"
+  docker_exec bash -c "tar -C /storage/postgresql/$PG_VERSION/main/pg_xlog -xzvf /backup/pg_xlog.tar.gz"
+  END_TIME=$(date +%s)
+  DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
+  msg "Backup copied in $DURATION."
+
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main"
+  docker_exec bash -c "localedef -f UTF-8 -i en_US en_US.UTF-8"
+}
+
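
One caveat worth flagging: PostgreSQL 10 renamed pg_xlog to pg_wal, and this script's POSTGRES_VERSION_DEFAULT is 10, so the hardcoded pg_xlog path only fits 9.x backups. A version-aware sketch (not part of the patch):

# Sketch: choose the WAL directory and tarball name by major version
# (pg_xlog became pg_wal in PostgreSQL 10).
if [[ "$PG_VERSION" == 9.* ]]; then wal_dir="pg_xlog"; else wal_dir="pg_wal"; fi
docker_exec bash -c "tar -C /storage/postgresql/$PG_VERSION/main/$wal_dir -xzvf /backup/${wal_dir}.tar.gz"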
+#######################################
+# Detach the drive with the database backup
+# Globals:
+#   DOCKER_MACHINE, AWS_REGION, DB_EBS_VOLUME_ID
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function detach_db_ebs_drive() {
+  docker_exec bash -c "umount /backup"
+  docker-machine ssh $DOCKER_MACHINE sudo umount /home/backup
+  detachResult=$(aws --region=$AWS_REGION ec2 detach-volume --volume-id $DB_EBS_VOLUME_ID)
+}
+
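
Symmetrically to the attach path, detach-volume returns before the volume is actually released; if anything re-attaches or deletes it right afterwards, the standard waiter avoids the race (sketch, not in the patch):

# Sketch: block until the detached volume is back in the "available" state.
aws --region=$AWS_REGION ec2 wait volume-available --volume-ids $DB_EBS_VOLUME_ID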
 docker_exec bash -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME"
 if [[ "$RUN_ON" == "aws" ]]; then
   docker-machine ssh $DOCKER_MACHINE "sudo chmod a+w /home/storage"
@@ -1128,6 +1206,12 @@ if [[ "$RUN_ON" == "aws" ]]; then
   sleep 2 # wait for postgres stopped
   docker_exec bash -c "sudo mv /var/lib/postgresql /storage/"
   docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql"
+
+  if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
+    cp_db_ebs_backup
+    detach_db_ebs_drive
+  fi
+
   docker_exec bash -c "sudo /etc/init.d/postgresql start"
   sleep 2 # wait for postgres started
 fi
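
The `sleep 2` waits around the postgres restart are best-effort; a deterministic readiness check with the stock pg_isready utility could replace the post-start sleep (sketch):

# Sketch: poll until the server accepts connections instead of sleeping.
until docker_exec bash -c "pg_isready -U postgres" > /dev/null 2>&1; do
  sleep 1
done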
@@ -1480,6 +1564,12 @@ function collect_results() {
   msg "Time taken to generate and collect artifacts: $DURATION."
 }
 
+# Rename the database restored from the EBS backup to "test",
+# the name the rest of the pipeline expects.
+if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'"
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database $DB_NAME rename to test;'"
+  DB_NAME=test
+fi
+
 [ ! -z ${S3_CFG_PATH+x} ] && copy_file $S3_CFG_PATH \
   && docker_exec cp $MACHINE_HOME/.s3cfg /root/.s3cfg
 [ ! -z ${DB_DUMP+x} ] && copy_file $DB_DUMP
@@ -1496,7 +1586,9 @@ sleep 2 # wait for postgres up&running
 
 apply_commands_after_container_init
 apply_sql_before_db_restore
-restore_dump
+if [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then
+  restore_dump
+fi
 apply_sql_after_db_restore
 docker_exec bash -c "psql -U postgres $DB_NAME -b -c 'create extension if not exists pg_stat_statements;' $VERBOSE_OUTPUT_REDIRECT"
 apply_ddl_do_code