Skip to content
This repository was archived by the owner on Aug 16, 2021. It is now read-only.

Commit 62afc7c

Browse files
authored
Merge pull request #69 from postgres-ai/dmius-pg-restore
Use pg_restore to restore a directory-format or single-file dump
2 parents ae5d98b + 573b0c6 commit 62afc7c

File tree

2 files changed

+70
-18
lines changed

2 files changed

+70
-18
lines changed

nancy_run.sh

Lines changed: 69 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,7 @@ while true; do
9595
9696
Specify the path to database dump (created by pg_dump) to be used as an input.
9797
98-
\033[1m--after-db-init-code\033[22m (string)
98+
\033[1m--sql-after-db-restore\033[22m (string)
9999
100100
Specify additional commands to be executed after database is initiated (dump
101101
loaded or snapshot attached).
@@ -199,10 +199,12 @@ while true; do
199199
DB_PREPARED_SNAPSHOT="$2"; shift 2 ;;
200200
--db-dump )
201201
DB_DUMP_PATH="$2"; shift 2 ;;
202-
--after-db-init-code )
202+
--commands-after-docker-init )
203+
AFTER_DOCKER_INIT_CODE="$2"; shift 2 ;;
204+
--sql-after-db-restore )
203205
#s3 url|filename|content
204206
AFTER_DB_INIT_CODE="$2"; shift 2 ;;
205-
--before-db-init-code )
207+
--sql-before-db-restore )
206208
#s3 url|filename|content
207209
BEFORE_DB_INIT_CODE="$2"; shift 2 ;;
208210
--workload-real )
@@ -264,6 +266,7 @@ RUN_ON=${RUN_ON:-localhost}
264266

265267
if [ $DEBUG -eq 1 ]; then
266268
echo "debug: ${DEBUG}"
269+
echo "debug timeout: ${DEBUG_TIMEOUT}"
267270
echo "run_on: ${RUN_ON}"
268271
echo "container_id: ${CONTAINER_ID}"
269272
echo "aws_ec2_type: ${AWS_EC2_TYPE}"
@@ -284,6 +287,7 @@ if [ $DEBUG -eq 1 ]; then
284287
echo "s3-cfg-path: $S3_CFG_PATH"
285288
echo "tmp-path: $TMP_PATH"
286289
echo "after-db-init-code: $AFTER_DB_INIT_CODE"
290+
echo "after_docker_init_code: $AFTER_DOCKER_INIT_CODE"
287291
echo "before-db-init-code: $BEFORE_DB_INIT_CODE"
288292
echo "ebs-volume-size: $EBS_VOLUME_SIZE"
289293
fi
@@ -466,25 +470,36 @@ function checkParams() {
466470
fi
467471
fi
468472

473+
if [ ! -z ${AFTER_DOCKER_INIT_CODE+x} ]; then
474+
checkPath AFTER_DOCKER_INIT_CODE
475+
if [ "$?" -ne "0" ]; then
476+
#>&2 echo "WARNING: Value given as after_db_init_code: '$AFTER_DOCKER_INIT_CODE' not found as file will use as content"
477+
echo "$AFTER_DOCKER_INIT_CODE" > $TMP_PATH/after_docker_init_code_tmp.sh
478+
AFTER_DOCKER_INIT_CODE="$TMP_PATH/after_docker_init_code_tmp.sh"
479+
else
480+
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as commands-after-docker-init will use as filename"
481+
fi
482+
fi
483+
469484
if [ ! -z ${AFTER_DB_INIT_CODE+x} ]; then
470485
checkPath AFTER_DB_INIT_CODE
471486
if [ "$?" -ne "0" ]; then
472487
#>&2 echo "WARNING: Value given as after_db_init_code: '$AFTER_DB_INIT_CODE' not found as file will use as content"
473488
echo "$AFTER_DB_INIT_CODE" > $TMP_PATH/after_db_init_code_tmp.sql
474489
AFTER_DB_INIT_CODE="$TMP_PATH/after_db_init_code_tmp.sql"
475490
else
476-
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as after_db_init_code will use as filename"
491+
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as sql-after-db-restore will use as filename"
477492
fi
478493
fi
479494

480495
if [ ! -z ${BEFORE_DB_INIT_CODE+x} ]; then
481496
checkPath BEFORE_DB_INIT_CODE
482497
if [ "$?" -ne "0" ]; then
483-
#>&2 echo "WARNING: Value given as after_db_init_code: '$AFTER_DB_INIT_CODE' not found as file will use as content"
498+
#>&2 echo "WARNING: Value given as before_db_init_code: '$BEFORE_DB_INIT_CODE' not found as file will use as content"
484499
echo "$BEFORE_DB_INIT_CODE" > $TMP_PATH/before_db_init_code_tmp.sql
485500
BEFORE_DB_INIT_CODE="$TMP_PATH/before_db_init_code_tmp.sql"
486501
else
487-
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as before_db_init_code will use as filename"
502+
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as sql-before-db-restore will use as filename"
488503
fi
489504
fi
490505

@@ -570,7 +585,7 @@ if ([ "$RUN_ON" == "aws" ] && [ ! ${AWS_EC2_TYPE:0:2} == "i3" ] && \
570585
fi
571586

572587
set -ueo pipefail
573-
[ $DEBUG -eq 1 ] && set -ueox pipefail # to debug
588+
[ $DEBUG -eq 1 ] && set -uox pipefail # to debug
574589
shopt -s expand_aliases
575590

576591
## Docker tools
@@ -614,6 +629,7 @@ function createDockerMachine() {
614629
--amazonec2-ssh-keypath="$6" \
615630
--amazonec2-instance-type=$2 \
616631
--amazonec2-spot-price=$3 \
632+
--amazonec2-zone $7 \
617633
$1 2> >(grep -v "failed waiting for successful resource state" >&2) &
618634
# --amazonec2-block-duration-minutes=$4 \
619635
}
@@ -641,6 +657,7 @@ function cleanupAndExit {
641657
echo "$(date "+%Y-%m-%d %H:%M:%S"): Remove temp files..." # if exists
642658
docker $dockerConfig exec -i ${containerHash} sh -c "sudo rm -rf $MACHINE_HOME"
643659
rm -f "$TMP_PATH/after_db_init_code_tmp.sql"
660+
rm -f "$TMP_PATH/after_docker_init_code_tmp.sh"
644661
rm -f "$TMP_PATH/before_db_init_code_tmp.sql"
645662
rm -f "$TMP_PATH/workload_custom_sql_tmp.sql"
646663
rm -f "$TMP_PATH/target_ddl_do_tmp.sql"
@@ -684,17 +701,24 @@ elif [[ "$RUN_ON" = "aws" ]]; then
684701
--start-time=$(date +%s) --product-descriptions="Linux/UNIX (Amazon VPC)" \
685702
--query 'SpotPriceHistory[*].{az:AvailabilityZone, price:SpotPrice}'
686703
)
687-
maxprice=$(echo $prices | jq 'max_by(.price) | .price')
688-
maxprice="${maxprice/\"/}"
689-
maxprice="${maxprice/\"/}"
690-
echo "$(date "+%Y-%m-%d %H:%M:%S"): Max price from history: $maxprice"
691-
multiplier="1.1"
692-
price=$(echo "$maxprice * $multiplier" | bc -l)
704+
minprice=$(echo $prices | jq 'min_by(.price) | .price')
705+
region=$(echo $prices | jq 'min_by(.price) | .az')
706+
region="${region/\"/}"
707+
region="${region/\"/}"
708+
minprice="${minprice/\"/}"
709+
minprice="${minprice/\"/}"
710+
zone=${region: -1}
711+
echo "$(date "+%Y-%m-%d %H:%M:%S"): Min price from history: $minprice in $region (zone: $zone)"
712+
multiplier="1.01"
713+
price=$(echo "$minprice * $multiplier" | bc -l)
693714
echo "$(date "+%Y-%m-%d %H:%M:%S"): Increased price: $price"
694715
EC2_PRICE=$price
716+
if [ -z $zone ]; then
717+
region='a' #default zone
718+
fi
695719

696720
createDockerMachine $DOCKER_MACHINE $AWS_EC2_TYPE $EC2_PRICE \
697-
60 $AWS_KEY_PAIR $AWS_KEY_PATH;
721+
60 $AWS_KEY_PAIR $AWS_KEY_PATH $zone;
698722
status=$(waitEC2Ready "docker-machine create" "$DOCKER_MACHINE" 1)
699723
if [ "$status" == "price-too-low" ]
700724
then
@@ -740,6 +764,8 @@ elif [[ "$RUN_ON" = "aws" ]]; then
740764
exit 1;
741765
fi
742766
echo "$(date "+%Y-%m-%d %H:%M:%S"): Docker $DOCKER_MACHINE is running."
767+
echo " To connect docker machine use:"
768+
echo " docker \`docker-machine config $DOCKER_MACHINE\` exec -it pg_nancy_${CURRENT_TS} bash"
743769

744770
docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\""
745771
if [ ${AWS_EC2_TYPE:0:2} == 'i3' ]
@@ -839,6 +865,19 @@ function copyFile() {
839865
## Apply machine features
840866
# Dump
841867
sleep 2 # wait for postgres up&running
868+
OP_START_TIME=$(date +%s);
869+
if ([ ! -z ${AFTER_DOCKER_INIT_CODE+x} ] && [ "$AFTER_DOCKER_INIT_CODE" != "" ])
870+
then
871+
echo "$(date "+%Y-%m-%d %H:%M:%S"): Apply code after docker init"
872+
AFTER_DOCKER_INIT_CODE_FILENAME=$(basename $AFTER_DOCKER_INIT_CODE)
873+
copyFile $AFTER_DOCKER_INIT_CODE
874+
# --set ON_ERROR_STOP=on
875+
docker_exec bash -c "chmod +x $MACHINE_HOME/$AFTER_DOCKER_INIT_CODE_FILENAME"
876+
docker_exec sh $MACHINE_HOME/$AFTER_DOCKER_INIT_CODE_FILENAME
877+
END_TIME=$(date +%s);
878+
DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
879+
echo "$(date "+%Y-%m-%d %H:%M:%S"): After docker init code applied for $DURATION."
880+
fi
842881

843882
OP_START_TIME=$(date +%s);
844883
if ([ ! -z ${BEFORE_DB_INIT_CODE+x} ] && [ "$BEFORE_DB_INIT_CODE" != "" ])
@@ -854,6 +893,8 @@ then
854893
fi
855894
OP_START_TIME=$(date +%s);
856895
echo "$(date "+%Y-%m-%d %H:%M:%S"): Restore database dump"
896+
#CPU_CNT=$(cat /proc/cpuinfo | grep processor | wc -l)
897+
CPU_CNT=$(docker_exec bash -c "cat /proc/cpuinfo | grep processor | wc -l") # for execute in docker
857898
case "$DB_DUMP_EXT" in
858899
sql)
859900
docker_exec bash -c "cat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test $OUTPUT_REDIRECT"
@@ -864,6 +905,9 @@ case "$DB_DUMP_EXT" in
864905
gz)
865906
docker_exec bash -c "zcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test $OUTPUT_REDIRECT"
866907
;;
908+
pgdmp)
909+
docker_exec bash -c "pg_restore -j $CPU_CNT --no-owner --no-privileges -U postgres -d test $MACHINE_HOME/$DB_DUMP_FILENAME" || true
910+
;;
867911
esac
868912
END_TIME=$(date +%s);
869913
DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
@@ -938,7 +982,7 @@ fi
938982

939983
# Clear statistics and log
940984
echo "$(date "+%Y-%m-%d %H:%M:%S"): Execute vacuumdb..."
941-
docker_exec vacuumdb -U postgres test -j $(cat /proc/cpuinfo | grep processor | wc -l) --analyze
985+
docker_exec vacuumdb -U postgres test -j $CPU_CNT --analyze
942986
docker_exec bash -c "echo '' > /var/log/postgresql/postgresql-$PG_VERSION-main.log"
943987
# Execute workload
944988
OP_START_TIME=$(date +%s);
@@ -947,7 +991,11 @@ if [ ! -z ${WORKLOAD_REAL+x} ] && [ "$WORKLOAD_REAL" != '' ];then
947991
echo "$(date "+%Y-%m-%d %H:%M:%S"): Execute pgreplay queries..."
948992
docker_exec psql -U postgres test -c 'create role testuser superuser login;'
949993
WORKLOAD_FILE_NAME=$(basename $WORKLOAD_REAL)
950-
docker_exec bash -c "pgreplay -r -j $MACHINE_HOME/$WORKLOAD_FILE_NAME"
994+
if [ ! -z ${WORKLOAD_REAL_REPLAY_SPEED+x} ] && [ "$WORKLOAD_REAL_REPLAY_SPEED" != '' ]; then
995+
docker_exec bash -c "pgreplay -r -s $WORKLOAD_REAL_REPLAY_SPEED $MACHINE_HOME/$WORKLOAD_FILE_NAME"
996+
else
997+
docker_exec bash -c "pgreplay -r -j $MACHINE_HOME/$WORKLOAD_FILE_NAME"
998+
fi
951999
else
9521000
if ([ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && [ "$WORKLOAD_CUSTOM_SQL" != "" ]); then
9531001
WORKLOAD_CUSTOM_FILENAME=$(basename $WORKLOAD_CUSTOM_SQL)
@@ -963,26 +1011,30 @@ echo "$(date "+%Y-%m-%d %H:%M:%S"): Workload executed for $DURATION."
9631011
OP_START_TIME=$(date +%s);
9641012
echo "$(date "+%Y-%m-%d %H:%M:%S"): Prepare JSON log..."
9651013
docker_exec bash -c "/root/pgbadger/pgbadger \
966-
-j $(cat /proc/cpuinfo | grep processor | wc -l) \
1014+
-j $CPU_CNT \
9671015
--prefix '%t [%p]: [%l-1] db=%d,user=%u (%a,%h)' /var/log/postgresql/* -f stderr \
9681016
-o $MACHINE_HOME/$ARTIFACTS_FILENAME.json"
9691017
#2> >(grep -v "install the Text::CSV_XS" >&2)
9701018

9711019
docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
1020+
docker_exec bash -c "gzip -c /etc/postgresql/$PG_VERSION/main/postgresql.conf > $MACHINE_HOME/$ARTIFACTS_FILENAME.conf.gz"
9721021
echo "$(date "+%Y-%m-%d %H:%M:%S"): Save artifcats..."
9731022
if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then
9741023
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
9751024
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
1025+
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.conf.gz $ARTIFACTS_DESTINATION/
9761026
else
9771027
if [ "$RUN_ON" = "localhost" ]; then
9781028
docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
9791029
docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
1030+
docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.conf.gz $ARTIFACTS_DESTINATION/
9801031
# TODO option: ln / cp
9811032
#cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
9821033
#cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
9831034
elif [ "$RUN_ON" = "aws" ]; then
9841035
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
9851036
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
1037+
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.conf.gz $ARTIFACTS_DESTINATION/
9861038
else
9871039
>&2 echo "ASSERT: must not reach this point"
9881040
exit 1

tests/nancy_run_before_init_code.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
#!/bin/bash
22

33
output=$(${BASH_SOURCE%/*}/../nancy run \
4-
--before-db-init-code "select abs from beforeinittable;" \
4+
--sql-before-db-restore "select abs from beforeinittable;" \
55
--workload-custom-sql "file://$srcDir/custom.sql" \
66
--db-dump "file://$srcDir/test.dump.bz2" \
77
--tmp-path $srcDir/tmp \

0 commit comments

Comments (0)