Skip to content
This repository was archived by the owner on Aug 16, 2021. It is now read-only.

Commit f6e9be9

Browse files
authored
Merge branch 'master' into dmius-checkparams
2 parents dd41a60 + 4a06749 commit f6e9be9

File tree

1 file changed

+103
-26
lines changed

1 file changed

+103
-26
lines changed

nancy_run.sh

Lines changed: 103 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
55
DOCKER_MACHINE="${DOCKER_MACHINE:-nancy-$CURRENT_TS}"
66
DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
77
DEBUG_TIMEOUT=0
8+
EBS_SIZE_MULTIPLIER=15
89

910
## Get command line params
1011
while true; do
@@ -486,6 +487,31 @@ function checkParams() {
486487

487488
checkParams;
488489

490+
# Determine dump file size
491+
if [ ! -z ${DB_DUMP_PATH+x} ]; then
492+
dumpFileSize=0
493+
if [[ $DB_DUMP_PATH =~ "s3://" ]]; then
494+
dumpFileSize=$(s3cmd info $DB_DUMP_PATH | grep "File size:" )
495+
dumpFileSize=${dumpFileSize/File size:/}
496+
dumpFileSize=${dumpFileSize/\t/}
497+
dumpFileSize=${dumpFileSize// /}
498+
#echo "S3 FILESIZE: $dumpFileSize"
499+
else
500+
dumpFileSize=$(stat -c%s "$DB_DUMP_PATH")
501+
fi
502+
[ $DEBUG -eq 1 ] && echo "Dump filesize: $dumpFileSize bytes"
503+
KB=1024
504+
let minSize=300*$KB*$KB*$KB
505+
ebsSize=$minSize # 300 GB
506+
if [ "$dumpFileSize" -gt "$minSize" ]; then
507+
let ebsSize=$dumpFileSize
508+
let ebsSize=$ebsSize*$EBS_SIZE_MULTIPLIER
509+
ebsSize=$(numfmt --to-unit=G $ebsSize)
510+
EBS_SIZE=$ebsSize
511+
[ $DEBUG -eq 1 ] && echo "EBS Size: $EBS_SIZE Gb"
512+
fi
513+
fi
514+
489515
set -ueo pipefail
490516
[ $DEBUG -eq 1 ] && set -ueox pipefail # to debug
491517
shopt -s expand_aliases
@@ -562,6 +588,12 @@ function cleanupAndExit {
562588
docker container rm -f $containerHash
563589
elif [ "$RUN_ON" = "aws" ]; then
564590
destroyDockerMachine $DOCKER_MACHINE
591+
if [ ! -z ${VOLUME_ID+x} ]; then
592+
echo "Wait and delete volume $VOLUME_ID"
593+
sleep 60 # wait for the machine to be fully removed before deleting its volume
594+
delvolout=$(aws ec2 delete-volume --volume-id $VOLUME_ID)
595+
echo "Volume $VOLUME_ID deleted"
596+
fi
565597
else
566598
>&2 echo "ASSERT: must not reach this point"
567599
exit 1
@@ -643,13 +675,48 @@ elif [[ "$RUN_ON" = "aws" ]]; then
643675
>&2 echo "Failed: Docker $DOCKER_MACHINE is NOT running."
644676
exit 1;
645677
fi
646-
647678
echo "Docker $DOCKER_MACHINE is running."
648679

680+
docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\""
681+
if [ ${AWS_EC2_TYPE:0:2} == 'i3' ]
682+
then
683+
echo "Attempt use high speed disk"
684+
# Init i3 storage, just mount existing volume
685+
echo "Attach i3 nvme volume"
686+
docker-machine ssh $DOCKER_MACHINE sudo add-apt-repository -y ppa:sbates
687+
docker-machine ssh $DOCKER_MACHINE sudo apt-get update || :
688+
docker-machine ssh $DOCKER_MACHINE sudo apt-get install -y nvme-cli
689+
690+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"# partition table of /dev/nvme0n1\" > /tmp/nvme.part"
691+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"unit: sectors \" >> /tmp/nvme.part"
692+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p1 : start= 2048, size=1855466702, Id=83 \" >> /tmp/nvme.part"
693+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p2 : start= 0, size= 0, Id= 0 \" >> /tmp/nvme.part"
694+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p3 : start= 0, size= 0, Id= 0 \" >> /tmp/nvme.part"
695+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p4 : start= 0, size= 0, Id= 0 \" >> /tmp/nvme.part"
696+
697+
docker-machine ssh $DOCKER_MACHINE sudo sfdisk /dev/nvme0n1 < /tmp/nvme.part
698+
docker-machine ssh $DOCKER_MACHINE sudo mkfs -t ext4 /dev/nvme0n1p1
699+
docker-machine ssh $DOCKER_MACHINE sudo mount /dev/nvme0n1p1 /home/storage
700+
else
701+
echo "Attempt use external disk"
702+
# Create new volume and attach them for non i3 instances if needed
703+
if [ ! -z ${EBS_SIZE+x} ]; then
704+
echo "Create and attach EBS volume"
705+
[ $DEBUG -eq 1 ] && echo "Create volume with size: $EBS_SIZE Gb"
706+
VOLUME_ID=$(aws ec2 create-volume --size $EBS_SIZE --region us-east-1 --availability-zone us-east-1a --volume-type gp2 | jq -r .VolumeId)
707+
INSTANCE_ID=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id)
708+
sleep 10 # wait until the newly created volume is ready to attach
709+
attachResult=$(aws ec2 attach-volume --device /dev/xvdf --volume-id $VOLUME_ID --instance-id $INSTANCE_ID --region us-east-1)
710+
docker-machine ssh $DOCKER_MACHINE sudo mkfs.ext4 /dev/xvdf
711+
docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdf /home/storage
712+
fi
713+
fi
714+
649715
containerHash=$( \
650716
docker `docker-machine config $DOCKER_MACHINE` run \
651717
--name="pg_nancy_${CURRENT_TS}" \
652718
-v /home/ubuntu:/machine_home \
719+
-v /home/storage:/storage \
653720
-dit "postgresmen/postgres-with-stuff:pg${PG_VERSION}"
654721
)
655722
dockerConfig=$(docker-machine config $DOCKER_MACHINE)
@@ -662,6 +729,19 @@ alias docker_exec='docker $dockerConfig exec -i ${containerHash} '
662729

663730
MACHINE_HOME="/machine_home/nancy_${containerHash}"
664731
docker_exec sh -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME"
732+
if [[ "$RUN_ON" = "aws" ]]; then
733+
docker_exec bash -c "ln -s /storage/ $MACHINE_HOME/storage"
734+
MACHINE_HOME="$MACHINE_HOME/storage"
735+
docker_exec sh -c "chmod a+w /storage"
736+
737+
echo "Move posgresql to separated disk"
738+
docker_exec bash -c "sudo /etc/init.d/postgresql stop"
739+
sleep 2 # wait for postgres to stop
740+
docker_exec bash -c "sudo mv /var/lib/postgresql /storage/"
741+
docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql"
742+
docker_exec bash -c "sudo /etc/init.d/postgresql start"
743+
sleep 2 # wait for postgres to start
744+
fi
665745

666746
function copyFile() {
667747
if [ "$1" != '' ]; then
@@ -673,7 +753,7 @@ function copyFile() {
673753
# TODO: option – hard links OR regular `cp`
674754
docker cp ${1/file:\/\//} $containerHash:$MACHINE_HOME/
675755
elif [ "$RUN_ON" = "aws" ]; then
676-
docker-machine scp $1 $DOCKER_MACHINE:/home/ubuntu/nancy_${containerHash}
756+
docker-machine scp $1 $DOCKER_MACHINE:/home/storage
677757
else
678758
>&2 echo "ASSERT: must not reach this point"
679759
exit 1
@@ -696,24 +776,21 @@ function copyFile() {
696776
# Dump
697777
sleep 2 # wait for postgres up&running
698778
DB_DUMP_FILENAME=$(basename $DB_DUMP_PATH)
699-
docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test"
779+
echo "Restore database dump"
780+
docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql -E --set ON_ERROR_STOP=on -U postgres test > /dev/null"
700781
# After init database sql code apply
701782
echo "Apply sql code after db init"
702783
if ([ ! -z ${AFTER_DB_INIT_CODE+x} ] && [ "$AFTER_DB_INIT_CODE" != "" ])
703784
then
704785
AFTER_DB_INIT_CODE_FILENAME=$(basename $AFTER_DB_INIT_CODE)
705-
if [[ $AFTER_DB_INIT_CODE =~ "s3://" ]]; then
706-
docker_exec s3cmd sync $AFTER_DB_INIT_CODE $MACHINE_HOME/
707-
else
708-
docker-machine scp $AFTER_DB_INIT_CODE $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash
709-
fi
710-
docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
786+
copyFile $AFTER_DB_INIT_CODE
787+
docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
711788
fi
712789
# Apply DDL code
713790
echo "Apply DDL SQL code"
714791
if ([ ! -z ${TARGET_DDL_DO+x} ] && [ "$TARGET_DDL_DO" != "" ]); then
715792
TARGET_DDL_DO_FILENAME=$(basename $TARGET_DDL_DO)
716-
docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
793+
docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
717794
fi
718795
# Apply initial postgres configuration
719796
echo "Apply initial postgres configuration"
@@ -759,26 +836,26 @@ docker_exec bash -c "/root/pgbadger/pgbadger \
759836
-o $MACHINE_HOME/$ARTIFACTS_FILENAME.json"
760837
#2> >(grep -v "install the Text::CSV_XS" >&2)
761838

762-
echo "Save JSON log..."
839+
logpath=$( \
840+
docker_exec bash -c "psql -XtU postgres \
841+
-c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
842+
| grep / | sed -e 's/^[ \t]*//'"
843+
)
844+
docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
845+
echo "Save artifcats..."
763846
if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then
764-
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json \
765-
$ARTIFACTS_DESTINATION/
847+
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
848+
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
766849
else
767-
logpath=$( \
768-
docker_exec bash -c "psql -XtU postgres \
769-
-c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
770-
| grep / | sed -e 's/^[ \t]*//'"
771-
)
772-
docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
773850
if [ "$RUN_ON" = "localhost" ]; then
774851
docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
775852
docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
776853
# TODO option: ln / cp
777854
#cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
778855
#cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
779856
elif [ "$RUN_ON" = "aws" ]; then
780-
docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
781-
docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
857+
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
858+
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
782859
else
783860
>&2 echo "ASSERT: must not reach this point"
784861
exit 1
@@ -788,18 +865,18 @@ fi
788865
echo "Apply DDL undo SQL code"
789866
if ([ ! -z ${TARGET_DDL_UNDO+x} ] && [ "$TARGET_DDL_UNDO" != "" ]); then
790867
TARGET_DDL_UNDO_FILENAME=$(basename $TARGET_DDL_UNDO)
791-
docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
868+
docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
792869
fi
793870

794871
echo -e "Run done!"
795872
echo -e "Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json"
796873
echo -e "Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.log.gz"
797874
echo -e "-------------------------------------------"
798875
echo -e "Summary:"
799-
echo -e " Queries duration:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
800-
echo -e " Queries count:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
801-
echo -e " Normalized queries count:\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
802-
echo -e " Errors count:\t\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
876+
echo -e " Queries duration:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
877+
echo -e " Queries count:\t\t" $( docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
878+
echo -e " Normalized queries count:\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
879+
echo -e " Errors count:\t\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
803880
echo -e "-------------------------------------------"
804881

805882
sleep $DEBUG_TIMEOUT

0 commit comments

Comments
 (0)