Skip to content
This repository was archived by the owner on Aug 16, 2021. It is now read-only.

Commit 4a06749

Browse files
authored
Merge pull request #39 from startupturbo/dmius-ebs-vol
Use i3 nvme0n1p1 volume or create+attach ebs for non i3
2 parents 9ca907e + 2577f4b commit 4a06749

File tree

1 file changed

+103
-27
lines changed

1 file changed

+103
-27
lines changed

nancy_run.sh

Lines changed: 103 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
55
DOCKER_MACHINE="${DOCKER_MACHINE:-nancy-$CURRENT_TS}"
66
DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
77
DEBUG_TIMEOUT=0
8+
EBS_SIZE_MULTIPLIER=15
89

910
## Get command line params
1011
while true; do
@@ -280,7 +281,6 @@ function checkPath() {
280281
if [[ $path =~ "file:///" ]]
281282
then
282283
path=${path/file:\/\//}
283-
echo "CHECK $path"
284284
if [ -f $path ]
285285
then
286286
eval "$1=\"$path\"" # update original variable
@@ -475,6 +475,31 @@ function checkParams() {
475475

476476
checkParams;
477477

478+
# Determine dump file size
479+
if [ ! -z ${DB_DUMP_PATH+x} ]; then
480+
dumpFileSize=0
481+
if [[ $DB_DUMP_PATH =~ "s3://" ]]; then
482+
dumpFileSize=$(s3cmd info $DB_DUMP_PATH | grep "File size:" )
483+
dumpFileSize=${dumpFileSize/File size:/}
484+
dumpFileSize=${dumpFileSize/\t/}
485+
dumpFileSize=${dumpFileSize// /}
486+
#echo "S3 FILESIZE: $dumpFileSize"
487+
else
488+
dumpFileSize=$(stat -c%s "$DB_DUMP_PATH")
489+
fi
490+
[ $DEBUG -eq 1 ] && echo "Dump filesize: $dumpFileSize bytes"
491+
KB=1024
492+
let minSize=300*$KB*$KB*$KB
493+
ebsSize=$minSize # 300 GB
494+
if [ "$dumpFileSize" -gt "$minSize" ]; then
495+
let ebsSize=$dumpFileSize
496+
let ebsSize=$ebsSize*$EBS_SIZE_MULTIPLIER
497+
ebsSize=$(numfmt --to-unit=G $ebsSize)
498+
EBS_SIZE=$ebsSize
499+
[ $DEBUG -eq 1 ] && echo "EBS Size: $EBS_SIZE Gb"
500+
fi
501+
fi
502+
478503
set -ueo pipefail
479504
[ $DEBUG -eq 1 ] && set -ueox pipefail # to debug
480505
shopt -s expand_aliases
@@ -551,6 +576,12 @@ function cleanupAndExit {
551576
docker container rm -f $containerHash
552577
elif [ "$RUN_ON" = "aws" ]; then
553578
destroyDockerMachine $DOCKER_MACHINE
579+
if [ ! -z ${VOLUME_ID+x} ]; then
580+
echo "Wait and delete volume $VOLUME_ID"
581+
sleep 60 # wait for the machine to be removed
582+
delvolout=$(aws ec2 delete-volume --volume-id $VOLUME_ID)
583+
echo "Volume $VOLUME_ID deleted"
584+
fi
554585
else
555586
>&2 echo "ASSERT: must not reach this point"
556587
exit 1
@@ -632,13 +663,48 @@ elif [[ "$RUN_ON" = "aws" ]]; then
632663
>&2 echo "Failed: Docker $DOCKER_MACHINE is NOT running."
633664
exit 1;
634665
fi
635-
636666
echo "Docker $DOCKER_MACHINE is running."
637667

668+
docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\""
669+
if [ ${AWS_EC2_TYPE:0:2} == 'i3' ]
670+
then
671+
echo "Attempt use high speed disk"
672+
# Init i3 storage, just mount existing volume
673+
echo "Attach i3 nvme volume"
674+
docker-machine ssh $DOCKER_MACHINE sudo add-apt-repository -y ppa:sbates
675+
docker-machine ssh $DOCKER_MACHINE sudo apt-get update || :
676+
docker-machine ssh $DOCKER_MACHINE sudo apt-get install -y nvme-cli
677+
678+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"# partition table of /dev/nvme0n1\" > /tmp/nvme.part"
679+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"unit: sectors \" >> /tmp/nvme.part"
680+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p1 : start= 2048, size=1855466702, Id=83 \" >> /tmp/nvme.part"
681+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p2 : start= 0, size= 0, Id= 0 \" >> /tmp/nvme.part"
682+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p3 : start= 0, size= 0, Id= 0 \" >> /tmp/nvme.part"
683+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p4 : start= 0, size= 0, Id= 0 \" >> /tmp/nvme.part"
684+
685+
docker-machine ssh $DOCKER_MACHINE sudo sfdisk /dev/nvme0n1 < /tmp/nvme.part
686+
docker-machine ssh $DOCKER_MACHINE sudo mkfs -t ext4 /dev/nvme0n1p1
687+
docker-machine ssh $DOCKER_MACHINE sudo mount /dev/nvme0n1p1 /home/storage
688+
else
689+
echo "Attempt use external disk"
690+
# Create a new volume and attach it for non-i3 instances if needed
691+
if [ ! -z ${EBS_SIZE+x} ]; then
692+
echo "Create and attach EBS volume"
693+
[ $DEBUG -eq 1 ] && echo "Create volume with size: $EBS_SIZE Gb"
694+
VOLUME_ID=$(aws ec2 create-volume --size $EBS_SIZE --region us-east-1 --availability-zone us-east-1a --volume-type gp2 | jq -r .VolumeId)
695+
INSTANCE_ID=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id)
696+
sleep 10 # wait for the volume to become ready
697+
attachResult=$(aws ec2 attach-volume --device /dev/xvdf --volume-id $VOLUME_ID --instance-id $INSTANCE_ID --region us-east-1)
698+
docker-machine ssh $DOCKER_MACHINE sudo mkfs.ext4 /dev/xvdf
699+
docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdf /home/storage
700+
fi
701+
fi
702+
638703
containerHash=$( \
639704
docker `docker-machine config $DOCKER_MACHINE` run \
640705
--name="pg_nancy_${CURRENT_TS}" \
641706
-v /home/ubuntu:/machine_home \
707+
-v /home/storage:/storage \
642708
-dit "postgresmen/postgres-with-stuff:pg${PG_VERSION}"
643709
)
644710
dockerConfig=$(docker-machine config $DOCKER_MACHINE)
@@ -651,6 +717,19 @@ alias docker_exec='docker $dockerConfig exec -i ${containerHash} '
651717

652718
MACHINE_HOME="/machine_home/nancy_${containerHash}"
653719
docker_exec sh -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME"
720+
if [[ "$RUN_ON" = "aws" ]]; then
721+
docker_exec bash -c "ln -s /storage/ $MACHINE_HOME/storage"
722+
MACHINE_HOME="$MACHINE_HOME/storage"
723+
docker_exec sh -c "chmod a+w /storage"
724+
725+
echo "Move posgresql to separated disk"
726+
docker_exec bash -c "sudo /etc/init.d/postgresql stop"
727+
sleep 2 # wait for postgres stopped
728+
docker_exec bash -c "sudo mv /var/lib/postgresql /storage/"
729+
docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql"
730+
docker_exec bash -c "sudo /etc/init.d/postgresql start"
731+
sleep 2 # wait for postgres started
732+
fi
654733

655734
function copyFile() {
656735
if [ "$1" != '' ]; then
@@ -662,7 +741,7 @@ function copyFile() {
662741
# TODO: option – hard links OR regular `cp`
663742
docker cp ${1/file:\/\//} $containerHash:$MACHINE_HOME/
664743
elif [ "$RUN_ON" = "aws" ]; then
665-
docker-machine scp $1 $DOCKER_MACHINE:/home/ubuntu/nancy_${containerHash}
744+
docker-machine scp $1 $DOCKER_MACHINE:/home/storage
666745
else
667746
>&2 echo "ASSERT: must not reach this point"
668747
exit 1
@@ -685,24 +764,21 @@ function copyFile() {
685764
# Dump
686765
sleep 2 # wait for postgres up&running
687766
DB_DUMP_FILENAME=$(basename $DB_DUMP_PATH)
688-
docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test"
767+
echo "Restore database dump"
768+
docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql -E --set ON_ERROR_STOP=on -U postgres test > /dev/null"
689769
# After init database sql code apply
690770
echo "Apply sql code after db init"
691771
if ([ ! -z ${AFTER_DB_INIT_CODE+x} ] && [ "$AFTER_DB_INIT_CODE" != "" ])
692772
then
693773
AFTER_DB_INIT_CODE_FILENAME=$(basename $AFTER_DB_INIT_CODE)
694-
if [[ $AFTER_DB_INIT_CODE =~ "s3://" ]]; then
695-
docker_exec s3cmd sync $AFTER_DB_INIT_CODE $MACHINE_HOME/
696-
else
697-
docker-machine scp $AFTER_DB_INIT_CODE $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash
698-
fi
699-
docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
774+
copyFile $AFTER_DB_INIT_CODE
775+
docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
700776
fi
701777
# Apply DDL code
702778
echo "Apply DDL SQL code"
703779
if ([ ! -z ${TARGET_DDL_DO+x} ] && [ "$TARGET_DDL_DO" != "" ]); then
704780
TARGET_DDL_DO_FILENAME=$(basename $TARGET_DDL_DO)
705-
docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
781+
docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
706782
fi
707783
# Apply initial postgres configuration
708784
echo "Apply initial postgres configuration"
@@ -748,26 +824,26 @@ docker_exec bash -c "/root/pgbadger/pgbadger \
748824
-o $MACHINE_HOME/$ARTIFACTS_FILENAME.json"
749825
#2> >(grep -v "install the Text::CSV_XS" >&2)
750826

751-
echo "Save JSON log..."
827+
logpath=$( \
828+
docker_exec bash -c "psql -XtU postgres \
829+
-c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
830+
| grep / | sed -e 's/^[ \t]*//'"
831+
)
832+
docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
833+
echo "Save artifcats..."
752834
if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then
753-
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json \
754-
$ARTIFACTS_DESTINATION/
835+
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
836+
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
755837
else
756-
logpath=$( \
757-
docker_exec bash -c "psql -XtU postgres \
758-
-c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
759-
| grep / | sed -e 's/^[ \t]*//'"
760-
)
761-
docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
762838
if [ "$RUN_ON" = "localhost" ]; then
763839
docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
764840
docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
765841
# TODO option: ln / cp
766842
#cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
767843
#cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
768844
elif [ "$RUN_ON" = "aws" ]; then
769-
docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
770-
docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
845+
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
846+
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
771847
else
772848
>&2 echo "ASSERT: must not reach this point"
773849
exit 1
@@ -777,18 +853,18 @@ fi
777853
echo "Apply DDL undo SQL code"
778854
if ([ ! -z ${TARGET_DDL_UNDO+x} ] && [ "$TARGET_DDL_UNDO" != "" ]); then
779855
TARGET_DDL_UNDO_FILENAME=$(basename $TARGET_DDL_UNDO)
780-
docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
856+
docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
781857
fi
782858

783859
echo -e "Run done!"
784860
echo -e "Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json"
785861
echo -e "Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.log.gz"
786862
echo -e "-------------------------------------------"
787863
echo -e "Summary:"
788-
echo -e " Queries duration:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
789-
echo -e " Queries count:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
790-
echo -e " Normalized queries count:\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
791-
echo -e " Errors count:\t\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
864+
echo -e " Queries duration:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
865+
echo -e " Queries count:\t\t" $( docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
866+
echo -e " Normalized queries count:\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
867+
echo -e " Errors count:\t\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
792868
echo -e "-------------------------------------------"
793869

794870
sleep $DEBUG_TIMEOUT

0 commit comments

Comments
 (0)