Skip to content
This repository was archived by the owner on Aug 16, 2021. It is now read-only.

Commit 94b180a

Browse files
authored
Merge branch 'master' into dmius-text-dump
2 parents e48e7e0 + 4a06749 commit 94b180a

File tree

1 file changed

+102
-26
lines changed

1 file changed

+102
-26
lines changed

nancy_run.sh

Lines changed: 102 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
55
DOCKER_MACHINE="${DOCKER_MACHINE:-nancy-$CURRENT_TS}"
66
DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
77
DEBUG_TIMEOUT=0
8+
EBS_SIZE_MULTIPLIER=15
89

910
## Get command line params
1011
while true; do
@@ -485,6 +486,31 @@ function checkParams() {
485486

486487
checkParams;
487488

489+
# ------------------------------------------------------------------
# Determine the size of the database dump (bytes) and derive the EBS
# volume size (in GB) needed to restore it.
#
# Reads:   DB_DUMP_PATH          local path or s3:// URL of the dump
#          EBS_SIZE_MULTIPLIER   restored DB is assumed to need up to
#                                this many times the dump size on disk
#          DEBUG                 1 → verbose progress messages
# Writes:  KB (=1024, kept global as in the original script)
#          EBS_SIZE — set ONLY when the dump exceeds 300 GB; smaller
#          dumps keep the script's default volume sizing.
# ------------------------------------------------------------------
determine_ebs_size_from_dump() {
  local dump_file_size=0
  if [[ "$DB_DUMP_PATH" =~ "s3://" ]]; then
    # s3cmd prints a human-oriented report; keep only the "File size:"
    # row and strip the label plus all whitespace.
    dump_file_size=$(s3cmd info "$DB_DUMP_PATH" | grep "File size:")
    dump_file_size=${dump_file_size/File size:/}
    # NOTE: the original used ${var/\t/}, whose pattern is a literal
    # "t" — it never stripped tabs. $'\t' is a real tab character.
    dump_file_size=${dump_file_size//$'\t'/}
    dump_file_size=${dump_file_size// /}
  else
    dump_file_size=$(stat -c%s "$DB_DUMP_PATH")
  fi
  [ "${DEBUG:-0}" -eq 1 ] && echo "Dump filesize: $dump_file_size bytes"

  KB=1024   # kept global: other parts of the script may rely on it
  local gb=$((KB * KB * KB))
  local min_size=$((300 * gb))   # 300 GB: below this the default volume suffices
  if [ "$dump_file_size" -gt "$min_size" ]; then
    local ebs_bytes=$((dump_file_size * EBS_SIZE_MULTIPLIER))
    # Round UP to whole GB. This matches the original
    # `numfmt --to-unit=G`, whose default rounding is from-zero,
    # without depending on an external tool.
    EBS_SIZE=$(( (ebs_bytes + gb - 1) / gb ))
    [ "${DEBUG:-0}" -eq 1 ] && echo "EBS Size: $EBS_SIZE Gb"
  fi
  return 0
}

# Size the EBS volume only when a dump was supplied on the command line.
if [ ! -z "${DB_DUMP_PATH+x}" ]; then
  determine_ebs_size_from_dump
fi
488514
set -ueo pipefail
489515
[ $DEBUG -eq 1 ] && set -ueox pipefail # to debug
490516
shopt -s expand_aliases
@@ -561,6 +587,12 @@ function cleanupAndExit {
561587
docker container rm -f $containerHash
562588
elif [ "$RUN_ON" = "aws" ]; then
563589
destroyDockerMachine $DOCKER_MACHINE
590+
if [ ! -z ${VOLUME_ID+x} ]; then
591+
echo "Wait and delete volume $VOLUME_ID"
592+
sleep 60 # wait to machine removed
593+
delvolout=$(aws ec2 delete-volume --volume-id $VOLUME_ID)
594+
echo "Volume $VOLUME_ID deleted"
595+
fi
564596
else
565597
>&2 echo "ASSERT: must not reach this point"
566598
exit 1
@@ -642,13 +674,48 @@ elif [[ "$RUN_ON" = "aws" ]]; then
642674
>&2 echo "Failed: Docker $DOCKER_MACHINE is NOT running."
643675
exit 1;
644676
fi
645-
646677
echo "Docker $DOCKER_MACHINE is running."
647678

679+
docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\""
680+
if [ ${AWS_EC2_TYPE:0:2} == 'i3' ]
681+
then
682+
echo "Attempt use high speed disk"
683+
# Init i3 storage, just mount existing volume
684+
echo "Attach i3 nvme volume"
685+
docker-machine ssh $DOCKER_MACHINE sudo add-apt-repository -y ppa:sbates
686+
docker-machine ssh $DOCKER_MACHINE sudo apt-get update || :
687+
docker-machine ssh $DOCKER_MACHINE sudo apt-get install -y nvme-cli
688+
689+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"# partition table of /dev/nvme0n1\" > /tmp/nvme.part"
690+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"unit: sectors \" >> /tmp/nvme.part"
691+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p1 : start= 2048, size=1855466702, Id=83 \" >> /tmp/nvme.part"
692+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p2 : start= 0, size= 0, Id= 0 \" >> /tmp/nvme.part"
693+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p3 : start= 0, size= 0, Id= 0 \" >> /tmp/nvme.part"
694+
docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p4 : start= 0, size= 0, Id= 0 \" >> /tmp/nvme.part"
695+
696+
docker-machine ssh $DOCKER_MACHINE sudo sfdisk /dev/nvme0n1 < /tmp/nvme.part
697+
docker-machine ssh $DOCKER_MACHINE sudo mkfs -t ext4 /dev/nvme0n1p1
698+
docker-machine ssh $DOCKER_MACHINE sudo mount /dev/nvme0n1p1 /home/storage
699+
else
700+
echo "Attempt use external disk"
701+
# Create new volume and attach them for non i3 instances if needed
702+
if [ ! -z ${EBS_SIZE+x} ]; then
703+
echo "Create and attach EBS volume"
704+
[ $DEBUG -eq 1 ] && echo "Create volume with size: $EBS_SIZE Gb"
705+
VOLUME_ID=$(aws ec2 create-volume --size $EBS_SIZE --region us-east-1 --availability-zone us-east-1a --volume-type gp2 | jq -r .VolumeId)
706+
INSTANCE_ID=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id)
707+
sleep 10 # wait to volume will ready
708+
attachResult=$(aws ec2 attach-volume --device /dev/xvdf --volume-id $VOLUME_ID --instance-id $INSTANCE_ID --region us-east-1)
709+
docker-machine ssh $DOCKER_MACHINE sudo mkfs.ext4 /dev/xvdf
710+
docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdf /home/storage
711+
fi
712+
fi
713+
648714
containerHash=$( \
649715
docker `docker-machine config $DOCKER_MACHINE` run \
650716
--name="pg_nancy_${CURRENT_TS}" \
651717
-v /home/ubuntu:/machine_home \
718+
-v /home/storage:/storage \
652719
-dit "postgresmen/postgres-with-stuff:pg${PG_VERSION}"
653720
)
654721
dockerConfig=$(docker-machine config $DOCKER_MACHINE)
@@ -661,6 +728,19 @@ alias docker_exec='docker $dockerConfig exec -i ${containerHash} '
661728

662729
MACHINE_HOME="/machine_home/nancy_${containerHash}"
663730
docker_exec sh -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME"
731+
# On AWS, relocate the working directory and the PostgreSQL data
# directory onto /storage (the NVMe or EBS volume mounted at
# /home/storage on the host and bind-mounted into the container),
# so large dumps/artifacts don't fill the root disk.
if [[ "$RUN_ON" = "aws" ]]; then
  docker_exec bash -c "ln -s /storage/ $MACHINE_HOME/storage"
  MACHINE_HOME="$MACHINE_HOME/storage"
  docker_exec sh -c "chmod a+w /storage"

  # Fixed message typo: "posgresql ... separated" -> "postgresql ... separate".
  echo "Move postgresql to separate disk"
  docker_exec bash -c "sudo /etc/init.d/postgresql stop"
  sleep 2 # wait for postgres stopped
  # Move the data directory and leave a symlink so PostgreSQL's config
  # keeps working unchanged.
  docker_exec bash -c "sudo mv /var/lib/postgresql /storage/"
  docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql"
  docker_exec bash -c "sudo /etc/init.d/postgresql start"
  sleep 2 # wait for postgres started
fi
664744

665745
function copyFile() {
666746
if [ "$1" != '' ]; then
@@ -672,7 +752,7 @@ function copyFile() {
672752
# TODO: option – hard links OR regular `cp`
673753
docker cp ${1/file:\/\//} $containerHash:$MACHINE_HOME/
674754
elif [ "$RUN_ON" = "aws" ]; then
675-
docker-machine scp $1 $DOCKER_MACHINE:/home/ubuntu/nancy_${containerHash}
755+
docker-machine scp $1 $DOCKER_MACHINE:/home/storage
676756
else
677757
>&2 echo "ASSERT: must not reach this point"
678758
exit 1
@@ -695,6 +775,7 @@ function copyFile() {
695775
# Dump
696776
sleep 2 # wait for postgres up&running
697777

778+
echo "Restore database dump"
698779
case "$DB_DUMP_EXT" in
699780
sql)
700781
docker_exec bash -c "cat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test"
@@ -706,24 +787,19 @@ case "$DB_DUMP_EXT" in
706787
docker_exec bash -c "zcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test"
707788
;;
708789
esac
709-
710790
# After init database sql code apply
711791
echo "Apply sql code after db init"
712792
if ([ ! -z ${AFTER_DB_INIT_CODE+x} ] && [ "$AFTER_DB_INIT_CODE" != "" ])
713793
then
714794
AFTER_DB_INIT_CODE_FILENAME=$(basename $AFTER_DB_INIT_CODE)
715-
if [[ $AFTER_DB_INIT_CODE =~ "s3://" ]]; then
716-
docker_exec s3cmd sync $AFTER_DB_INIT_CODE $MACHINE_HOME/
717-
else
718-
docker-machine scp $AFTER_DB_INIT_CODE $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash
719-
fi
720-
docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
795+
copyFile $AFTER_DB_INIT_CODE
796+
docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
721797
fi
722798
# Apply DDL code
723799
echo "Apply DDL SQL code"
724800
if ([ ! -z ${TARGET_DDL_DO+x} ] && [ "$TARGET_DDL_DO" != "" ]); then
725801
TARGET_DDL_DO_FILENAME=$(basename $TARGET_DDL_DO)
726-
docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
802+
docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
727803
fi
728804
# Apply initial postgres configuration
729805
echo "Apply initial postgres configuration"
@@ -769,26 +845,26 @@ docker_exec bash -c "/root/pgbadger/pgbadger \
769845
-o $MACHINE_HOME/$ARTIFACTS_FILENAME.json"
770846
#2> >(grep -v "install the Text::CSV_XS" >&2)
771847

772-
echo "Save JSON log..."
848+
logpath=$( \
849+
docker_exec bash -c "psql -XtU postgres \
850+
-c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
851+
| grep / | sed -e 's/^[ \t]*//'"
852+
)
853+
docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
854+
echo "Save artifcats..."
773855
if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then
774-
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json \
775-
$ARTIFACTS_DESTINATION/
856+
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
857+
docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
776858
else
777-
logpath=$( \
778-
docker_exec bash -c "psql -XtU postgres \
779-
-c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
780-
| grep / | sed -e 's/^[ \t]*//'"
781-
)
782-
docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
783859
if [ "$RUN_ON" = "localhost" ]; then
784860
docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
785861
docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
786862
# TODO option: ln / cp
787863
#cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
788864
#cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
789865
elif [ "$RUN_ON" = "aws" ]; then
790-
docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
791-
docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
866+
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
867+
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
792868
else
793869
>&2 echo "ASSERT: must not reach this point"
794870
exit 1
@@ -798,18 +874,18 @@ fi
798874
echo "Apply DDL undo SQL code"
799875
if ([ ! -z ${TARGET_DDL_UNDO+x} ] && [ "$TARGET_DDL_UNDO" != "" ]); then
800876
TARGET_DDL_UNDO_FILENAME=$(basename $TARGET_DDL_UNDO)
801-
docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
877+
docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
802878
fi
803879

804880
echo -e "Run done!"
805881
echo -e "Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json"
806882
echo -e "Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.log.gz"
807883
echo -e "-------------------------------------------"
808884
echo -e "Summary:"
809-
echo -e " Queries duration:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
810-
echo -e " Queries count:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
811-
echo -e " Normalized queries count:\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
812-
echo -e " Errors count:\t\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
885+
echo -e " Queries duration:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
886+
echo -e " Queries count:\t\t" $( docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
887+
echo -e " Normalized queries count:\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
888+
echo -e " Errors count:\t\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
813889
echo -e "-------------------------------------------"
814890

815891
sleep $DEBUG_TIMEOUT

0 commit comments

Comments (0)