@@ -5,6 +5,7 @@ CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
 DOCKER_MACHINE="${DOCKER_MACHINE:-nancy-$CURRENT_TS}"
 DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
 DEBUG_TIMEOUT=0
+EBS_SIZE_MULTIPLIER=15
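+# Headroom multiplier for the EBS volume: a bzip2-compressed dump expands many times its size when restored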

 ## Get command line params
 while true; do
@@ -280,7 +281,6 @@ function checkPath() {
   if [[ $path =~ "file:///" ]]
   then
     path=${path/file:\/\//}
-    echo "CHECK $path"
     if [ -f $path ]
     then
       eval "$1=\"$path\"" # update original variable
@@ -475,6 +475,31 @@ function checkParams() {

 checkParams;

+# Determine dump file size
+if [ ! -z ${DB_DUMP_PATH+x} ]; then
+  dumpFileSize=0
+  if [[ $DB_DUMP_PATH =~ "s3://" ]]; then
+    dumpFileSize=$(s3cmd info $DB_DUMP_PATH | grep "File size:")
+    dumpFileSize=${dumpFileSize/File size:/}
+    dumpFileSize=${dumpFileSize/\t/}
+    dumpFileSize=${dumpFileSize// /}
+    # echo "S3 FILESIZE: $dumpFileSize"
+  else
+    dumpFileSize=$(stat -c%s "$DB_DUMP_PATH")
+  fi
+  [ $DEBUG -eq 1 ] && echo "Dump file size: $dumpFileSize bytes"
+  KB=1024
+  let minSize=300*$KB*$KB*$KB
+  ebsSize=$minSize # 300 GB
+  if [ "$dumpFileSize" -gt "$minSize" ]; then
+    let ebsSize=$dumpFileSize
+    let ebsSize=$ebsSize*$EBS_SIZE_MULTIPLIER
+    ebsSize=$(numfmt --to-unit=G $ebsSize)
+    EBS_SIZE=$ebsSize
+    [ $DEBUG -eq 1 ] && echo "EBS size: $EBS_SIZE GB"
+  fi
+fi
+
 set -ueo pipefail
 [ $DEBUG -eq 1 ] && set -ueox pipefail # to debug
 shopt -s expand_aliases
@@ -551,6 +576,12 @@ function cleanupAndExit {
     docker container rm -f $containerHash
   elif [ "$RUN_ON" = "aws" ]; then
     destroyDockerMachine $DOCKER_MACHINE
+    if [ ! -z ${VOLUME_ID+x} ]; then
+      echo "Wait and delete volume $VOLUME_ID"
+      sleep 60 # wait for the machine to be removed
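+      # delete-volume succeeds only once the volume is detached (back in the 'available' state)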
+      delvolout=$(aws ec2 delete-volume --volume-id $VOLUME_ID)
+      echo "Volume $VOLUME_ID deleted"
+    fi
   else
     >&2 echo "ASSERT: must not reach this point"
     exit 1
@@ -632,13 +663,48 @@ elif [[ "$RUN_ON" = "aws" ]]; then
     >&2 echo "Failed: Docker $DOCKER_MACHINE is NOT running."
     exit 1;
   fi
-
   echo "Docker $DOCKER_MACHINE is running."

+  docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\""
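+  # /home/storage will hold the data: local NVMe on i3 instances, an attached EBS volume otherwise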
+  if [ ${AWS_EC2_TYPE:0:2} == 'i3' ]
+  then
+    echo "Attempt to use high-speed disk"
+    # Init i3 storage: just mount the existing local volume
+    echo "Attach i3 NVMe volume"
+    docker-machine ssh $DOCKER_MACHINE sudo add-apt-repository -y ppa:sbates
+    docker-machine ssh $DOCKER_MACHINE sudo apt-get update || :
+    docker-machine ssh $DOCKER_MACHINE sudo apt-get install -y nvme-cli
+
+    docker-machine ssh $DOCKER_MACHINE sh -c "echo \"# partition table of /dev/nvme0n1\" > /tmp/nvme.part"
+    docker-machine ssh $DOCKER_MACHINE sh -c "echo \"unit: sectors\" >> /tmp/nvme.part"
+    docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p1 : start= 2048, size=1855466702, Id=83\" >> /tmp/nvme.part"
+    docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p2 : start= 0, size= 0, Id= 0\" >> /tmp/nvme.part"
+    docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p3 : start= 0, size= 0, Id= 0\" >> /tmp/nvme.part"
+    docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p4 : start= 0, size= 0, Id= 0\" >> /tmp/nvme.part"
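+    # NB: the fixed sector count gives one ext4 partition of ~950 GB, which appears sized for an i3.xlarge NVMe device; other i3 sizes would need different values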
+
+    docker-machine ssh $DOCKER_MACHINE sudo sfdisk /dev/nvme0n1 < /tmp/nvme.part
+    docker-machine ssh $DOCKER_MACHINE sudo mkfs -t ext4 /dev/nvme0n1p1
+    docker-machine ssh $DOCKER_MACHINE sudo mount /dev/nvme0n1p1 /home/storage
+  else
+    echo "Attempt to use external disk"
+    # Create a new volume and attach it for non-i3 instances if needed
+    if [ ! -z ${EBS_SIZE+x} ]; then
+      echo "Create and attach EBS volume"
+      [ $DEBUG -eq 1 ] && echo "Create volume with size: $EBS_SIZE GB"
+      VOLUME_ID=$(aws ec2 create-volume --size $EBS_SIZE --region us-east-1 --availability-zone us-east-1a --volume-type gp2 | jq -r .VolumeId)
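+      # NB: region/AZ are hardcoded; an EBS volume can attach only to an instance in the same availability zone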
+      INSTANCE_ID=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id)
+      sleep 10 # wait for the volume to become ready
+      attachResult=$(aws ec2 attach-volume --device /dev/xvdf --volume-id $VOLUME_ID --instance-id $INSTANCE_ID --region us-east-1)
+      docker-machine ssh $DOCKER_MACHINE sudo mkfs.ext4 /dev/xvdf
+      docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdf /home/storage
+    fi
+  fi
+
   containerHash=$( \
     docker `docker-machine config $DOCKER_MACHINE` run \
       --name="pg_nancy_${CURRENT_TS}" \
       -v /home/ubuntu:/machine_home \
+      -v /home/storage:/storage \
       -dit "postgresmen/postgres-with-stuff:pg${PG_VERSION}"
   )
   dockerConfig=$(docker-machine config $DOCKER_MACHINE)
@@ -651,6 +717,19 @@ alias docker_exec='docker $dockerConfig exec -i ${containerHash} '

 MACHINE_HOME="/machine_home/nancy_${containerHash}"
 docker_exec sh -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME"
+if [[ "$RUN_ON" = "aws" ]]; then
+  docker_exec bash -c "ln -s /storage/ $MACHINE_HOME/storage"
+  MACHINE_HOME="$MACHINE_HOME/storage"
+  docker_exec sh -c "chmod a+w /storage"
+
+  echo "Move postgresql to separate disk"
+  docker_exec bash -c "sudo /etc/init.d/postgresql stop"
+  sleep 2 # wait for postgres to stop
+  docker_exec bash -c "sudo mv /var/lib/postgresql /storage/"
+  docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql"
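+  # the symlink keeps the default PGDATA path valid while the data actually lives on /storage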
+  docker_exec bash -c "sudo /etc/init.d/postgresql start"
+  sleep 2 # wait for postgres to start
+fi

 function copyFile() {
   if [ "$1" != '' ]; then
@@ -662,7 +741,7 @@ function copyFile() {
       # TODO: option – hard links OR regular `cp`
       docker cp ${1/file:\/\//} $containerHash:$MACHINE_HOME/
     elif [ "$RUN_ON" = "aws" ]; then
-      docker-machine scp $1 $DOCKER_MACHINE:/home/ubuntu/nancy_${containerHash}
+      docker-machine scp $1 $DOCKER_MACHINE:/home/storage
     else
       >&2 echo "ASSERT: must not reach this point"
       exit 1
@@ -685,24 +764,21 @@ function copyFile() {
 # Dump
 sleep 2 # wait for postgres up&running
 DB_DUMP_FILENAME=$(basename $DB_DUMP_PATH)
-docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test"
+echo "Restore database dump"
+docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql -E --set ON_ERROR_STOP=on -U postgres test > /dev/null"
 # Apply SQL code after DB init
 echo "Apply SQL code after DB init"
 if ([ ! -z ${AFTER_DB_INIT_CODE+x} ] && [ "$AFTER_DB_INIT_CODE" != "" ])
 then
   AFTER_DB_INIT_CODE_FILENAME=$(basename $AFTER_DB_INIT_CODE)
-  if [[ $AFTER_DB_INIT_CODE =~ "s3://" ]]; then
-    docker_exec s3cmd sync $AFTER_DB_INIT_CODE $MACHINE_HOME/
-  else
-    docker-machine scp $AFTER_DB_INIT_CODE $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash
-  fi
-  docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
+  copyFile $AFTER_DB_INIT_CODE
+  docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
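+  # psql -b (--echo-errors) echoes only failed statements, unlike -E (--echo-hidden), which echoes the queries behind backslash commands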
 fi
 # Apply DDL code
 echo "Apply DDL SQL code"
 if ([ ! -z ${TARGET_DDL_DO+x} ] && [ "$TARGET_DDL_DO" != "" ]); then
   TARGET_DDL_DO_FILENAME=$(basename $TARGET_DDL_DO)
-  docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
+  docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
 fi
 # Apply initial postgres configuration
 echo "Apply initial postgres configuration"
@@ -748,26 +824,26 @@ docker_exec bash -c "/root/pgbadger/pgbadger \
   -o $MACHINE_HOME/$ARTIFACTS_FILENAME.json"
 # 2> >(grep -v "install the Text::CSV_XS" >&2)

-echo "Save JSON log..."
+logpath=$( \
+  docker_exec bash -c "psql -XtU postgres \
+    -c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
+    | grep / | sed -e 's/^[ \t]*//'"
+)
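+# logpath becomes <log_directory>/<log_filename>: string_agg ordered by name puts log_directory first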
+docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
+echo "Save artifacts..."
 if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then
-  docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json \
-    $ARTIFACTS_DESTINATION/
+  docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
+  docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
 else
-  logpath=$( \
-    docker_exec bash -c "psql -XtU postgres \
-      -c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
-      | grep / | sed -e 's/^[ \t]*//'"
-  )
-  docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
   if [ "$RUN_ON" = "localhost" ]; then
     docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
     docker cp $containerHash:$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
     # TODO option: ln / cp
     # cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
     # cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
   elif [ "$RUN_ON" = "aws" ]; then
-    docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
-    docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
+    docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
+    docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
   else
     >&2 echo "ASSERT: must not reach this point"
     exit 1
 echo "Apply DDL undo SQL code"
 if ([ ! -z ${TARGET_DDL_UNDO+x} ] && [ "$TARGET_DDL_UNDO" != "" ]); then
   TARGET_DDL_UNDO_FILENAME=$(basename $TARGET_DDL_UNDO)
-  docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
+  docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
 fi

 echo -e "Run done!"
 echo -e "Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json"
 echo -e "Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.log.gz"
 echo -e "-------------------------------------------"
 echo -e "Summary:"
-echo -e "Queries duration:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
-echo -e "Queries count:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
-echo -e "Normalized queries count:\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
-echo -e "Errors count:\t\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
+echo -e "Queries duration:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
+echo -e "Queries count:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
+echo -e "Normalized queries count:\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
+echo -e "Errors count:\t\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
 echo -e "-------------------------------------------"

 sleep $DEBUG_TIMEOUT