@@ -5,6 +5,7 @@ CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
DOCKER_MACHINE="${DOCKER_MACHINE:-nancy-$CURRENT_TS}"
DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
DEBUG_TIMEOUT=0
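+# Heuristic used by this commit: a restored database (data, indexes, WAL)
+# needs far more space than its compressed dump, so the EBS volume is sized
+# at dump size x EBS_SIZE_MULTIPLIER.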
+EBS_SIZE_MULTIPLIER=15

## Get command line params
while true ; do
@@ -486,6 +487,31 @@ function checkParams() {

checkParams;

+# Determine dump file size
+if [ ! -z ${DB_DUMP_PATH+x} ]; then
+  dumpFileSize=0
+  if [[ $DB_DUMP_PATH =~ "s3://" ]]; then
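+    # "s3cmd info" prints a line like "File size: <bytes>"; the substitutions
+    # below strip the label, tab, and spaces to leave the raw byte count.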
+    dumpFileSize=$(s3cmd info $DB_DUMP_PATH | grep "File size:")
+    dumpFileSize=${dumpFileSize/File size:/}
+    dumpFileSize=${dumpFileSize/\t/}
+    dumpFileSize=${dumpFileSize// /}
+    #echo "S3 FILESIZE: $dumpFileSize"
+  else
+    dumpFileSize=$(stat -c%s "$DB_DUMP_PATH")
+  fi
+  [ $DEBUG -eq 1 ] && echo "Dump file size: $dumpFileSize bytes"
+  KB=1024
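+  # Default volume size is 300 GB; it grows only when the dump is larger.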
+  let minSize=300*$KB*$KB*$KB
+  ebsSize=$minSize # 300 GB
+  if [ "$dumpFileSize" -gt "$minSize" ]; then
+    let ebsSize=$dumpFileSize
+    let ebsSize=$ebsSize*$EBS_SIZE_MULTIPLIER
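+    # numfmt rescales the byte count to gigabyte units, the unit expected by
+    # "aws ec2 create-volume --size" below.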
+    ebsSize=$(numfmt --to-unit=G $ebsSize)
+    EBS_SIZE=$ebsSize
+    [ $DEBUG -eq 1 ] && echo "EBS size: $EBS_SIZE GB"
+  fi
+fi
+
set -ueo pipefail
[ $DEBUG -eq 1 ] && set -ueox pipefail # to debug
shopt -s expand_aliases
@@ -562,6 +588,12 @@ function cleanupAndExit {
docker container rm -f $containerHash
elif [ "$RUN_ON" = "aws" ]; then
destroyDockerMachine $DOCKER_MACHINE
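+  # An EBS volume can only be deleted after it is detached, i.e. once the
+  # machine's EC2 instance is gone; hence the sleep before delete-volume.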
+  if [ ! -z ${VOLUME_ID+x} ]; then
+    echo "Wait and delete volume $VOLUME_ID"
+    sleep 60 # wait for the machine to be removed
+    delvolout=$(aws ec2 delete-volume --volume-id $VOLUME_ID)
+    echo "Volume $VOLUME_ID deleted"
+  fi
else
>&2 echo "ASSERT: must not reach this point"
exit 1
@@ -643,13 +675,48 @@ elif [[ "$RUN_ON" = "aws" ]]; then
>&2 echo "Failed: Docker $DOCKER_MACHINE is NOT running."
exit 1;
fi
-
echo "Docker $DOCKER_MACHINE is running."

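+# All bulky data (dump, postgres data directory, artifacts) will live under
+# /home/storage, backed by a local NVMe disk or by an EBS volume (see below).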
+docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\""
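+# i3 instances ship with local NVMe instance storage; all other instance
+# types get an EBS volume when EBS_SIZE was derived from the dump size.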
+if [ ${AWS_EC2_TYPE:0:2} == 'i3' ]
+then
+  echo "Attempt to use high-speed disk"
+  # Init i3 storage: partition, format, and mount the built-in NVMe volume
+  echo "Attach i3 NVMe volume"
+  docker-machine ssh $DOCKER_MACHINE sudo add-apt-repository -y ppa:sbates
+  docker-machine ssh $DOCKER_MACHINE sudo apt-get update || :
+  docker-machine ssh $DOCKER_MACHINE sudo apt-get install -y nvme-cli
+
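+  # Build an sfdisk dump describing a single Linux partition (Id=83) that
+  # covers the NVMe device, then apply it and put ext4 on the result.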
+  docker-machine ssh $DOCKER_MACHINE sh -c "echo \"# partition table of /dev/nvme0n1\" > /tmp/nvme.part"
+  docker-machine ssh $DOCKER_MACHINE sh -c "echo \"unit: sectors\" >> /tmp/nvme.part"
+  docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p1 : start= 2048, size=1855466702, Id=83\" >> /tmp/nvme.part"
+  docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p2 : start= 0, size= 0, Id= 0\" >> /tmp/nvme.part"
+  docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p3 : start= 0, size= 0, Id= 0\" >> /tmp/nvme.part"
+  docker-machine ssh $DOCKER_MACHINE sh -c "echo \"/dev/nvme0n1p4 : start= 0, size= 0, Id= 0\" >> /tmp/nvme.part"
+
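+  # Quoting keeps the redirect on the remote host, where /tmp/nvme.part
+  # was just created (an unquoted "< file" would be opened locally).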
+  docker-machine ssh $DOCKER_MACHINE "sudo sfdisk /dev/nvme0n1 < /tmp/nvme.part"
+  docker-machine ssh $DOCKER_MACHINE sudo mkfs -t ext4 /dev/nvme0n1p1
+  docker-machine ssh $DOCKER_MACHINE sudo mount /dev/nvme0n1p1 /home/storage
+else
+  echo "Attempt to use external disk"
+  # Create a new volume and attach it, for non-i3 instances, if needed
+  if [ ! -z ${EBS_SIZE+x} ]; then
+    echo "Create and attach EBS volume"
+    [ $DEBUG -eq 1 ] && echo "Create volume with size: $EBS_SIZE GB"
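+    # NB: region and availability zone are hardcoded here; the volume must be
+    # created in the same AZ as the instance, or attach-volume will fail.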
+    VOLUME_ID=$(aws ec2 create-volume --size $EBS_SIZE --region us-east-1 --availability-zone us-east-1a --volume-type gp2 | jq -r .VolumeId)
+    INSTANCE_ID=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id)
+    sleep 10 # wait for the volume to become ready
+    attachResult=$(aws ec2 attach-volume --device /dev/xvdf --volume-id $VOLUME_ID --instance-id $INSTANCE_ID --region us-east-1)
+    docker-machine ssh $DOCKER_MACHINE sudo mkfs.ext4 /dev/xvdf
+    docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdf /home/storage
+  fi
+fi
+
containerHash=$( \
  docker `docker-machine config $DOCKER_MACHINE` run \
    --name="pg_nancy_${CURRENT_TS}" \
    -v /home/ubuntu:/machine_home \
+    -v /home/storage:/storage \
    -dit "postgresmen/postgres-with-stuff:pg${PG_VERSION}"
)
dockerConfig=$(docker-machine config $DOCKER_MACHINE)
@@ -662,6 +729,19 @@ alias docker_exec='docker $dockerConfig exec -i ${containerHash} '

MACHINE_HOME="/machine_home/nancy_${containerHash}"
docker_exec sh -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME"
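+# On AWS, repoint MACHINE_HOME at the big /storage mount via a symlink, so
+# the dump and artifacts land on the fast/large disk.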
+ if [[ " $RUN_ON " = " aws" ]]; then
733
+ docker_exec bash -c " ln -s /storage/ $MACHINE_HOME /storage"
734
+ MACHINE_HOME=" $MACHINE_HOME /storage"
735
+ docker_exec sh -c " chmod a+w /storage"
736
+
737
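+  # Relocate the data directory (Debian/Ubuntu default: /var/lib/postgresql)
+  # onto /storage and symlink it back, so postgres keeps its default paths
+  # while the data lives on the big disk.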
+ echo " Move posgresql to separated disk"
738
+ docker_exec bash -c " sudo /etc/init.d/postgresql stop"
739
+ sleep 2 # wait for postgres stopped
740
+ docker_exec bash -c " sudo mv /var/lib/postgresql /storage/"
741
+ docker_exec bash -c " ln -s /storage/postgresql /var/lib/postgresql"
742
+ docker_exec bash -c " sudo /etc/init.d/postgresql start"
743
+ sleep 2 # wait for postgres started
744
+ fi

function copyFile() {
  if [ "$1" != '' ]; then
@@ -673,7 +753,7 @@ function copyFile() {
    # TODO: option – hard links OR regular `cp`
    docker cp ${1/file:\/\//} $containerHash:$MACHINE_HOME/
  elif [ "$RUN_ON" = "aws" ]; then
-    docker-machine scp $1 $DOCKER_MACHINE:/home/ubuntu/nancy_${containerHash}
+    docker-machine scp $1 $DOCKER_MACHINE:/home/storage
  else
    >&2 echo "ASSERT: must not reach this point"
    exit 1
@@ -696,24 +776,21 @@ function copyFile() {
# Dump
sleep 2 # wait for postgres to be up and running
DB_DUMP_FILENAME=$(basename $DB_DUMP_PATH)
-docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test"
+echo "Restore database dump"
+docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql -E --set ON_ERROR_STOP=on -U postgres test > /dev/null"
# Apply SQL code after database init
echo "Apply SQL code after DB init"
if ([ ! -z ${AFTER_DB_INIT_CODE+x} ] && [ "$AFTER_DB_INIT_CODE" != "" ])
then
  AFTER_DB_INIT_CODE_FILENAME=$(basename $AFTER_DB_INIT_CODE)
-  if [[ $AFTER_DB_INIT_CODE =~ "s3://" ]]; then
-    docker_exec s3cmd sync $AFTER_DB_INIT_CODE $MACHINE_HOME/
-  else
-    docker-machine scp $AFTER_DB_INIT_CODE $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash
-  fi
-  docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
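+  # copyFile (defined above) already dispatches between s3cmd and scp;
+  # "-b" makes psql echo only failed commands, instead of "-E" which echoes
+  # the queries generated by backslash commands.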
+  copyFile $AFTER_DB_INIT_CODE
+  docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$AFTER_DB_INIT_CODE_FILENAME"
fi
# Apply DDL code
echo "Apply DDL SQL code"
if ([ ! -z ${TARGET_DDL_DO+x} ] && [ "$TARGET_DDL_DO" != "" ]); then
  TARGET_DDL_DO_FILENAME=$(basename $TARGET_DDL_DO)
-  docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
+  docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_DO_FILENAME"
fi
# Apply initial postgres configuration
echo "Apply initial postgres configuration"
@@ -759,26 +836,26 @@ docker_exec bash -c "/root/pgbadger/pgbadger \
  -o $MACHINE_HOME/$ARTIFACTS_FILENAME.json"
# 2> >(grep -v "install the Text::CSV_XS" >&2)

-echo "Save JSON log..."
+logpath=$( \
+  docker_exec bash -c "psql -XtU postgres \
+    -c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
+    | grep / | sed -e 's/^[ \t]*//'"
+)
+docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
+echo "Save artifacts..."
if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then
-  docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json \
-    $ARTIFACTS_DESTINATION/
+  docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
+  docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
else
-  logpath=$( \
-    docker_exec bash -c "psql -XtU postgres \
-      -c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
-      | grep / | sed -e 's/^[ \t]*//'"
-  )
-  docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz"
if [ " $RUN_ON " = " localhost" ]; then
774
851
docker cp $containerHash :$MACHINE_HOME /$ARTIFACTS_FILENAME .json $ARTIFACTS_DESTINATION /
775
852
docker cp $containerHash :$MACHINE_HOME /$ARTIFACTS_FILENAME .log.gz $ARTIFACTS_DESTINATION /
776
853
# TODO option: ln / cp
777
854
# cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
778
855
# cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
779
856
elif [ " $RUN_ON " = " aws" ]; then
-    docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
-    docker-machine scp $DOCKER_MACHINE:/home/ubuntu/nancy_$containerHash/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
+    docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
+    docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
  else
    >&2 echo "ASSERT: must not reach this point"
    exit 1
echo "Apply DDL undo SQL code"
if ([ ! -z ${TARGET_DDL_UNDO+x} ] && [ "$TARGET_DDL_UNDO" != "" ]); then
  TARGET_DDL_UNDO_FILENAME=$(basename $TARGET_DDL_UNDO)
-  docker_exec bash -c "psql -U postgres test -E -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
+  docker_exec bash -c "psql -U postgres test -b -f $MACHINE_HOME/$TARGET_DDL_UNDO_FILENAME"
fi

echo -e "Run done!"
echo -e "Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json"
echo -e "Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.log.gz"
echo -e "-------------------------------------------"
echo -e "Summary:"
-echo -e "Queries duration:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
-echo -e "Queries count:\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
-echo -e "Normalized queries count:\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
-echo -e "Errors count:\t\t\t" $(cat $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
+echo -e "Queries duration:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
+echo -e "Queries count:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
+echo -e "Normalized queries count:\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
+echo -e "Errors count:\t\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
echo -e "-------------------------------------------"

sleep $DEBUG_TIMEOUT