@@ -2,7 +2,7 @@
 
 DEBUG=0
 CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
-DOCKER_MACHINE="${DOCKER_MACHINE:-nancy-$CURRENT_TS}"
+DOCKER_MACHINE="nancy-$CURRENT_TS"
 DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
 DEBUG_TIMEOUT=0
 EBS_SIZE_MULTIPLIER=15
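
A detail worth noting in this hunk: the timestamp format embeds underscores, but docker-machine rejects names containing underscores, so the `${DOCKER_MACHINE//_/-}` expansion rewrites every underscore to a hyphen. A quick illustration with a made-up timestamp:

    # Illustration only; the timestamp value is made up.
    CURRENT_TS="20180528_131415123456789_UTC"
    DOCKER_MACHINE="nancy-$CURRENT_TS"
    DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"   # '//' replaces ALL matches, not just the first
    echo "$DOCKER_MACHINE"                    # -> nancy-20180528-131415123456789-UTC
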
@@ -198,6 +198,9 @@ while true; do
     --after-db-init-code )
       # s3 url|filename|content
       AFTER_DB_INIT_CODE="$2"; shift 2 ;;
+    --before-db-init-code )
+      # s3 url|filename|content
+      BEFORE_DB_INIT_CODE="$2"; shift 2 ;;
     --workload-full-path )
       # s3 url
       WORKLOAD_FULL_PATH="$2"; shift 2 ;;
@@ -236,6 +239,8 @@ while true; do
       TMP_PATH="$2"; shift 2 ;;
     --debug-timeout )
       DEBUG_TIMEOUT="$2"; shift 2 ;;
+    --ebs-volume-size )
+      EBS_VOLUME_SIZE="$2"; shift 2 ;;
     -- )
       >&2 echo "ERROR: Invalid option '$1'"
       exit 1;
@@ -271,6 +276,8 @@
   echo "s3-cfg-path: $S3_CFG_PATH"
   echo "tmp-path: $TMP_PATH"
   echo "after-db-init-code: $AFTER_DB_INIT_CODE"
+  echo "before-db-init-code: $BEFORE_DB_INIT_CODE"
+  echo "ebs-volume-size: $EBS_VOLUME_SIZE"
 fi
 
 function checkPath() {
@@ -462,6 +469,17 @@ function checkParams() {
     fi
   fi
 
+  if [ ! -z ${BEFORE_DB_INIT_CODE+x} ]; then
+    checkPath BEFORE_DB_INIT_CODE
+    if [ "$?" -ne "0" ]; then
+      #>&2 echo "WARNING: Value given as before_db_init_code ('$BEFORE_DB_INIT_CODE') not found as a file; will be used as content"
+      echo "$BEFORE_DB_INIT_CODE" > $TMP_PATH/before_db_init_code_tmp.sql
+      BEFORE_DB_INIT_CODE="$TMP_PATH/before_db_init_code_tmp.sql"
+    else
+      [ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as before_db_init_code will be used as a filename"
+    fi
+  fi
+
   if [ ! -z ${TARGET_DDL_DO+x} ]; then
     checkPath TARGET_DDL_DO
     if [ "$?" -ne "0" ]; then
@@ -494,12 +512,28 @@ function checkParams() {
       [ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as target_config will be used as a filename"
     fi
   fi
+
+  if [ ! -z ${EBS_VOLUME_SIZE+x} ]; then
+    if [ "$RUN_ON" == "localhost" ] || [ ${AWS_EC2_TYPE:0:2} == 'i3' ]; then
+      >&2 echo "WARNING: ebs-volume-size is not required for AWS i3 instances or local execution."
+    fi
+    re='^[0-9]+$'
+    if ! [[ $EBS_VOLUME_SIZE =~ $re ]]; then
+      >&2 echo "ERROR: ebs-volume-size must be an integer value."
+      exit 1;
+    fi
+  else
+    if [ ! ${AWS_EC2_TYPE:0:2} == 'i3' ]; then
+      >&2 echo "WARNING: ebs-volume-size is not given; it will be calculated from the dump size."
+    fi
+  fi
 }
 
 checkParams;
 
 # Determine dump file size
-if [ ! -z ${DB_DUMP_PATH+x} ]; then
+if ([ "$RUN_ON" == "aws" ] && [ ! ${AWS_EC2_TYPE:0:2} == "i3" ] && [ -z ${EBS_VOLUME_SIZE+x} ] && [ ! -z ${DB_DUMP_PATH+x} ]); then
+  echo "Calculate EBS volume size."
   dumpFileSize=0
   if [[ $DB_DUMP_PATH =~ "s3://" ]]; then
     dumpFileSize=$(s3cmd info $DB_DUMP_PATH | grep "File size:")
@@ -510,16 +544,17 @@ if [ ! -z ${DB_DUMP_PATH+x} ]; then
   else
     dumpFileSize=$(stat -c%s "$DB_DUMP_PATH")
   fi
-  [ $DEBUG -eq 1 ] && echo "Dump file size: $dumpFileSize bytes"
   KB=1024
   let minSize=300*$KB*$KB*$KB
   ebsSize=$minSize # 300 GB
   if [ "$dumpFileSize" -gt "$minSize" ]; then
     let ebsSize=$dumpFileSize
     let ebsSize=$ebsSize*$EBS_SIZE_MULTIPLIER
     ebsSize=$(numfmt --to-unit=G $ebsSize)
-    EBS_SIZE=$ebsSize
-    [ $DEBUG -eq 1 ] && echo "EBS Size: $EBS_SIZE Gb"
+    EBS_VOLUME_SIZE=$ebsSize
+    [ $DEBUG -eq 1 ] && echo "EBS volume size: $EBS_VOLUME_SIZE GB"
+  else
+    echo "EBS volume is not required."
   fi
 fi
 
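
To make the sizing rule concrete: the rewritten guard only computes a size on AWS, for non-i3 instance types, when --ebs-volume-size was not supplied but a dump path was. Dumps at or below the 300 GB floor need no extra volume; larger dumps are multiplied by EBS_SIZE_MULTIPLIER (15), presumably to leave headroom for the restored database and its indexes. A self-contained sketch with a made-up 500 GB dump, using plain shell arithmetic where the script itself calls numfmt:

    #!/bin/bash
    # Worked example of the sizing rule; the 500 GB dump size is hypothetical.
    EBS_SIZE_MULTIPLIER=15
    GB=$((1024 * 1024 * 1024))
    dumpFileSize=$((500 * GB))   # pretend stat -c%s / s3cmd info reported 500 GB
    minSize=$((300 * GB))        # the script's 300 GB floor
    if [ "$dumpFileSize" -gt "$minSize" ]; then
      ebsSize=$((dumpFileSize * EBS_SIZE_MULTIPLIER))
      echo "EBS volume size: $((ebsSize / GB)) GB"   # prints 7500 here
    else
      echo "EBS volume is not required."
    fi
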
@@ -589,6 +624,7 @@ function cleanupAndExit {
   echo "Remove temp files..." # if they exist
   docker $dockerConfig exec -i ${containerHash} sh -c "sudo rm -rf $MACHINE_HOME"
   rm -f "$TMP_PATH/after_db_init_code_tmp.sql"
+  rm -f "$TMP_PATH/before_db_init_code_tmp.sql"
   rm -f "$TMP_PATH/workload_custom_sql_tmp.sql"
   rm -f "$TMP_PATH/target_ddl_do_tmp.sql"
   rm -f "$TMP_PATH/target_ddl_undo_tmp.sql"
@@ -711,10 +747,10 @@ elif [[ "$RUN_ON" = "aws" ]]; then
 else
   echo "Attempt to use an external disk"
   # Create a new volume and attach it for non-i3 instances if needed
-  if [ ! -z ${EBS_SIZE+x} ]; then
+  if [ ! -z ${EBS_VOLUME_SIZE+x} ]; then
     echo "Create and attach EBS volume"
-    [ $DEBUG -eq 1 ] && echo "Create volume with size: $EBS_SIZE Gb"
-    VOLUME_ID=$(aws ec2 create-volume --size $EBS_SIZE --region us-east-1 --availability-zone us-east-1a --volume-type gp2 | jq -r .VolumeId)
+    [ $DEBUG -eq 1 ] && echo "Create volume with size: $EBS_VOLUME_SIZE GB"
+    VOLUME_ID=$(aws ec2 create-volume --size $EBS_VOLUME_SIZE --region us-east-1 --availability-zone us-east-1a --volume-type gp2 | jq -r .VolumeId)
     INSTANCE_ID=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id)
     sleep 10 # wait for the volume to become available
     attachResult=$(aws ec2 attach-volume --device /dev/xvdf --volume-id $VOLUME_ID --instance-id $INSTANCE_ID --region us-east-1)
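
The fixed `sleep 10` before attaching is a best-effort pause and can race against slow volume provisioning. If it ever proves flaky, the AWS CLI ships a waiter that polls until the volume is actually usable; a possible substitution, not part of this commit:

    # Block until the volume reports the 'available' state, then attach.
    aws ec2 wait volume-available --volume-ids "$VOLUME_ID" --region us-east-1
    attachResult=$(aws ec2 attach-volume --device /dev/xvdf --volume-id "$VOLUME_ID" \
      --instance-id "$INSTANCE_ID" --region us-east-1)
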
@@ -787,6 +823,13 @@ function copyFile() {
 # Dump
 sleep 2 # wait for postgres to be up and running
 
+echo "Apply SQL code before DB init"
+if ([ ! -z ${BEFORE_DB_INIT_CODE+x} ] && [ "$BEFORE_DB_INIT_CODE" != "" ])
+then
+  BEFORE_DB_INIT_CODE_FILENAME=$(basename $BEFORE_DB_INIT_CODE)
+  copyFile $BEFORE_DB_INIT_CODE
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres test -b -f $MACHINE_HOME/$BEFORE_DB_INIT_CODE_FILENAME"
+fi
 echo "Restore database dump"
 case "$DB_DUMP_EXT" in
   sql)
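
Putting the commit together: it introduces --before-db-init-code (an s3 url, a filename, or literal SQL, executed via psql with ON_ERROR_STOP before the dump is restored) and --ebs-volume-size (an explicit volume size in GB that overrides the dump-based calculation). A hypothetical invocation exercising both; the script name and all values are placeholders:

    ./nancy_run.sh \
      --before-db-init-code "create extension if not exists pg_stat_statements;" \
      --ebs-volume-size 500 \
      --workload-full-path s3://somebucket/workload.sql \
      --tmp-path /tmp/nancy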