|
2 | 2 |
|
3 | 3 | DEBUG=0
|
4 | 4 | CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
|
5 |
| -DOCKER_MACHINE="${DOCKER_MACHINE:-nancy-$CURRENT_TS}" |
| 5 | +DOCKER_MACHINE="nancy-$CURRENT_TS" |
6 | 6 | DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
|
7 | 7 | DEBUG_TIMEOUT=0
|
8 | 8 | EBS_SIZE_MULTIPLIER=15
|
@@ -198,6 +198,9 @@ while true; do
|
198 | 198 | --after-db-init-code )
|
199 | 199 | #s3 url|filename|content
|
200 | 200 | AFTER_DB_INIT_CODE="$2"; shift 2 ;;
|
| 201 | + --before-db-init-code ) |
| 202 | + #s3 url|filename|content |
| 203 | + BEFORE_DB_INIT_CODE="$2"; shift 2 ;; |
201 | 204 | --workload-real )
|
202 | 205 | #s3 url
|
203 | 206 | WORKLOAD_REAL="$2"; shift 2 ;;
|
@@ -236,15 +239,16 @@ while true; do
|
236 | 239 | TMP_PATH="$2"; shift 2 ;;
|
237 | 240 | --debug-timeout )
|
238 | 241 | DEBUG_TIMEOUT="$2"; shift 2 ;;
|
239 |
| - -- ) |
240 |
| - >&2 echo "ERROR: Invalid option '$1'" |
241 |
| - exit 1; |
242 |
| - break ;; |
| 242 | + --ebs-volume-size ) |
| 243 | + EBS_VOLUME_SIZE="$2"; shift 2 ;; |
243 | 244 | * )
|
244 |
| - if [ "${1:0:2}" == "--" ]; then |
| 245 | + option=$1 |
| 246 | + option="${option##*( )}" |
| 247 | + option="${option%%*( )}" |
| 248 | + if [ "${option:0:2}" == "--" ]; then |
245 | 249 | >&2 echo "ERROR: Invalid option '$1'. Please double-check options."
|
246 | 250 | exit 1
|
247 |
| - elif [ "${1:0:2}" != "" ]; then |
| 251 | + elif [ "$option" != "" ]; then |
248 | 252 | >&2 echo "ERROR: \"nancy run\" does not support payload (except \"help\"). Use options, see \"nancy run help\")"
|
249 | 253 | exit 1
|
250 | 254 | fi
|
@@ -276,6 +280,8 @@ if [ $DEBUG -eq 1 ]; then
|
276 | 280 | echo "s3-cfg-path: $S3_CFG_PATH"
|
277 | 281 | echo "tmp-path: $TMP_PATH"
|
278 | 282 | echo "after-db-init-code: $AFTER_DB_INIT_CODE"
|
| 283 | + echo "before-db-init-code: $BEFORE_DB_INIT_CODE" |
| 284 | + echo "ebs-volume-size: $EBS_VOLUME_SIZE" |
279 | 285 | fi
|
280 | 286 |
|
281 | 287 | function checkPath() {
|
@@ -467,6 +473,17 @@ function checkParams() {
|
467 | 473 | fi
|
468 | 474 | fi
|
469 | 475 |
|
| 476 | + if [ ! -z ${BEFORE_DB_INIT_CODE+x} ]; then |
| 477 | + checkPath BEFORE_DB_INIT_CODE |
| 478 | + if [ "$?" -ne "0" ]; then |
| 479 | + #>&2 echo "WARNING: Value given as before_db_init_code: '$BEFORE_DB_INIT_CODE' not found as a file, will be used as content" |
| 480 | + echo "$BEFORE_DB_INIT_CODE" > $TMP_PATH/before_db_init_code_tmp.sql |
| 481 | + BEFORE_DB_INIT_CODE="$TMP_PATH/before_db_init_code_tmp.sql" |
| 482 | + else |
| 483 | + [ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as before_db_init_code will use as filename" |
| 484 | + fi |
| 485 | + fi |
| 486 | + |
470 | 487 | if [ ! -z ${TARGET_DDL_DO+x} ]; then
|
471 | 488 | checkPath TARGET_DDL_DO
|
472 | 489 | if [ "$?" -ne "0" ]; then
|
@@ -499,32 +516,50 @@ function checkParams() {
|
499 | 516 | [ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as target_config will use as filename"
|
500 | 517 | fi
|
501 | 518 | fi
|
| 519 | + |
| 520 | + if [ ! -z ${EBS_VOLUME_SIZE+x} ]; then |
| 521 | + if [ "$RUN_ON" == "localhost" ] || [ ${AWS_EC2_TYPE:0:2} == 'i3' ]; then |
| 522 | + >&2 echo "WARNING: ebs-volume-size is not required for AWS i3 instances and local execution." |
| 523 | + fi; |
| 524 | + re='^[0-9]+$' |
| 525 | + if ! [[ $EBS_VOLUME_SIZE =~ $re ]] ; then |
| 526 | + >&2 echo "ERROR: ebs-volume-size must be a numeric integer value." |
| 527 | + exit 1; |
| 528 | + fi |
| 529 | + else |
| 530 | + if [ ! ${AWS_EC2_TYPE:0:2} == 'i3' ]; then |
| 531 | + >&2 echo "WARNING: ebs-volume-size is not given, it will be calculated based on the dump size." |
| 532 | + fi |
| 533 | + fi |
502 | 534 | }
|
503 | 535 |
|
504 | 536 | checkParams;
|
505 | 537 |
|
506 | 538 | # Determine dump file size
|
507 |
| -if [ ! -z ${DB_DUMP_PATH+x} ]; then |
| 539 | +if ([ "$RUN_ON" == "aws" ] && [ ! ${AWS_EC2_TYPE:0:2} == "i3" ] && \ |
| 540 | + [ -z ${EBS_VOLUME_SIZE+x} ] && [ ! -z ${DB_DUMP_PATH+x} ]); then |
| 541 | + echo "Calculate EBS volume size." |
508 | 542 | dumpFileSize=0
|
509 | 543 | if [[ $DB_DUMP_PATH =~ "s3://" ]]; then
|
510 | 544 | dumpFileSize=$(s3cmd info $DB_DUMP_PATH | grep "File size:" )
|
511 | 545 | dumpFileSize=${dumpFileSize/File size:/}
|
512 | 546 | dumpFileSize=${dumpFileSize/\t/}
|
513 | 547 | dumpFileSize=${dumpFileSize// /}
|
514 |
| - #echo "S3 FILESIZE: $dumpFileSize" |
| 548 | + [ $DEBUG -eq 1 ] && echo "S3 FILESIZE: $dumpFileSize" |
515 | 549 | else
|
516 | 550 | dumpFileSize=$(stat -c%s "$DB_DUMP_PATH")
|
517 | 551 | fi
|
518 |
| - [ $DEBUG -eq 1 ] && echo "Dump filesize: $dumpFileSize bytes" |
| 552 | + let dumpFileSize=dumpFileSize*$EBS_SIZE_MULTIPLIER |
519 | 553 | KB=1024
|
520 | 554 | let minSize=300*$KB*$KB*$KB
|
521 | 555 | ebsSize=$minSize # 300 GB
|
522 | 556 | if [ "$dumpFileSize" -gt "$minSize" ]; then
|
523 | 557 | let ebsSize=$dumpFileSize
|
524 |
| - let ebsSize=$ebsSize*$EBS_SIZE_MULTIPLIER |
525 | 558 | ebsSize=$(numfmt --to-unit=G $ebsSize)
|
526 |
| - EBS_SIZE=$ebsSize |
527 |
| - [ $DEBUG -eq 1 ] && echo "EBS Size: $EBS_SIZE Gb" |
| 559 | + EBS_VOLUME_SIZE=$ebsSize |
| 560 | + [ $DEBUG -eq 1 ] && echo "EBS volume size: $EBS_VOLUME_SIZE Gb" |
| 561 | + else |
| 562 | + echo "EBS volume is not required." |
528 | 563 | fi
|
529 | 564 | fi
|
530 | 565 |
|
@@ -594,6 +629,7 @@ function cleanupAndExit {
|
594 | 629 | echo "Remove temp files..." # if exists
|
595 | 630 | docker $dockerConfig exec -i ${containerHash} sh -c "sudo rm -rf $MACHINE_HOME"
|
596 | 631 | rm -f "$TMP_PATH/after_db_init_code_tmp.sql"
|
| 632 | + rm -f "$TMP_PATH/before_db_init_code_tmp.sql" |
597 | 633 | rm -f "$TMP_PATH/workload_custom_sql_tmp.sql"
|
598 | 634 | rm -f "$TMP_PATH/target_ddl_do_tmp.sql"
|
599 | 635 | rm -f "$TMP_PATH/target_ddl_undo_tmp.sql"
|
@@ -716,10 +752,10 @@ elif [[ "$RUN_ON" = "aws" ]]; then
|
716 | 752 | else
|
717 | 753 | echo "Attempt use external disk"
|
718 | 754 | # Create new volume and attach them for non i3 instances if needed
|
719 |
| - if [ ! -z ${EBS_SIZE+x} ]; then |
| 755 | + if [ ! -z ${EBS_VOLUME_SIZE+x} ]; then |
720 | 756 | echo "Create and attach EBS volume"
|
721 |
| - [ $DEBUG -eq 1 ] && echo "Create volume with size: $EBS_SIZE Gb" |
722 |
| - VOLUME_ID=$(aws ec2 create-volume --size $EBS_SIZE --region us-east-1 --availability-zone us-east-1a --volume-type gp2 | jq -r .VolumeId) |
| 757 | + [ $DEBUG -eq 1 ] && echo "Create volume with size: $EBS_VOLUME_SIZE Gb" |
| 758 | + VOLUME_ID=$(aws ec2 create-volume --size $EBS_VOLUME_SIZE --region us-east-1 --availability-zone us-east-1a --volume-type gp2 | jq -r .VolumeId) |
723 | 759 | INSTANCE_ID=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id)
|
724 | 760 | sleep 10 # wait to volume will ready
|
725 | 761 | attachResult=$(aws ec2 attach-volume --device /dev/xvdf --volume-id $VOLUME_ID --instance-id $INSTANCE_ID --region us-east-1)
|
@@ -792,6 +828,13 @@ function copyFile() {
|
792 | 828 | # Dump
|
793 | 829 | sleep 2 # wait for postgres up&running
|
794 | 830 |
|
| 831 | +echo "Apply sql code before db init" |
| 832 | +if ([ ! -z ${BEFORE_DB_INIT_CODE+x} ] && [ "$BEFORE_DB_INIT_CODE" != "" ]) |
| 833 | +then |
| 834 | + BEFORE_DB_INIT_CODE_FILENAME=$(basename $BEFORE_DB_INIT_CODE) |
| 835 | + copyFile $BEFORE_DB_INIT_CODE |
| 836 | + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres test -b -f $MACHINE_HOME/$BEFORE_DB_INIT_CODE_FILENAME" |
| 837 | +fi |
795 | 838 | echo "Restore database dump"
|
796 | 839 | case "$DB_DUMP_EXT" in
|
797 | 840 | sql)
|
|
0 commit comments