@@ -95,7 +95,7 @@ while true; do
95
95
96
96
Specify the path to database dump (created by pg_dump) to be used as an input.
97
97
98
- \033[1m--after-db-init-code \033[22m (string)
98
+ \033[1m--sql-after-db-restore \033[22m (string)
99
99
100
100
Specify additional commands to be executed after database is initiated (dump
101
101
loaded or snapshot attached).
@@ -199,10 +199,12 @@ while true; do
199
199
DB_PREPARED_SNAPSHOT=" $2 " ; shift 2 ;;
200
200
--db-dump )
201
201
DB_DUMP_PATH=" $2 " ; shift 2 ;;
202
- --after-db-init-code )
202
+ --commands-after-docker-init )
203
+ AFTER_DOCKER_INIT_CODE=" $2 " ; shift 2 ;;
204
+ --sql-after-db-restore )
203
205
# s3 url|filename|content
204
206
AFTER_DB_INIT_CODE=" $2 " ; shift 2 ;;
205
- --before-db-init-code )
207
+ --sql-before-db-restore )
206
208
# s3 url|filename|content
207
209
BEFORE_DB_INIT_CODE=" $2 " ; shift 2 ;;
208
210
--workload-real )
@@ -284,6 +286,7 @@ if [ $DEBUG -eq 1 ]; then
284
286
echo " s3-cfg-path: $S3_CFG_PATH "
285
287
echo " tmp-path: $TMP_PATH "
286
288
echo " after-db-init-code: $AFTER_DB_INIT_CODE "
289
+ echo " after-docker-init-code: $AFTER_DOCKER_INIT_CODE "
287
290
echo " before-db-init-code: $BEFORE_DB_INIT_CODE "
288
291
echo " ebs-volume-size: $EBS_VOLUME_SIZE "
289
292
fi
@@ -466,25 +469,36 @@ function checkParams() {
466
469
fi
467
470
fi
468
471
472
+ if [ ! -z ${AFTER_DOCKER_INIT_CODE+x} ]; then
473
+ checkPath AFTER_DOCKER_INIT_CODE
474
+ if [ " $? " -ne " 0" ]; then
475
+ # >&2 echo "WARNING: Value given as after_docker_init_code: '$AFTER_DOCKER_INIT_CODE' not found as file will use as content"
476
+ echo " $AFTER_DOCKER_INIT_CODE " > $TMP_PATH /after_docker_init_code_tmp.sh
477
+ AFTER_DOCKER_INIT_CODE=" $TMP_PATH /after_docker_init_code_tmp.sh"
478
+ else
479
+ [ " $DEBUG " -eq " 1" ] && echo " DEBUG: Value given as commands-after-docker-init will use as filename"
480
+ fi
481
+ fi
482
+
469
483
if [ ! -z ${AFTER_DB_INIT_CODE+x} ]; then
470
484
checkPath AFTER_DB_INIT_CODE
471
485
if [ " $? " -ne " 0" ]; then
472
486
# >&2 echo "WARNING: Value given as after_db_init_code: '$AFTER_DB_INIT_CODE' not found as file will use as content"
473
487
echo " $AFTER_DB_INIT_CODE " > $TMP_PATH /after_db_init_code_tmp.sql
474
488
AFTER_DB_INIT_CODE=" $TMP_PATH /after_db_init_code_tmp.sql"
475
489
else
476
- [ " $DEBUG " -eq " 1" ] && echo " DEBUG: Value given as after_db_init_code will use as filename"
490
+ [ " $DEBUG " -eq " 1" ] && echo " DEBUG: Value given as sql-after-db-restore will use as filename"
477
491
fi
478
492
fi
479
493
480
494
if [ ! -z ${BEFORE_DB_INIT_CODE+x} ]; then
481
495
checkPath BEFORE_DB_INIT_CODE
482
496
if [ " $? " -ne " 0" ]; then
483
- # >&2 echo "WARNING: Value given as after_db_init_code : '$AFTER_DB_INIT_CODE ' not found as file will use as content"
497
+ # >&2 echo "WARNING: Value given as before_db_init_code : '$BEFORE_DB_INIT_CODE ' not found as file will use as content"
484
498
echo " $BEFORE_DB_INIT_CODE " > $TMP_PATH /before_db_init_code_tmp.sql
485
499
BEFORE_DB_INIT_CODE=" $TMP_PATH /before_db_init_code_tmp.sql"
486
500
else
487
- [ " $DEBUG " -eq " 1" ] && echo " DEBUG: Value given as before_db_init_code will use as filename"
501
+ [ " $DEBUG " -eq " 1" ] && echo " DEBUG: Value given as sql-before-db-restore will use as filename"
488
502
fi
489
503
fi
490
504
@@ -641,6 +655,7 @@ function cleanupAndExit {
641
655
echo " $( date " +%Y-%m-%d %H:%M:%S" ) : Remove temp files..." # if exists
642
656
docker $dockerConfig exec -i ${containerHash} sh -c " sudo rm -rf $MACHINE_HOME "
643
657
rm -f " $TMP_PATH /after_db_init_code_tmp.sql"
658
+ rm -f " $TMP_PATH /after_docker_init_code_tmp.sh"
644
659
rm -f " $TMP_PATH /before_db_init_code_tmp.sql"
645
660
rm -f " $TMP_PATH /workload_custom_sql_tmp.sql"
646
661
rm -f " $TMP_PATH /target_ddl_do_tmp.sql"
@@ -839,6 +854,19 @@ function copyFile() {
839
854
# # Apply machine features
840
855
# Dump
841
856
sleep 2 # wait for postgres up&running
857
+ OP_START_TIME=$( date +%s) ;
858
+ if ([ ! -z ${AFTER_DOCKER_INIT_CODE+x} ] && [ " $AFTER_DOCKER_INIT_CODE " != " " ])
859
+ then
860
+ echo " $( date " +%Y-%m-%d %H:%M:%S" ) : Apply code after docker init"
861
+ AFTER_DOCKER_INIT_CODE_FILENAME=$( basename $AFTER_DOCKER_INIT_CODE )
862
+ copyFile $AFTER_DOCKER_INIT_CODE
863
+ # --set ON_ERROR_STOP=on
864
+ docker_exec bash -c " chmod +x $MACHINE_HOME /$AFTER_DOCKER_INIT_CODE_FILENAME "
865
+ docker_exec sh $MACHINE_HOME /$AFTER_DOCKER_INIT_CODE_FILENAME
866
+ END_TIME=$( date +%s) ;
867
+ DURATION=$( echo $(( END_TIME- OP_START_TIME)) | awk ' {printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}' )
868
+ echo " $( date " +%Y-%m-%d %H:%M:%S" ) : After docker init code applied for $DURATION ."
869
+ fi
842
870
843
871
OP_START_TIME=$( date +%s) ;
844
872
if ([ ! -z ${BEFORE_DB_INIT_CODE+x} ] && [ " $BEFORE_DB_INIT_CODE " != " " ])
854
882
fi
855
883
OP_START_TIME=$( date +%s) ;
856
884
echo " $( date " +%Y-%m-%d %H:%M:%S" ) : Restore database dump"
885
+ # CPU_CNT=$(cat /proc/cpuinfo | grep processor | wc -l)
886
+ CPU_CNT=$( docker_exec bash -c " cat /proc/cpuinfo | grep processor | wc -l" ) # for execute in docker
887
+ CPU_CNT=$(( CPU_CNT * 2 ))
857
888
case " $DB_DUMP_EXT " in
858
889
sql)
859
890
docker_exec bash -c " cat $MACHINE_HOME /$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test $OUTPUT_REDIRECT "
@@ -865,7 +896,7 @@ case "$DB_DUMP_EXT" in
865
896
docker_exec bash -c " zcat $MACHINE_HOME /$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test $OUTPUT_REDIRECT "
866
897
;;
867
898
pgdmp)
868
- docker_exec bash -c " pg_restore -j $( cat /proc/cpuinfo | grep processor | wc -l ) --no-owner --no-privileges -U postgres -d test $MACHINE_HOME /$DB_DUMP_FILENAME "
899
+ docker_exec bash -c " pg_restore -j $CPU_CNT --no-owner --no-privileges -U postgres -d test $MACHINE_HOME /$DB_DUMP_FILENAME "
869
900
;;
870
901
esac
871
902
END_TIME=$( date +%s) ;
941
972
942
973
# Clear statistics and log
943
974
echo " $( date " +%Y-%m-%d %H:%M:%S" ) : Execute vacuumdb..."
944
- docker_exec vacuumdb -U postgres test -j $( cat /proc/cpuinfo | grep processor | wc -l ) --analyze
975
+ docker_exec vacuumdb -U postgres test -j $CPU_CNT --analyze
945
976
docker_exec bash -c " echo '' > /var/log/postgresql/postgresql-$PG_VERSION -main.log"
946
977
# Execute workload
947
978
OP_START_TIME=$( date +%s) ;
@@ -966,26 +997,30 @@ echo "$(date "+%Y-%m-%d %H:%M:%S"): Workload executed for $DURATION."
966
997
OP_START_TIME=$( date +%s) ;
967
998
echo " $( date " +%Y-%m-%d %H:%M:%S" ) : Prepare JSON log..."
968
999
docker_exec bash -c " /root/pgbadger/pgbadger \
969
- -j $( cat /proc/cpuinfo | grep processor | wc -l ) \
1000
+ -j $CPU_CNT \
970
1001
--prefix '%t [%p]: [%l-1] db=%d,user=%u (%a,%h)' /var/log/postgresql/* -f stderr \
971
1002
-o $MACHINE_HOME /$ARTIFACTS_FILENAME .json"
972
1003
# 2> >(grep -v "install the Text::CSV_XS" >&2)
973
1004
974
1005
docker_exec bash -c " gzip -c $logpath > $MACHINE_HOME /$ARTIFACTS_FILENAME .log.gz"
1006
+ docker_exec bash -c " gzip -c /etc/postgresql/$PG_VERSION /main/postgresql.conf > $MACHINE_HOME /$ARTIFACTS_FILENAME .conf.gz"
975
1007
echo " $( date " +%Y-%m-%d %H:%M:%S" ) : Save artifcats..."
976
1008
if [[ $ARTIFACTS_DESTINATION =~ " s3://" ]]; then
977
1009
docker_exec s3cmd put /$MACHINE_HOME /$ARTIFACTS_FILENAME .json $ARTIFACTS_DESTINATION /
978
1010
docker_exec s3cmd put /$MACHINE_HOME /$ARTIFACTS_FILENAME .log.gz $ARTIFACTS_DESTINATION /
1011
+ docker_exec s3cmd put /$MACHINE_HOME /$ARTIFACTS_FILENAME .conf.gz $ARTIFACTS_DESTINATION /
979
1012
else
980
1013
if [ " $RUN_ON " = " localhost" ]; then
981
1014
docker cp $containerHash :$MACHINE_HOME /$ARTIFACTS_FILENAME .json $ARTIFACTS_DESTINATION /
982
1015
docker cp $containerHash :$MACHINE_HOME /$ARTIFACTS_FILENAME .log.gz $ARTIFACTS_DESTINATION /
1016
+ docker cp $containerHash :$MACHINE_HOME /$ARTIFACTS_FILENAME .conf.gz $ARTIFACTS_DESTINATION /
983
1017
# TODO option: ln / cp
984
1018
# cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
985
1019
# cp "$TMP_PATH/nancy_$containerHash/"$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
986
1020
elif [ " $RUN_ON " = " aws" ]; then
987
1021
docker-machine scp $DOCKER_MACHINE :/home/storage/$ARTIFACTS_FILENAME .json $ARTIFACTS_DESTINATION /
988
1022
docker-machine scp $DOCKER_MACHINE :/home/storage/$ARTIFACTS_FILENAME .log.gz $ARTIFACTS_DESTINATION /
1023
+ docker-machine scp $DOCKER_MACHINE :/home/storage/$ARTIFACTS_FILENAME .conf.gz $ARTIFACTS_DESTINATION /
989
1024
else
990
1025
>&2 echo " ASSERT: must not reach this point"
991
1026
exit 1
0 commit comments