Skip to content
This repository was archived by the owner on Aug 16, 2021. It is now read-only.

Commit 58ef40f

Browse files
authored
Merge pull request #23 from startupturbo/dmius-dual-params
Dual params for sql code and conf
2 parents 7d0c49e + 5c47798 commit 58ef40f

File tree

1 file changed

+125
-56
lines changed

1 file changed

+125
-56
lines changed

nancy_run.sh

Lines changed: 125 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ while true; do
2323
--db-dump-path )
2424
DB_DUMP_PATH="$2"; shift 2 ;;
2525
--after-db-init-code )
26-
#s3 url|filename +
26+
#s3 url|filename|content
2727
AFTER_DB_INIT_CODE="$2"; shift 2 ;;
2828
--workload-full-path )
2929
#s3 url
@@ -32,18 +32,18 @@ while true; do
3232
#Still unsupported
3333
WORKLOAD_BASIS_PATH="$2"; shift 2 ;;
3434
--workload-custom-sql )
35-
#s3 url|filename +
35+
#s3 url|filename|content
3636
WORKLOAD_CUSTOM_SQL="$2"; shift 2 ;;
3737
--workload-replay-speed )
3838
WORKLOAD_REPLAY_SPEED="$2"; shift 2 ;;
3939
--target-ddl-do )
40-
#s3 url|filename +
40+
#s3 url|filename|content
4141
TARGET_DDL_DO="$2"; shift 2 ;;
4242
--target-ddl-undo )
43-
#s3 url|filename +
43+
#s3 url|filename|content
4444
TARGET_DDL_UNDO="$2"; shift 2 ;;
4545
--target-config )
46-
#s3 url|filename +
46+
#s3 url|filename|content
4747
TARGET_CONFIG="$2"; shift 2 ;;
4848
--artifacts-destination )
4949
ARTIFACTS_DESTINATION="$2"; shift 2 ;;
@@ -96,7 +96,7 @@ then
9696
fi
9797

9898
function checkPath() {
99-
if [ ! -v $1 ]
99+
if [ -z $1 ]
100100
then
101101
return 1
102102
fi
@@ -108,6 +108,7 @@ function checkPath() {
108108
if [[ $path =~ "file:///" ]]
109109
then
110110
path=${path/file:\/\//}
111+
echo "CHECK $path"
111112
if [ -f $path ]
112113
then
113114
eval "$1=\"$path\"" # update original variable
@@ -125,13 +126,9 @@ function checkPath() {
125126
eval "$1=\"$path\"" # update original variable
126127
return 0 # file found
127128
else
128-
return 3 # file not found
129+
return 2 # file not found
129130
fi
130131
fi
131-
if [ -f $path ]
132-
then
133-
return 0;
134-
fi
135132
return -1 # incorrect path
136133
}
137134

@@ -142,43 +139,43 @@ function checkParams() {
142139
exit 1
143140
fi
144141
if [ "$RUN_ON" = "aws" ]; then
145-
if [ ! -v AWS_KEY_PAIR ] || [ ! -v AWS_KEY_PATH ]
142+
if [ -z ${AWS_KEY_PAIR+x} ] || [ -z ${AWS_KEY_PATH+x} ]
146143
then
147144
>&2 echo "ERROR: AWS keys not given."
148145
exit 1
149146
fi
150147

151-
if [ ! -v AWS_EC2_TYPE ]
148+
if [ -z ${AWS_EC2_TYPE+x} ]
152149
then
153150
>&2 echo "ERROR: AWS EC2 Instance type not given."
154151
exit 1
155152
fi
156153
fi
157154

158-
if [ ! -v PG_VERSION ]
155+
if [ -z ${PG_VERSION+x} ]
159156
then
160157
>&2 echo "WARNING: Postgres version not given. Will use 9.6."
161158
PG_VERSION="9.6"
162159
fi
163160

164-
if [ ! -v TMP_PATH ]
161+
if [ -z ${TMP_PATH+x} ]
165162
then
166163
TMP_PATH="/var/tmp/nancy_run"
167164
>&2 echo "WARNING: Temp path not given. Will use $TMP_PATH"
168165
fi
169166
#make tmp path if not found
170167
[ ! -d $TMP_PATH ] && mkdir $TMP_PATH
171168

172-
if [ ! -v S3_CFG_PATH ]
169+
if [ -z ${S3_CFG_PATH+x} ]
173170
then
174171
>&2 echo "WARNING: S3 config file path not given. Will use ~/.s3cfg"
175172
S3_CFG_PATH=$(echo ~)"/.s3cfg"
176173
fi
177174

178175
workloads_count=0
179-
[ -v WORKLOAD_BASIS_PATH ] && let workloads_count=$workloads_count+1
180-
[ -v WORKLOAD_FULL_PATH ] && let workloads_count=$workloads_count+1
181-
[ -v WORKLOAD_CUSTOM_SQL ] && let workloads_count=$workloads_count+1
176+
[ ! -z ${WORKLOAD_BASIS_PATH+x} ] && let workloads_count=$workloads_count+1
177+
[ ! -z ${WORKLOAD_FULL_PATH+x} ] && let workloads_count=$workloads_count+1
178+
[ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && let workloads_count=$workloads_count+1
182179

183180
# --workload-full-path or --workload-basis-path or --workload-custom-sql
184181
if [ "$workloads_count" -eq "0" ]
@@ -194,51 +191,115 @@ function checkParams() {
194191
fi
195192

196193
#--db-prepared-snapshot or --db-dump-path
197-
if ([ ! -v DB_PREPARED_SNAPSHOT ] && [ ! -v DB_DUMP_PATH ])
194+
if ([ -z ${DB_PREPARED_SNAPSHOT+x} ] && [ -z ${DB_DUMP_PATH+x} ])
198195
then
199196
>&2 echo "ERROR: Snapshot or dump not given."
200197
exit 1;
201198
fi
202199

203-
if ([ -v DB_PREPARED_SNAPSHOT ] && [ -v DB_DUMP_PATH ])
200+
if ([ ! -z ${DB_PREPARED_SNAPSHOT+x} ] && [ ! -z ${DB_DUMP_PATH+x} ])
204201
then
205202
>&2 echo "ERROR: Both snapshot and dump sources given."
206203
exit 1
207204
fi
208205

209-
if (([ ! -v TARGET_DDL_UNDO ] && [ -v TARGET_DDL_DO ]) || ([ ! -v TARGET_DDL_DO ] && [ -v TARGET_DDL_UNDO ]))
206+
if [ ! -z ${DB_DUMP_PATH+x} ]
207+
then
208+
echo "DB_DUMP_PATH found"
209+
else
210+
echo "DB_DUMP_PATH NOT found"
211+
fi
212+
213+
[ ! -z ${DB_DUMP_PATH+x} ] && ! checkPath DB_DUMP_PATH && >&2 echo "ERROR: file $DB_DUMP_PATH given by db_dump_path not found" && exit 1
214+
215+
if (([ -z ${TARGET_DDL_UNDO+x} ] && [ ! -z ${TARGET_DDL_DO+x} ]) || ([ -z ${TARGET_DDL_DO+x} ] && [ ! -z ${TARGET_DDL_UNDO+x} ]))
210216
then
211217
>&2 echo "ERROR: DDL code must have do and undo part."
212218
exit 1;
213219
fi
214220

215-
if [ ! -v ARTIFACTS_DESTINATION ]
221+
if [ -z ${ARTIFACTS_DESTINATION+x} ]
216222
then
217223
>&2 echo "WARNING: Artifacts destination not given. Will use ./"
218224
ARTIFACTS_DESTINATION="."
219225
fi
220226

221-
if [ ! -v ARTIFACTS_FILENAME ]
227+
if [ -z ${ARTIFACTS_FILENAME+x} ]
222228
then
223229
>&2 echo "WARNING: Artifacts filename not given. Will use $DOCKER_MACHINE"
224230
ARTIFACTS_FILENAME=$DOCKER_MACHINE
225231
fi
226232

227-
[ -v WORKLOAD_FULL_PATH ] && ! checkPath WORKLOAD_FULL_PATH && >&2 echo "WARNING: file $AFTER_DB_INIT_CODE not found"
233+
[ ! -z ${WORKLOAD_FULL_PATH+x} ] && ! checkPath WORKLOAD_FULL_PATH && >&2 echo "ERROR: file $WORKLOAD_FULL_PATH given by workload_full_path not found" && exit 1
228234

229-
[ -v WORKLOAD_BASIS_PATH ] && ! checkPath WORKLOAD_BASIS_PATH && >&2 echo "WARNING: file $WORKLOAD_BASIS_PATH not found"
235+
echo "WORKLOAD_FULL_PATH: $WORKLOAD_FULL_PATH"
230236

231-
[ -v WORKLOAD_CUSTOM_SQL ] && ! checkPath WORKLOAD_CUSTOM_SQL && >&2 echo "WARNING: file $WORKLOAD_CUSTOM_SQL not found"
237+
[ ! -z ${WORKLOAD_BASIS_PATH+x} ] && ! checkPath WORKLOAD_BASIS_PATH && >&2 echo "WARNING: file $WORKLOAD_BASIS_PATH given by workload_basis_path not found"
232238

233-
[ -v DB_DUMP_PATH ] && ! checkPath DB_DUMP_PATH && >&2 echo "WARNING: file $DB_DUMP_PATH not found"
239+
if [ ! -z ${WORKLOAD_CUSTOM_SQL+x} ]
240+
then
241+
checkPath WORKLOAD_CUSTOM_SQL
242+
if [ "$?" -ne "0" ]
243+
then
244+
>&2 echo "WARNING: Value given as workload-custom-sql: '$WORKLOAD_CUSTOM_SQL' not found as file will use as content"
245+
echo "$WORKLOAD_CUSTOM_SQL" > $TMP_PATH/workload_custom_sql_tmp.sql
246+
WORKLOAD_CUSTOM_SQL="$TMP_PATH/workload_custom_sql_tmp.sql"
247+
else
248+
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as workload-custom-sql will use as filename"
249+
fi
250+
fi
234251

235-
[ -v AFTER_DB_INIT_CODE ] && ! checkPath AFTER_DB_INIT_CODE && >&2 echo "WARNING: file $AFTER_DB_INIT_CODE not found"
252+
if [ ! -z ${AFTER_DB_INIT_CODE+x} ]
253+
then
254+
checkPath AFTER_DB_INIT_CODE
255+
if [ "$?" -ne "0" ]
256+
then
257+
>&2 echo "WARNING: Value given as after_db_init_code: '$AFTER_DB_INIT_CODE' not found as file will use as content"
258+
echo "$AFTER_DB_INIT_CODE" > $TMP_PATH/after_db_init_code_tmp.sql
259+
AFTER_DB_INIT_CODE="$TMP_PATH/after_db_init_code_tmp.sql"
260+
else
261+
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as after_db_init_code will use as filename"
262+
fi
263+
fi
236264

237-
[ -v TARGET_DDL_DO ] && ! checkPath TARGET_DDL_DO && >&2 echo "WARNING: file $TARGET_DDL_DO not found"
265+
if [ ! -z ${TARGET_DDL_DO+x} ]
266+
then
267+
checkPath TARGET_DDL_DO
268+
if [ "$?" -ne "0" ]
269+
then
270+
>&2 echo "WARNING: Value given as target_ddl_do: '$TARGET_DDL_DO' not found as file will use as content"
271+
echo "$TARGET_DDL_DO" > $TMP_PATH/target_ddl_do_tmp.sql
272+
TARGET_DDL_DO="$TMP_PATH/target_ddl_do_tmp.sql"
273+
else
274+
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as target_ddl_do will use as filename"
275+
fi
276+
fi
238277

239-
[ -v TARGET_DDL_UNDO ] && ! checkPath TARGET_DDL_UNDO && >&2 echo "WARNING: file $TARGET_DDL_UNDO not found"
278+
if [ ! -z ${TARGET_DDL_UNDO+x} ]
279+
then
280+
checkPath TARGET_DDL_UNDO
281+
if [ "$?" -ne "0" ]
282+
then
283+
>&2 echo "WARNING: Value given as target_ddl_undo: '$TARGET_DDL_UNDO' not found as file will use as content"
284+
echo "$TARGET_DDL_UNDO" > $TMP_PATH/target_ddl_undo_tmp.sql
285+
TARGET_DDL_UNDO="$TMP_PATH/target_ddl_undo_tmp.sql"
286+
else
287+
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as target_ddl_undo will use as filename"
288+
fi
289+
fi
240290

241-
[ -v TARGET_CONFIG ] && ! checkPath TARGET_CONFIG && >&2 echo "WARNING: file $TARGET_CONFIG not found"
291+
if [ ! -z ${TARGET_CONFIG+x} ]
292+
then
293+
checkPath TARGET_CONFIG
294+
if [ "$?" -ne "0" ]
295+
then
296+
>&2 echo "WARNING: Value given as target_config: '$TARGET_CONFIG' not found as file will use as content"
297+
echo "$TARGET_CONFIG" > $TMP_PATH/target_config_tmp.conf
298+
TARGET_CONFIG="$TMP_PATH/target_config_tmp.conf"
299+
else
300+
[ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as target_config will use as filename"
301+
fi
302+
fi
242303
}
243304

244305
checkParams;
@@ -258,7 +319,6 @@ function waitEC2Ready() {
258319
((STOP==1)) && return 0
259320
if [ $checkPrice -eq 1 ]
260321
then
261-
#status=$(aws ec2 describe-spot-instance-requests --filters="Name=launch.instance-type,Values=$AWS_EC2_TYPE" | jq '.SpotInstanceRequests[] | .Status.Code' | tail -n 1 )
262322
status=$(aws ec2 describe-spot-instance-requests --filters="Name=launch.instance-type,Values=$AWS_EC2_TYPE" | jq '.SpotInstanceRequests | sort_by(.CreateTime) | .[] | .Status.Code' | tail -n 1)
263323
if [ "$status" == "\"price-too-low\"" ]
264324
then
@@ -337,11 +397,13 @@ else
337397
fi
338398

339399
function cleanup {
340-
echo "Remove temp files..."
341-
rm -f "$TMP_PATH/conf_$DOCKER_MACHINE.tmp"
342-
rm -f "$TMP_PATH/ddl_do_$DOCKER_MACHINE.sql"
343-
rm -f "$TMP_PATH/ddl_undo_$DOCKER_MACHINE.sql"
344-
rm -f "$TMP_PATH/queries_custom_$DOCKER_MACHINE.sql"
400+
echo "Remove temp files..." # if exists
401+
rm -f "$TMP_PATH/after_db_init_code_tmp.sql"
402+
rm -f "$TMP_PATH/workload_custom_sql_tmp.sql"
403+
rm -f "$TMP_PATH/target_ddl_do_tmp.sql"
404+
rm -f "$TMP_PATH/target_ddl_undo_tmp.sql"
405+
rm -f "$TMP_PATH/target_config_tmp.conf"
406+
345407
if [ "$RUN_ON" = "localhost" ]; then
346408
rm -rf "$TMP_PATH/pg_nancy_home_${CURRENT_TS}"
347409
echo "Remove docker container"
@@ -375,22 +437,23 @@ function copyFile() {
375437
fi
376438
}
377439

378-
[ -v S3_CFG_PATH ] && copyFile $S3_CFG_PATH && docker_exec cp /machine_home/.s3cfg /root/.s3cfg
440+
[ ! -z ${S3_CFG_PATH+x} ] && copyFile $S3_CFG_PATH && docker_exec cp /machine_home/.s3cfg /root/.s3cfg
379441

380-
[ -v DB_DUMP_PATH ] && copyFile $DB_DUMP_PATH
381-
[ -v TARGET_CONFIG ] && copyFile $TARGET_CONFIG
382-
[ -v TARGET_DDL_DO ] && copyFile $TARGET_DDL_DO
383-
[ -v TARGET_DDL_UNDO ] && copyFile $TARGET_DDL_UNDO
384-
[ -v WORKLOAD_CUSTOM_SQL ] && copyFile $WORKLOAD_CUSTOM_SQL
385-
[ -v WORKLOAD_FULL_PATH ] && copyFile $WORKLOAD_FULL_PATH
442+
[ ! -z ${DB_DUMP_PATH+x} ] && copyFile $DB_DUMP_PATH
443+
[ ! -z ${TARGET_CONFIG+x} ] && copyFile $TARGET_CONFIG
444+
[ ! -z ${TARGET_DDL_DO+x} ] && copyFile $TARGET_DDL_DO
445+
[ ! -z ${TARGET_DDL_UNDO+x} ] && copyFile $TARGET_DDL_UNDO
446+
[ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && copyFile $WORKLOAD_CUSTOM_SQL
447+
[ ! -z ${WORKLOAD_FULL_PATH+x} ] && copyFile $WORKLOAD_FULL_PATH
386448

387449
## Apply machine features
388450
# Dump
389451
sleep 1 # wait for postgres up&running
390452
DB_DUMP_FILENAME=$(basename $DB_DUMP_PATH)
391453
docker_exec bash -c "bzcat /machine_home/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres test"
392454
# After init database sql code apply
393-
if ([ -v AFTER_DB_INIT_CODE ] && [ "$AFTER_DB_INIT_CODE" != "" ])
455+
echo "Apply sql code after db init"
456+
if ([ ! -z ${AFTER_DB_INIT_CODE+x} ] && [ "$AFTER_DB_INIT_CODE" != "" ])
394457
then
395458
AFTER_DB_INIT_CODE_FILENAME=$(basename $AFTER_DB_INIT_CODE)
396459
if [[ $AFTER_DB_INIT_CODE =~ "s3://" ]]; then
@@ -402,13 +465,13 @@ then
402465
fi
403466
# Apply DDL code
404467
echo "Apply DDL SQL code"
405-
if ([ -v TARGET_DDL_DO ] && [ "$TARGET_DDL_DO" != "" ]); then
468+
if ([ ! -z ${TARGET_DDL_DO+x} ] && [ "$TARGET_DDL_DO" != "" ]); then
406469
TARGET_DDL_DO_FILENAME=$(basename $TARGET_DDL_DO)
407470
docker_exec bash -c "psql -U postgres test -E -f /machine_home/$TARGET_DDL_DO_FILENAME"
408471
fi
409472
# Apply postgres configuration
410-
echo "Apply postgres conf from /machine_home/conf_$DOCKER_MACHINE.tmp"
411-
if ([ -v TARGET_CONFIG ] && [ "$TARGET_CONFIG" != "" ]); then
473+
echo "Apply postgres conf"
474+
if ([ ! -z ${TARGET_CONFIG+x} ] && [ "$TARGET_CONFIG" != "" ]); then
412475
TARGET_CONFIG_FILENAME=$(basename $TARGET_CONFIG)
413476
docker_exec bash -c "cat /machine_home/$TARGET_CONFIG_FILENAME >> /etc/postgresql/$PG_VERSION/main/postgresql.conf"
414477
docker_exec bash -c "sudo /etc/init.d/postgresql restart"
@@ -419,13 +482,13 @@ docker_exec vacuumdb -U postgres test -j $(cat /proc/cpuinfo | grep processor |
419482
docker_exec bash -c "echo '' > /var/log/postgresql/postgresql-$PG_VERSION-main.log"
420483
# Execute workload
421484
echo "Execute workload..."
422-
if [ -v WORKLOAD_FULL_PATH ] && [ "$WORKLOAD_FULL_PATH" != '' ];then
485+
if [ ! -z ${WORKLOAD_FULL_PATH+x} ] && [ "$WORKLOAD_FULL_PATH" != '' ];then
423486
echo "Execute pgreplay queries..."
424487
docker_exec psql -U postgres test -c 'create role testuser superuser login;'
425488
WORKLOAD_FILE_NAME=$(basename $WORKLOAD_FULL_PATH)
426-
docker_exec bash -c "pgreplay -r -j ./$WORKLOAD_FILE_NAME"
489+
docker_exec bash -c "pgreplay -r -j /machine_home/$WORKLOAD_FILE_NAME"
427490
else
428-
if ([ -v WORKLOAD_CUSTOM_SQL ] && [ "$WORKLOAD_CUSTOM_SQL" != "" ]); then
491+
if ([ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && [ "$WORKLOAD_CUSTOM_SQL" != "" ]); then
429492
WORKLOAD_CUSTOM_FILENAME=$(basename $WORKLOAD_CUSTOM_SQL)
430493
echo "Execute custom sql queries..."
431494
docker_exec bash -c "psql -U postgres test -E -f /machine_home/$WORKLOAD_CUSTOM_FILENAME"
@@ -435,23 +498,29 @@ fi
435498
## Get statistics
436499
echo "Prepare JSON log..."
437500
docker_exec bash -c "/root/pgbadger/pgbadger -j $(cat /proc/cpuinfo | grep processor | wc -l) --prefix '%t [%p]: [%l-1] db=%d,user=%u (%a,%h)' /var/log/postgresql/* -f stderr -o /machine_home/$ARTIFACTS_FILENAME.json"
438-
echo "Upload JSON log..."
439501

502+
echo "Save JSON log..."
440503
if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then
441504
docker_exec s3cmd put /machine_home/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
442505
else
506+
logpath=$(docker_exec bash -c "psql -XtU postgres \
507+
-c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
508+
| grep / | sed -e 's/^[ \t]*//'")
509+
docker_exec bash -c "gzip -c $logpath > /machine_home/$ARTIFACTS_FILENAME.log.gz"
443510
if [ "$RUN_ON" = "localhost" ]; then
444511
cp "$TMP_PATH/pg_nancy_home_${CURRENT_TS}/"$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
512+
cp "$TMP_PATH/pg_nancy_home_${CURRENT_TS}/"$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
445513
elif [ "$RUN_ON" = "aws" ]; then
446-
docker-machine scp /machine_home/$ARTIFACTS_FILENAME.json $DOCKER_MACHINE:/home/ubuntu
514+
docker-machine scp $DOCKER_MACHINE:/home/ubuntu/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/
515+
docker-machine scp $DOCKER_MACHINE:/home/ubuntu/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/
447516
else
448517
>&2 echo "ASSERT: must not reach this point"
449518
exit 1
450519
fi
451520
fi
452521

453-
echo "Apply DDL undo SQL code from /machine_home/ddl_undo_$DOCKER_MACHINE.sql"
454-
if ([ -v TARGET_DDL_UNDO ] && [ "$TARGET_DDL_UNDO" != "" ]); then
522+
echo "Apply DDL undo SQL code"
523+
if ([ ! -z ${TARGET_DDL_UNDO+x} ] && [ "$TARGET_DDL_UNDO" != "" ]); then
455524
TARGET_DDL_UNDO_FILENAME=$(basename $TARGET_DDL_UNDO)
456525
docker_exec bash -c "psql -U postgres test -E -f /machine_home/$TARGET_DDL_UNDO_FILENAME"
457526
fi

0 commit comments

Comments (0)