This repository was archived by the owner on Aug 16, 2021. It is now read-only.

Commit 9a0975d

Merge pull request #68 from postgres-ai/docs_and_improvements
Some code style improvements
2 parents: 246e7b6 + 8d41dce

14 files changed (+42 / -36 lines)

README.md

Lines changed: 3 additions & 3 deletions

@@ -107,15 +107,15 @@ echo "create table hello_world as select i::int4 from generate_series(1, (10^6):
 # (seqscan is expected, total time ~150ms, depending on resources)
 nancy run \
   --run-on localhost \
-  --db-dump-path file://$(pwd)/sample.dump.bz2 \
+  --db-dump file://$(pwd)/sample.dump.bz2 \
   --tmp-path /tmp \
   --workload-custom-sql "select count(1) from hello_world where i between 100000 and 100010;"

 # Now check how a regular btree index affects performance
 # (expected total time: ~0.05ms)
 nancy run \
   --run-on localhost \
-  --db-dump-path file://$(pwd)/sample.dump.bz2 \
+  --db-dump file://$(pwd)/sample.dump.bz2 \
   --tmp-path /tmp \
   --workload-custom-sql "select count(1) from hello_world where i between 100000 and 100010;" \
   --target-ddl-do "create index i_hello_world_i on hello_world(i);" \

@@ -128,7 +128,7 @@ nancy run \
   --run-on aws \
   --aws-ec2-type "i3.large" \
   --aws-keypair-name awskey --aws-ssh-key-path file://$(echo ~)/.ssh/awskey.pem \
-  --db-dump-path "create table a as select i::int4 from generate_series(1, (10^9)::int) _(i);" \
+  --db-dump "create table a as select i::int4 from generate_series(1, (10^9)::int) _(i);" \
   --workload-custom-sql "select count(1) from a where i between 10 and 20;"
 ```
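
Taken together, the two hunks above show that the renamed `--db-dump` option accepts either a `file://` path to an existing dump or a plain SQL string (which, per the `checkParams` change further down, is written out to a temporary `.sql` file). A minimal side-by-side sketch, with an illustrative row count smaller than the README's AWS example:

```bash
# 1) A dump file referenced by a file:// URL:
nancy run \
  --run-on localhost \
  --db-dump file://$(pwd)/sample.dump.bz2 \
  --tmp-path /tmp \
  --workload-custom-sql "select count(1) from hello_world where i between 100000 and 100010;"

# 2) Plain SQL passed as the value; nancy_run.sh stores it in a temporary .sql file:
nancy run \
  --run-on localhost \
  --db-dump "create table a as select i::int4 from generate_series(1, (10^6)::int) _(i);" \
  --tmp-path /tmp \
  --workload-custom-sql "select count(1) from a where i between 10 and 20;"
```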

nancy_run.sh

Lines changed: 15 additions & 15 deletions

@@ -91,7 +91,7 @@ while true; do

 Reserved / Not yet implemented.

-\033[1m--db-dump-path\033[22m (string)
+\033[1m--db-dump\033[22m (string)

 Specify the path to database dump (created by pg_dump) to be used as an input.

@@ -197,7 +197,7 @@ while true; do
     --db-prepared-snapshot )
       #Still unsupported
       DB_PREPARED_SNAPSHOT="$2"; shift 2 ;;
-    --db-dump-path )
+    --db-dump )
      DB_DUMP_PATH="$2"; shift 2 ;;
     --after-db-init-code )
       #s3 url|filename|content
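
For context, the option above is consumed by the script's long-option loop; a simplified, self-contained sketch of that `case`/`shift 2` pattern follows (the loop condition, the value guard, and the error branch are illustrative, not copied from nancy_run.sh):

```bash
#!/bin/bash
# Simplified sketch of a long-option loop in the style of nancy_run.sh:
# each recognized option stores its value, then shifts away the flag and the value.
while [ $# -gt 0 ]; do
  # Every long option here expects a value, so require at least two remaining args.
  [ $# -lt 2 ] && { >&2 echo "ERROR: option '$1' expects a value."; exit 1; }
  case "$1" in
    --db-prepared-snapshot )
      # Still unsupported upstream, but parsed.
      DB_PREPARED_SNAPSHOT="$2"; shift 2 ;;
    --db-dump )
      DB_DUMP_PATH="$2"; shift 2 ;;
    --tmp-path )
      TMP_PATH="$2"; shift 2 ;;
    * )
      >&2 echo "ERROR: Invalid option '$1'."; exit 1 ;;
  esac
done

echo "dump source: ${DB_DUMP_PATH:-<none>}, tmp path: ${TMP_PATH:-<none>}"
# Example: ./parse_sketch.sh --db-dump "file:///tmp/test.dump.bz2" --tmp-path /tmp
```
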
@@ -377,10 +377,16 @@ function checkParams() {
   [ ! -z ${WORKLOAD_REAL+x} ] && let workloads_count=$workloads_count+1
   [ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && let workloads_count=$workloads_count+1

+  #--db-prepared-snapshot or --db-dump
+  if ([ -z ${DB_PREPARED_SNAPSHOT+x} ] && [ -z ${DB_DUMP_PATH+x} ]); then
+    >&2 echo "ERROR: The object (database) is not defined."
+    exit 1;
+  fi
+
   # --workload-real or --workload-basis-path or --workload-custom-sql
   if [ "$workloads_count" -eq "0" ]
   then
-    >&2 echo "ERROR: Workload not given."
+    >&2 echo "ERROR: The workload is not defined."
     exit 1;
   fi

@@ -390,12 +396,6 @@ function checkParams() {
     exit 1
   fi

-  #--db-prepared-snapshot or --db-dump-path
-  if ([ -z ${DB_PREPARED_SNAPSHOT+x} ] && [ -z ${DB_DUMP_PATH+x} ]); then
-    >&2 echo "ERROR: Snapshot or dump not given."
-    exit 1;
-  fi
-
   if ([ ! -z ${DB_PREPARED_SNAPSHOT+x} ] && [ ! -z ${DB_DUMP_PATH+x} ])
   then
     >&2 echo "ERROR: Both snapshot and dump sources are given."
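
The relocated check relies on the `${VAR+x}` expansion, which is empty only when the variable is unset, so `[ -z ${VAR+x} ]` distinguishes "never set" from "set to an empty string". A small standalone sketch of the same "exactly one database source" validation, using hypothetical SNAPSHOT and DUMP variables in place of DB_PREPARED_SNAPSHOT and DB_DUMP_PATH:

```bash
#!/bin/bash
# Hypothetical SNAPSHOT/DUMP stand in for DB_PREPARED_SNAPSHOT/DB_DUMP_PATH.
# ${VAR+x} expands to "x" when VAR is set (even to "") and to nothing when unset,
# so [ -z ${VAR+x} ] is true only for an unset variable.

if [ -z ${SNAPSHOT+x} ] && [ -z ${DUMP+x} ]; then
  >&2 echo "ERROR: The object (database) is not defined."
  exit 1
fi

if [ ! -z ${SNAPSHOT+x} ] && [ ! -z ${DUMP+x} ]; then
  >&2 echo "ERROR: Both snapshot and dump sources are given."
  exit 1
fi

echo "OK: exactly one database source is defined."
# Example: DUMP=file:///tmp/test.dump.bz2 ./check_sketch.sh
```
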
@@ -408,7 +408,7 @@ function checkParams() {
     echo "$DB_DUMP_PATH" > $TMP_PATH/db_dump_tmp.sql
     DB_DUMP_PATH="$TMP_PATH/db_dump_tmp.sql"
   else
-    [ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as db-dump-path will use as filename"
+    [ "$DEBUG" -eq "1" ] && echo "DEBUG: Value given as db-dump will use as filename"
   fi
   DB_DUMP_FILENAME=$(basename $DB_DUMP_PATH)
   DB_DUMP_EXT=${DB_DUMP_FILENAME##*.}
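
After that branch the script derives the dump's file name and extension with `basename` and a prefix-stripping parameter expansion; a brief sketch with an illustrative value:

```bash
#!/bin/bash
# Illustrative value; in nancy_run.sh this is whatever --db-dump resolved to.
DB_DUMP_PATH="/tmp/test.dump.bz2"

DB_DUMP_FILENAME=$(basename "$DB_DUMP_PATH")  # -> test.dump.bz2
DB_DUMP_EXT=${DB_DUMP_FILENAME##*.}           # strip the longest "*." prefix -> bz2

echo "file: $DB_DUMP_FILENAME, extension: $DB_DUMP_EXT"
```
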
@@ -1008,9 +1008,9 @@ echo -e "$(date "+%Y-%m-%d %H:%M:%S"): Run done for $DURATION"
 echo -e " Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json"
 echo -e " Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.log.gz"
 echo -e " -------------------------------------------"
-echo -e " Summary:"
-echo -e " Queries duration:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
-echo -e " Queries count:\t\t" $( docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
-echo -e " Normalized queries count:\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
-echo -e " Errors count:\t\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
+echo -e " Workload summary:"
+echo -e " Summarized query duration:\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms"
+echo -e " Queries:\t\t\t" $( docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number')
+echo -e " Query groups:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length')
+echo -e " Errors:\t\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number')
 echo -e "-------------------------------------------"
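
The renamed summary lines are all produced by pulling fields out of the run's JSON artifact with jq. Here is a minimal sketch against a hand-made file that only mimics those field names; the numbers and query text are invented, the file path is illustrative, and the real script reads the artifact through docker_exec:

```bash
#!/bin/bash
# Hand-made stand-in for the artifacts JSON; field names follow the script,
# numbers and the query text are invented for illustration only.
cat > /tmp/artifacts_sample.json <<'JSON'
{
  "overall_stat": { "queries_duration": 152.4, "queries_number": 1, "errors_number": 0 },
  "normalyzed_info": { "select count(?) from hello_world where i between ? and ?;": {} }
}
JSON

echo -e " Workload summary:"
echo -e "  Summarized query duration:\t" $(jq '.overall_stat.queries_duration' /tmp/artifacts_sample.json) " ms"
echo -e "  Queries:\t\t\t" $(jq '.overall_stat.queries_number' /tmp/artifacts_sample.json)
echo -e "  Query groups:\t\t" $(jq '.normalyzed_info | length' /tmp/artifacts_sample.json)
echo -e "  Errors:\t\t\t" $(jq '.overall_stat.errors_number' /tmp/artifacts_sample.json)
```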

tests/nancy_run_before_init_code.sh

Lines changed: 1 addition & 1 deletion

@@ -3,7 +3,7 @@
 output=$(${BASH_SOURCE%/*}/../nancy run \
   --before-db-init-code "select abs from beforeinittable;" \
   --workload-custom-sql "file://$srcDir/custom.sql" \
-  --db-dump-path "file://$srcDir/test.dump.bz2" \
+  --db-dump "file://$srcDir/test.dump.bz2" \
   --tmp-path $srcDir/tmp \
   2>&1)

tests/nancy_run_ebs_disk_size_warning.sh

Lines changed: 1 addition & 1 deletion

@@ -3,7 +3,7 @@
 output=$(${BASH_SOURCE%/*}/../nancy run \
   --ebs-volume-size sa \
   --workload-custom-sql "file://$srcDir/custom.sql" \
-  --db-dump-path "file://$srcDir/test.dump.bz2" \
+  --db-dump "file://$srcDir/test.dump.bz2" \
   --tmp-path $srcDir/tmp \
   2>&1)

tests/nancy_run_localhost_real_workload.sh

Lines changed: 5 additions & 3 deletions

@@ -9,12 +9,14 @@ fi
 nancyRun="$parentDir/nancy_run.sh"

 output=$(
-  $nancyRun --workload-real "file://$srcDir/sample.replay" \
-  --db-dump-path "file://$srcDir/test.dump.bz2" \
+  $nancyRun \
+  --db-dump "create table hello_world as select i, i as id from generate_series(1, 1000) _(i);" \
+  --workload-real "file://$srcDir/sample.replay" \
   --tmp-path $srcDir/tmp 2>&1
 )

-if [[ $output =~ "Queries duration:" ]]; then
+regex="Queries:[[:blank:]]*1"
+if [[ $output =~ $regex ]]; then
   echo -e "\e[36mOK\e[39m"
 else
   >&2 echo -e "\e[31mFAILED\e[39m"
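
The updated tests keep the pattern in a shell variable and match it with `=~`; this matters because a quoted right-hand side of `=~` is taken literally, while an unquoted variable is interpreted as an extended regular expression. A standalone sketch of the same check, with an illustrative `output` value instead of a real nancy run:

```bash
#!/bin/bash
# Illustrative output; in the test this is the captured output of a real nancy run.
output=" Queries:            1"

# An unquoted variable on the right of =~ is treated as an ERE;
# [[:blank:]] matches the spaces or tabs between the label and the value.
regex="Queries:[[:blank:]]*1"
if [[ $output =~ $regex ]]; then
  echo -e "\e[36mOK\e[39m"
else
  >&2 echo -e "\e[31mFAILED\e[39m"
  exit 1
fi
```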

tests/nancy_run_localhost_simple_dump.sh

Lines changed: 3 additions & 2 deletions

@@ -10,11 +10,12 @@ nancyRun="$parentDir/nancy_run.sh"

 output=$(
   $nancyRun --workload-custom-sql "file://$srcDir/custom.sql" \
-  --db-dump-path "file://$srcDir/test.dump.bz2" \
+  --db-dump "file://$srcDir/test.dump.bz2" \
   --tmp-path $srcDir/tmp 2>&1
 )

-if [[ $output =~ "Queries duration:" ]]; then
+regex="Errors:[[:blank:]]*0"
+if [[ $output =~ $regex ]]; then
   echo -e "\e[36mOK\e[39m"
 else
   >&2 echo -e "\e[31mFAILED\e[39m"

tests/nancy_run_localhost_simple_dump_with_index.sh

Lines changed: 3 additions & 2 deletions

@@ -11,12 +11,13 @@ nancyRun="$parentDir/nancy_run.sh"
 output=$(
   $nancyRun --workload-custom-sql "file://$srcDir/custom.sql" \
   --tmp-path ${srcDir}/tmp \
-  --db-dump-path "file://$srcDir/test.dump.bz2" \
+  --db-dump "file://$srcDir/test.dump.bz2" \
   --target-ddl-do "create index i_speedup on t1 using btree(val);" \
   --target-ddl-undo "drop index i_speedup;" 2>&1
 )

-if [[ $output =~ "Queries duration:" ]]; then
+regex="Errors:[[:blank:]]*0"
+if [[ $output =~ $regex ]]; then
   echo -e "\e[36mOK\e[39m"
 else
   >&2 echo -e "\e[31mFAILED\e[39m"

tests/nancy_run_localhost_simple_gz_dump.sh

Lines changed: 3 additions & 2 deletions

@@ -10,11 +10,12 @@ nancyRun="$parentDir/nancy_run.sh"

 output=$(
   $nancyRun --workload-custom-sql "file://$srcDir/custom.sql" \
-  --db-dump-path "file://$srcDir/test.dump.gz" \
+  --db-dump "file://$srcDir/test.dump.gz" \
   --tmp-path $srcDir/tmp 2>&1
 )

-if [[ $output =~ "Queries duration:" ]]; then
+regex="Errors:[[:blank:]]*0"
+if [[ $output =~ $regex ]]; then
   echo -e "\e[36mOK\e[39m"
 else
   >&2 echo -e "\e[31mFAILED\e[39m"

tests/nancy_run_localhost_simple_sql_dump.sh

Lines changed: 3 additions & 2 deletions

@@ -10,11 +10,12 @@ nancyRun="$parentDir/nancy_run.sh"

 output=$(
   $nancyRun --workload-custom-sql "file://$srcDir/custom.sql" \
-  --db-dump-path "file://$srcDir/test.dump.sql" \
+  --db-dump "file://$srcDir/test.dump.sql" \
   --tmp-path $srcDir/tmp 2>&1
 )

-if [[ $output =~ "Queries duration:" ]]; then
+regex="Errors:[[:blank:]]*0"
+if [[ $output =~ $regex ]]; then
   echo -e "\e[36mOK\e[39m"
 else
   >&2 echo -e "\e[31mFAILED\e[39m"

tests/nancy_run_options_both_dump_snapshot.sh

Lines changed: 1 addition & 1 deletion

@@ -7,7 +7,7 @@ read -r -d '' params <<PARAMS
 --s3cfg-path "/home/someuser/.s3cfg" \
 --workload-real "s3://somebucket/db.sql.30min.pgreplay" \
 --tmp-path tmp \
---db-dump-path "s3://somebucket/dump.sql.bz2" \
+--db-dump "s3://somebucket/dump.sql.bz2" \
 --db-prepared-snapshot "s3://somebucket/snapshot"
 PARAMS
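
As the hunk header shows, this test assembles its option string inside a heredoc via `read -r -d ''`; a minimal sketch of that pattern (option values copied from the test above, the trailing echo added for illustration):

```bash
#!/bin/bash
# read -r -d '' slurps the whole heredoc into $params; no NUL delimiter is ever
# found, so read returns non-zero, but the variable is still filled.
read -r -d '' params <<PARAMS
--tmp-path tmp \
--db-dump "s3://somebucket/dump.sql.bz2" \
--db-prepared-snapshot "s3://somebucket/snapshot"
PARAMS

echo "$params"
```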
