This repository was archived by the owner on Aug 16, 2021. It is now read-only.

Commit 1490c30

Merge pull request #104 from postgres-ai/better_summary
Better summary + Reset pg_stat_* and logs correctly
2 parents (d5862e0 + e4a2f9e), commit 1490c30

File tree: 2 files changed (+29 / -20 lines)

README.md

Lines changed: 0 additions & 1 deletion
@@ -8,7 +8,6 @@ Nancy helps to conduct automated database experiments.
 The Nancy Command Line Interface is a unified way to manage automated
 database experiments either in clouds or on-premise.
 
-
 What is a Database Experiment?
 ===
 Database experiment is a set of actions performed to test

nancy_run.sh

Lines changed: 29 additions & 19 deletions
@@ -13,6 +13,7 @@ CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
 DOCKER_MACHINE="nancy-$CURRENT_TS"
 DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
 KEEP_ALIVE=0
+DURATION_WRKLD=""
 VERBOSE_OUTPUT_REDIRECT=" > /dev/null"
 EBS_SIZE_MULTIPLIER=5
 POSTGRES_VERSION_DEFAULT=10
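
For context, the new DURATION_WRKLD variable relies on Bash scoping: an assignment inside a function without `local` updates the script-level global, so a value set later in execute_workload() is still visible when the final summary is printed. A minimal sketch of that pattern (names and values here are illustrative, not taken from nancy_run.sh):

    #!/bin/bash
    # Global initialized up front, filled in by a function, read later.
    DURATION_WRKLD=""

    run_workload() {
      # ... workload would run here ...
      DURATION_WRKLD="0:01:42"   # no 'local', so this updates the global
    }

    run_workload
    echo "Workload execution time: $DURATION_WRKLD"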
@@ -1365,7 +1366,9 @@ function apply_postgres_configuration() {
 #   None
 #######################################
 function prepare_start_workload() {
-  #Save before workload log
+  msg "Execute vacuumdb..."
+  docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze
+
   msg "Save prepaparation log"
   logpath=$( \
     docker_exec bash -c "psql -XtU postgres \
@@ -1375,17 +1378,16 @@ function prepare_start_workload() {
   docker_exec bash -c "mkdir $MACHINE_HOME/$ARTIFACTS_FILENAME"
   docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/postgresql.prepare.log.gz"
 
-  # Clear statistics and log
-  msg "Execute vacuumdb..."
-  docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze
+  msg "Reset pg_stat_*** and Postgres log"
+  docker_exec psql -U postgres $DB_NAME -c 'select pg_stat_reset(), pg_stat_statements_reset();' >/dev/null
   docker_exec bash -c "echo '' > /var/log/postgresql/postgresql-$PG_VERSION-main.log"
 }
 
 #######################################
 # Execute workload.
 # Globals:
 #   WORKLOAD_REAL, WORKLOAD_REAL_REPLAY_SPEED, WORKLOAD_CUSTOM_SQL, MACHINE_HOME,
-#   DB_NAME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias
+#   DURATION_WRKLD, DB_NAME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias
 # Arguments:
 #   None
 # Returns:
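
The reordering above is the core of the fix: `vacuumdb --analyze` itself writes log lines and bumps the statistics counters, so the reset of the pg_stat_* views and the truncation of the Postgres log now happen after it, giving the workload run a clean baseline. A standalone sketch of the same sequence, assuming a local Postgres with pg_stat_statements installed (DB_NAME, CPU_CNT, and the log path are placeholders, not values from the script):

    #!/bin/bash
    set -euo pipefail
    DB_NAME="test"                                        # placeholder
    CPU_CNT="$(nproc)"                                    # placeholder
    PG_LOG="/var/log/postgresql/postgresql-10-main.log"   # placeholder

    # 1. Refresh planner statistics before measuring anything.
    vacuumdb -U postgres "$DB_NAME" -j "$CPU_CNT" --analyze

    # 2. Only then reset the cumulative statistics views...
    psql -U postgres "$DB_NAME" \
      -c 'select pg_stat_reset(), pg_stat_statements_reset();' >/dev/null

    # 3. ...and start the Postgres log from scratch for the workload phase.
    : > "$PG_LOG"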
@@ -1414,6 +1416,7 @@ function execute_workload() {
   END_TIME=$(date +%s)
   DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
   msg "Time taken to execute workload: $DURATION."
+  DURATION_WRKLD="$DURATION"
 }
 
 #######################################
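
The awk one-liner used for both DURATION values formats an elapsed number of seconds as H:MM:SS. A quick way to sanity-check it in isolation (the input value is made up):

    #!/bin/bash
    # 3725 seconds = 1 hour, 2 minutes, 5 seconds -> prints "1:02:05"
    ELAPSED=3725
    echo "$ELAPSED" | awk '{printf "%d:%02d:%02d\n", $1/3600, ($1/60)%60, $1%60}'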
@@ -1506,17 +1509,24 @@ apply_ddl_undo_code
 
 END_TIME=$(date +%s)
 DURATION=$(echo $((END_TIME-START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
-echo -e "$(date "+%Y-%m-%d %H:%M:%S"): Run done for $DURATION"
-echo -e "  JSON Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.json"
-echo -e "  HTML Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.html"
-echo -e "  Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.workload.log.gz"
-echo -e "  Prepare log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.prepare.log.gz"
-echo -e "  Postgresql configuration log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.conf"
-
-echo -e "  -------------------------------------------"
-echo -e "  Workload summary:"
-echo -e "  Summarized query duration:\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_duration') " ms"
-echo -e "  Queries:\t\t\t" $( docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_number')
-echo -e "  Query groups:\t\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.normalyzed_info| length')
-echo -e "  Errors:\t\t\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.errors_number')
-echo -e "-------------------------------------------"
+msg "Done."
+echo -e "------------------------------------------------------------------------------"
+echo -e "Artifacts (collected in \"$ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/\"):"
+echo -e "  Postgres config: postgresql.conf"
+echo -e "  Postgres logs: postgresql.prepare.log.gz (preparation),"
+echo -e "                 postgresql.workload.log.gz (workload)"
+echo -e "  pgBadger reports: pgbadger.html (for humans),"
+echo -e "                    pgbadger.json (for robots)"
+echo -e "  Stat stapshots: pg_stat_statements.csv,"
+echo -e "                  pg_stat_***.csv"
+echo -e "------------------------------------------------------------------------------"
+echo -e "Total execution time: $DURATION"
+echo -e "------------------------------------------------------------------------------"
+echo -e "Workload:"
+echo -e "  Execution time: $DURATION_WRKLD"
+echo -e "  Total query time: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_duration') " ms"
+echo -e "  Queries: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_number')
+echo -e "  Query groups: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.normalyzed_info | length')
+echo -e "  Errors: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.errors_number')
+echo -e "  Errors groups: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.error_info | length')
+echo -e "------------------------------------------------------------------------------"
