@@ -13,6 +13,7 @@ CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
 DOCKER_MACHINE="nancy-$CURRENT_TS"
 DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
 KEEP_ALIVE=0
+DURATION_WRKLD=""
 VERBOSE_OUTPUT_REDIRECT=" > /dev/null"
 EBS_SIZE_MULTIPLIER=5
 POSTGRES_VERSION_DEFAULT=10
@@ -1365,7 +1366,9 @@ function apply_postgres_configuration() {
 #   None
 #######################################
 function prepare_start_workload() {
-  # Save before workload log
+  msg "Execute vacuumdb..."
+  docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze
+
   msg "Save prepaparation log"
   logpath=$( \
     docker_exec bash -c "psql -XtU postgres \
@@ -1375,17 +1378,16 @@ function prepare_start_workload() {
   docker_exec bash -c "mkdir $MACHINE_HOME/$ARTIFACTS_FILENAME"
   docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/postgresql.prepare.log.gz"
 
-  # Clear statistics and log
-  msg "Execute vacuumdb..."
-  docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze
+  msg "Reset pg_stat_*** and Postgres log"
+  docker_exec psql -U postgres $DB_NAME -c 'select pg_stat_reset(), pg_stat_statements_reset();' > /dev/null
   docker_exec bash -c "echo '' > /var/log/postgresql/postgresql-$PG_VERSION-main.log"
 }
 
 #######################################
 # Execute workload.
 # Globals:
 #   WORKLOAD_REAL, WORKLOAD_REAL_REPLAY_SPEED, WORKLOAD_CUSTOM_SQL, MACHINE_HOME,
-#   DB_NAME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias
+#   DURATION_WRKLD, DB_NAME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias
 # Arguments:
 #   None
 # Returns:
@@ -1414,6 +1416,7 @@ function execute_workload() {
   END_TIME=$(date +%s)
   DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
   msg "Time taken to execute workload: $DURATION."
+  DURATION_WRKLD="$DURATION"
 }
 
 #######################################
@@ -1506,17 +1509,24 @@ apply_ddl_undo_code
 
 END_TIME=$(date +%s)
 DURATION=$(echo $((END_TIME-START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
-echo -e "$(date "+%Y-%m-%d %H:%M:%S"): Run done for $DURATION"
-echo -e "  JSON Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.json"
-echo -e "  HTML Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.html"
-echo -e "  Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.workload.log.gz"
-echo -e "  Prepare log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.prepare.log.gz"
-echo -e "  Postgresql configuration log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.conf"
-
-echo -e "-------------------------------------------"
-echo -e "Workload summary:"
-echo -e "  Summarized query duration:\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_duration') " ms"
-echo -e "  Queries:\t\t\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_number')
-echo -e "  Query groups:\t\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.normalyzed_info | length')
-echo -e "  Errors:\t\t\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.errors_number')
-echo -e "-------------------------------------------"
+msg "Done."
+echo -e "------------------------------------------------------------------------------"
+echo -e "Artifacts (collected in \"$ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/\"):"
+echo -e "  Postgres config:  postgresql.conf"
+echo -e "  Postgres logs:    postgresql.prepare.log.gz (preparation),"
+echo -e "                    postgresql.workload.log.gz (workload)"
+echo -e "  pgBadger reports: pgbadger.html (for humans),"
+echo -e "                    pgbadger.json (for robots)"
+echo -e "  Stat stapshots:   pg_stat_statements.csv,"
+echo -e "                    pg_stat_***.csv"
+echo -e "------------------------------------------------------------------------------"
+echo -e "Total execution time: $DURATION"
+echo -e "------------------------------------------------------------------------------"
+echo -e "Workload:"
+echo -e "  Execution time:   $DURATION_WRKLD"
+echo -e "  Total query time: " $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_duration') " ms"
+echo -e "  Queries:          " $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_number')
+echo -e "  Query groups:     " $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.normalyzed_info | length')
+echo -e "  Errors:           " $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.errors_number')
+echo -e "  Errors groups:    " $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.error_info | length')
+echo -e "------------------------------------------------------------------------------"