Skip to content
This repository was archived by the owner on Aug 16, 2021. It is now read-only.

Commit d4467fb

Browse files
authored
Merge branch 'master' into master
2 parents ae1176e + 1ef4355 commit d4467fb

File tree

3 files changed

+41
-23
lines changed

3 files changed

+41
-23
lines changed

README.md

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,14 @@
1+
<a href="https://postgres.ai"><img src="https://img.shields.io/badge/Postgres-AI-orange.svg" alt="Postgres.AI" /></a>
2+
&nbsp;
3+
[![GitHub code size in bytes](https://img.shields.io/github/languages/code-size/badges/shields.svg)](github.com/postgres-ai/nancy)
4+
&nbsp;
5+
<img src="https://img.shields.io/docker/pulls/postgresmen/postgres-with-stuff.svg" />
6+
&nbsp;
17
[![CircleCI](https://circleci.com/gh/postgres-ai/nancy.svg?style=svg)](https://circleci.com/gh/postgres-ai/nancy)
28

3-
Description
9+
10+
About
11+
<img width="122" alt="screen shot 2018-09-18 at 03 04 09" src="https://user-images.githubusercontent.com/1345402/45656700-8a987f00-baef-11e8-87b6-cccf8f65ee8f.png" align="right">
412
===
513
Nancy helps to conduct automated database experiments.
614

docker/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ RUN apt-key adv --keyserver hkp://p80.pool.sks-keyservers.net:80 --recv-keys B97
2424
&& apt-get install -y git postgresql-client-10 pspg pgreplay jq etcd libjson-xs-perl \
2525
&& perl -MCPAN -e'install Text::CSV_XS' \
2626
&& git clone https://github.com/NikolayS/postgres_dba.git /root/postgres_dba \
27-
&& git clone https://github.com/NikolayS/pgbadger.git /root/pgbadger
27+
&& git clone https://github.com/darold/pgbadger.git /root/pgbadger
2828

2929
# additionally, install newer NodeJS, npm, Sqitch, and more
3030
RUN wget -q -S -O - https://deb.nodesource.com/setup_8.x | sudo bash \

nancy_run.sh

Lines changed: 31 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ CURRENT_TS=$(date +%Y%m%d_%H%M%S%N_%Z)
1313
DOCKER_MACHINE="nancy-$CURRENT_TS"
1414
DOCKER_MACHINE="${DOCKER_MACHINE//_/-}"
1515
KEEP_ALIVE=0
16+
DURATION_WRKLD=""
1617
VERBOSE_OUTPUT_REDIRECT=" > /dev/null"
1718
EBS_SIZE_MULTIPLIER=5
1819
POSTGRES_VERSION_DEFAULT=10
@@ -1065,7 +1066,7 @@ if [[ "$RUN_ON" == "localhost" ]]; then
10651066
if [[ -z ${CONTAINER_ID+x} ]]; then
10661067
CONTAINER_HASH=$(docker run --name="pg_nancy_${CURRENT_TS}" \
10671068
-v $TMP_PATH:/machine_home \
1068-
-dit "postgresmen/postgres-with-stuff:pg${PG_VERSION}" \
1069+
-dit "postgresmen/postgres-with-stuff:postgres${PG_VERSION}_pgbadger10" \
10691070
)
10701071
else
10711072
CONTAINER_HASH="$CONTAINER_ID"
@@ -1388,7 +1389,9 @@ function apply_postgres_configuration() {
13881389
# None
13891390
#######################################
13901391
function prepare_start_workload() {
1391-
#Save before workload log
1392+
msg "Execute vacuumdb..."
1393+
docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze
1394+
13921395
msg "Save preparation log"
13931396
logpath=$( \
13941397
docker_exec bash -c "psql -XtU postgres \
@@ -1398,17 +1401,16 @@ function prepare_start_workload() {
13981401
docker_exec bash -c "mkdir $MACHINE_HOME/$ARTIFACTS_FILENAME"
13991402
docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/postgresql.prepare.log.gz"
14001403

1401-
# Clear statistics and log
1402-
msg "Execute vacuumdb..."
1403-
docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze
1404+
msg "Reset pg_stat_*** and Postgres log"
1405+
docker_exec psql -U postgres $DB_NAME -c 'select pg_stat_reset(), pg_stat_statements_reset();' >/dev/null
14041406
docker_exec bash -c "echo '' > /var/log/postgresql/postgresql-$PG_VERSION-main.log"
14051407
}
14061408

14071409
#######################################
14081410
# Execute workload.
14091411
# Globals:
14101412
# WORKLOAD_REAL, WORKLOAD_REAL_REPLAY_SPEED, WORKLOAD_CUSTOM_SQL, MACHINE_HOME,
1411-
# DB_NAME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias
1413+
# DURATION_WRKLD, DB_NAME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias
14121414
# Arguments:
14131415
# None
14141416
# Returns:
@@ -1439,6 +1441,7 @@ function execute_workload() {
14391441
END_TIME=$(date +%s)
14401442
DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
14411443
msg "Time taken to execute workload: $DURATION."
1444+
DURATION_WRKLD="$DURATION"
14421445
}
14431446

14441447
#######################################
@@ -1490,7 +1493,7 @@ function collect_results() {
14901493
if [[ "$RUN_ON" == "localhost" ]]; then
14911494
docker cp $CONTAINER_HASH:$MACHINE_HOME/$ARTIFACTS_FILENAME $ARTIFACTS_DESTINATION/
14921495
elif [[ "$RUN_ON" == "aws" ]]; then
1493-
mkdir $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME
1496+
mkdir -p $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME
14941497
docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME/* $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/
14951498
else
14961499
err "ASSERT: must not reach this point"
@@ -1531,17 +1534,24 @@ apply_ddl_undo_code
15311534

15321535
END_TIME=$(date +%s)
15331536
DURATION=$(echo $((END_TIME-START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
1534-
echo -e "$(date "+%Y-%m-%d %H:%M:%S"): Run done for $DURATION"
1535-
echo -e " JSON Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.json"
1536-
echo -e " HTML Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.html"
1537-
echo -e " Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.workload.log.gz"
1538-
echo -e " Prepare log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.prepare.log.gz"
1539-
echo -e " Postgresql configuration log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.conf"
1540-
1541-
echo -e " -------------------------------------------"
1542-
echo -e " Workload summary:"
1543-
echo -e " Summarized query duration:\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_duration') " ms"
1544-
echo -e " Queries:\t\t\t" $( docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_number')
1545-
echo -e " Query groups:\t\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.normalyzed_info| length')
1546-
echo -e " Errors:\t\t\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.errors_number')
1547-
echo -e "-------------------------------------------"
1537+
msg "Done."
1538+
echo -e "------------------------------------------------------------------------------"
1539+
echo -e "Artifacts (collected in \"$ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/\"):"
1540+
echo -e " Postgres config: postgresql.conf"
1541+
echo -e " Postgres logs: postgresql.prepare.log.gz (preparation),"
1542+
echo -e " postgresql.workload.log.gz (workload)"
1543+
echo -e " pgBadger reports: pgbadger.html (for humans),"
1544+
echo -e " pgbadger.json (for robots)"
1545+
echo -e "       Stat snapshots:   pg_stat_statements.csv,"
1546+
echo -e " pg_stat_***.csv"
1547+
echo -e "------------------------------------------------------------------------------"
1548+
echo -e "Total execution time: $DURATION"
1549+
echo -e "------------------------------------------------------------------------------"
1550+
echo -e "Workload:"
1551+
echo -e " Execution time: $DURATION_WRKLD"
1552+
echo -e " Total query time: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_duration') " ms"
1553+
echo -e " Queries: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.queries_number')
1554+
echo -e " Query groups: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.normalyzed_info | length')
1555+
echo -e " Errors: "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.overall_stat.errors_number')
1556+
echo -e "  Error groups:     "$(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.json | jq '.error_info | length')
1557+
echo -e "------------------------------------------------------------------------------"

0 commit comments

Comments
 (0)