Skip to content

Commit acd87e0

Browse files
drrrrr900 authored and akorotkov committed
Update perf-test: rename to TPC-C, support warehouses/vus_scale/pool_size
- Rename workflow to "TPC-C Performance Test"
- Replace scale-factor with warehouses throughout
- Pass vus-scale and pool-size to perf_compare.py
- Update artifact name pattern matching for new naming convention
- Disable build cache
- Fail job if PG fails to start
- Preserve PG logs for each run
- Register the OrioleDB extension
1 parent 3c72f7a commit acd87e0

File tree

5 files changed

+63
-21
lines changed

5 files changed

+63
-21
lines changed

.github/workflows/perf-test.yml

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
name: perf-test
1+
name: TPC-C Performance Test
22

33
on:
44
pull_request:
@@ -94,13 +94,18 @@ jobs:
9494
run: bash ./orioledb/ci/perf_build.sh
9595
- name: Start PostgreSQL
9696
run: bash ./orioledb/ci/perf_pg_start.sh
97+
- name: Verify PostgreSQL is ready
98+
run: |
99+
export PATH="$GITHUB_WORKSPACE/pgsql/bin:$PATH"
100+
pg_isready -t 5
101+
psql -d postgres -c "SELECT orioledb_version();"
97102
- name: Resolve current user
98103
run: echo "PGUSER=$(whoami)" >> $GITHUB_ENV
99104
- name: Run TPC-C benchmark
100105
uses: stroppy-io/stroppy-action@main
101106
with:
102107
preset: tpcc
103-
driver-url: postgres://${{ env.PGUSER }}@localhost:5432/postgres
108+
driver-url: postgres://${{ env.PGUSER }}@localhost:5432/postgres?sslmode=disable
104109
artifact-name: perf-results-base-${{ matrix.warehouses }}W-${{ matrix.run }}
105110
- name: Stop PostgreSQL
106111
if: always()
@@ -146,13 +151,18 @@ jobs:
146151
run: bash ./orioledb/ci/perf_build.sh
147152
- name: Start PostgreSQL
148153
run: bash ./orioledb/ci/perf_pg_start.sh
154+
- name: Verify PostgreSQL is ready
155+
run: |
156+
export PATH="$GITHUB_WORKSPACE/pgsql/bin:$PATH"
157+
pg_isready -t 5
158+
psql -d postgres -c "SELECT orioledb_version();"
149159
- name: Resolve current user
150160
run: echo "PGUSER=$(whoami)" >> $GITHUB_ENV
151161
- name: Run TPC-C benchmark
152162
uses: stroppy-io/stroppy-action@main
153163
with:
154164
preset: tpcc
155-
driver-url: postgres://${{ env.PGUSER }}@localhost:5432/postgres
165+
driver-url: postgres://${{ env.PGUSER }}@localhost:5432/postgres?sslmode=disable
156166
artifact-name: perf-results-head-${{ matrix.warehouses }}W-${{ matrix.run }}
157167
- name: Stop PostgreSQL
158168
if: always()
@@ -187,6 +197,8 @@ jobs:
187197
--runs "${{ inputs.bench_runs || '1' }}" \
188198
--duration "${{ inputs.bench_duration || '5m' }}" \
189199
--warehouses "${{ inputs.warehouses || '1' }}" \
200+
--vus-scale "${{ inputs.vus_scale || '1' }}" \
201+
--pool-size "${{ inputs.pool_size || '100' }}" \
190202
--output comment.md
191203
- name: Post PR comment
192204
if: github.event_name == 'pull_request'

ci/perf_build.sh

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@ if [ -d "$CACHE_DIR/pgsql" ]; then
1414
exit 0
1515
fi
1616

17+
rm -rf /tmp/perf-build-cache/
18+
1719
echo "=== Building from scratch ==="
1820

1921
if [ $COMPILER = "clang" ]; then

ci/perf_compare.py

Lines changed: 25 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -56,26 +56,26 @@ def parse_k6_summary(filepath):
5656
return metrics
5757

5858

59-
def find_result_files(results_dir, num_runs, scale_factor=None):
59+
def find_result_files(results_dir, num_runs, warehouses=None):
6060
"""Find stroppy JSON result files in the download directory.
6161
6262
stroppy-action artifacts are downloaded as:
63-
results-dir/perf-results-{branch}-sf{SF}-{N}/stroppy-results.json
64-
If scale_factor is given, only match directories containing that sf.
63+
results-dir/perf-results-{branch}-{W}W-{N}/stroppy-results.json
64+
If warehouses is given, only match directories containing that tag.
6565
"""
6666
pattern = os.path.join(results_dir, "**", "stroppy-results.json")
6767
files = sorted(glob.glob(pattern, recursive=True))
6868
if not files:
6969
files = sorted(glob.glob(os.path.join(results_dir, "*.json")))
70-
if scale_factor is not None:
71-
sf_tag = f"-sf{scale_factor}-"
72-
files = [f for f in files if sf_tag in f]
70+
if warehouses is not None:
71+
wh_tag = f"-{warehouses}W-"
72+
files = [f for f in files if wh_tag in f]
7373
return files[:num_runs]
7474

7575

76-
def load_run_results(results_dir, num_runs, scale_factor=None):
76+
def load_run_results(results_dir, num_runs, warehouses=None):
7777
"""Load and parse all result files from a results directory."""
78-
files = find_result_files(results_dir, num_runs, scale_factor)
78+
files = find_result_files(results_dir, num_runs, warehouses)
7979
all_metrics = []
8080
for filepath in files:
8181
print(f"Parsing: {filepath}", file=sys.stderr)
@@ -180,7 +180,8 @@ def generate_markdown(base_medians, head_medians, config):
180180
lines.append("")
181181
lines.append(
182182
f"**Config**: {config['runs']} runs, {config['duration']} each, "
183-
f"scale_factor={config['scale_factor']}"
183+
f"warehouses={config['warehouses']}, vus_scale={config['vus_scale']}, "
184+
f"pool_size={config['pool_size']}"
184185
)
185186
lines.append("")
186187

@@ -193,23 +194,27 @@ def main():
193194
parser.add_argument("--head-dir", required=True, help="Directory with head branch results")
194195
parser.add_argument("--runs", type=int, default=5, help="Number of benchmark runs")
195196
parser.add_argument("--duration", default="10m", help="Duration per run")
196-
parser.add_argument("--scale-factor", default="1",
197-
help="TPC-C scale factor (comma-separated for multiple)")
197+
parser.add_argument("--warehouses", default="1",
198+
help="Number of warehouses (comma-separated for multiple)")
199+
parser.add_argument("--vus-scale", default="1",
200+
help="VU scale multiplier")
201+
parser.add_argument("--pool-size", default="100",
202+
help="Connection pool size")
198203
parser.add_argument("--output", default="comment.md", help="Output markdown file")
199204
args = parser.parse_args()
200205

201-
scale_factors = [s.strip() for s in args.scale_factor.split(",")]
206+
warehouse_list = [s.strip() for s in args.warehouses.split(",")]
202207
sections = []
203208

204-
for sf in scale_factors:
205-
base_metrics = load_run_results(args.base_dir, args.runs, scale_factor=sf)
206-
head_metrics = load_run_results(args.head_dir, args.runs, scale_factor=sf)
209+
for wh in warehouse_list:
210+
base_metrics = load_run_results(args.base_dir, args.runs, warehouses=wh)
211+
head_metrics = load_run_results(args.head_dir, args.runs, warehouses=wh)
207212

208213
if not base_metrics:
209-
print(f"Error: no base branch results found for scale_factor={sf}", file=sys.stderr)
214+
print(f"Error: no base branch results found for warehouses={wh}", file=sys.stderr)
210215
sys.exit(1)
211216
if not head_metrics:
212-
print(f"Error: no head branch results found for scale_factor={sf}", file=sys.stderr)
217+
print(f"Error: no head branch results found for warehouses={wh}", file=sys.stderr)
213218
sys.exit(1)
214219

215220
base_medians = compute_medians(base_metrics)
@@ -218,7 +223,9 @@ def main():
218223
config = {
219224
"runs": args.runs,
220225
"duration": args.duration,
221-
"scale_factor": sf,
226+
"warehouses": wh,
227+
"vus_scale": args.vus_scale,
228+
"pool_size": args.pool_size,
222229
}
223230

224231
sections.append(generate_markdown(base_medians, head_medians, config))

ci/perf_pg_start.sh

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,3 +30,6 @@ pg_ctl -D "$PGDATA" -l "$PGDATA/postgresql.log" start
3030
# Wait for PostgreSQL to be ready
3131
pg_isready -t 30
3232

33+
# Register the OrioleDB extension
34+
psql -d postgres -c "CREATE EXTENSION IF NOT EXISTS orioledb;"
35+

ci/perf_pg_stop.sh

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,4 +7,22 @@ export PATH="$GITHUB_WORKSPACE/pgsql/bin:$PATH"
77
PGDATA="$GITHUB_WORKSPACE/pgdata"
88

99
pg_ctl -D "$PGDATA" stop || true
10+
11+
# Save PostgreSQL log before cleanup
12+
LOG_DIR="$HOME/logs"
13+
mkdir -p "$LOG_DIR"
14+
if [ -n "${GITHUB_RUN_ID:-}" ]; then
15+
LOG_NAME="${GITHUB_RUN_ID}-postgres.log"
16+
else
17+
# Incrementing counter fallback
18+
COUNTER=1
19+
while [ -f "$LOG_DIR/${COUNTER}-postgres.log" ]; do
20+
COUNTER=$((COUNTER + 1))
21+
done
22+
LOG_NAME="${COUNTER}-postgres.log"
23+
fi
24+
25+
echo "Saving PostgreSQL log to $LOG_DIR/$LOG_NAME"
26+
cp "$PGDATA/postgresql.log" "$LOG_DIR/$LOG_NAME" 2>/dev/null || echo "Warning: no postgresql.log found"
27+
1028
rm -rf "$PGDATA"

0 commit comments

Comments (0)