Commit a7de4d5

Document and refine naming for the ZkPrograms performance regression framework
1 parent: 2726c84

1 file changed

src/lib/testing/perf-regression.ts

Lines changed: 77 additions & 31 deletions
@@ -1,3 +1,16 @@
+/**
+ * Regression testing framework for individual ZkProgram examples.
+ *
+ * Stores and compares metadata such as compile & proving times.
+ * Can run in two modes:
+ * - **Dump**: write baseline results into
+ *   {@link tests/perf-regression/perf-regression.json}
+ * - **Check**: validate current results against the stored baselines
+ *
+ * For regression testing of constraint systems (CS) and zkApps,
+ * see {@link tests/perf-regression/perf-regression.ts}.
+ */
+
 import { ConstraintSystemSummary } from '../provable/core/provable-context.js';
 import fs from 'fs';
 import path from 'path';
@@ -24,7 +37,7 @@ type PerfRegressionEntry = {
 type PerfStack = {
   start: number;
   label?: 'compile' | 'prove' | string;
-  contractName?: string;
+  programName?: string;
   methodsSummary?: MethodsSummary;
   methodName?: string; // required for prove; optional for compile
 };
@@ -39,7 +52,14 @@ const STOP_AFTER = Number.isFinite(Number(process.env.STOP_AFTER ?? ''))
   ? Number(process.env.STOP_AFTER)
   : undefined;
 
-function createPerfSession(contractName?: string, methodsSummary?: MethodsSummary) {
+/**
+ * Create a new performance tracking session for a contract.
+ *
+ * @param programName Name of the program (key in perf-regression.json)
+ * @param methodsSummary Optional methods analysis (required for prove checks)
+ * @returns An object with `start()` and `end()` methods
+ */
+function createPerfSession(programName?: string, methodsSummary?: MethodsSummary) {
   const perfStack: PerfStack[] = [];
   let __endCounter = 0;
 
@@ -53,28 +73,40 @@ function createPerfSession(contractName?: string, methodsSummar
   }
 
   return {
+    /**
+     * Start measuring performance for a given phase.
+     *
+     * @param label The phase label: `'compile' | 'prove' | string`
+     * @param methodName Method name (required for `prove`)
+     */
     start(label?: 'compile' | 'prove' | string, methodName?: string) {
       perfStack.push({
         label,
         start: performance.now(),
-        contractName,
+        programName,
         methodsSummary,
         methodName,
       });
     },
 
+    /**
+     * End the most recent measurement and:
+     * - Log results to console
+     * - Dump into baseline JSON (if `--dump`)
+     * - Check against baseline (if `--check`)
+     */
     end() {
       const frame = perfStack.pop()!;
-      const { label, start, contractName } = frame;
+      const { label, start, programName } = frame;
       let { methodsSummary: cs, methodName } = frame;
 
       const time = (performance.now() - start) / 1000;
 
       // Base logging — show contract.method for prove
-      if (label === 'prove' && contractName && methodName) {
-        console.log(`${label} ${contractName}.${methodName}... ${time.toFixed(3)} sec`);
+      if (label === 'prove' && programName && methodName) {
+        console.log(`${label} ${programName}.${methodName}... ${time.toFixed(3)} sec`);
       } else if (label) {
-        console.log(`${label} ${contractName ?? ''}... ${time.toFixed(3)} sec`);
+        console.log(`${label} ${programName ?? ''}... ${time.toFixed(3)} sec`);
       }
 
       // If neither --dump nor --check, just log and honor STOP_AFTER
@@ -84,7 +116,7 @@ function createPerfSession(contractName?: string, methodsSummar
       }
 
       // Only act for compile/prove with required context
-      if (!contractName || (label !== 'compile' && label !== 'prove')) {
+      if (!programName || (label !== 'compile' && label !== 'prove')) {
        maybeStop();
        return;
      }
@@ -98,7 +130,7 @@ function createPerfSession(contractName?: string, methodsSummar
       if (CHECK) {
         checkAgainstBaseline({
           perfRegressionJson,
-          contractName,
+          programName,
           label: 'compile', // compile checks don't use method/digest; pass empty strings
           methodName: '',
           digest: '',
@@ -110,12 +142,12 @@ function createPerfSession(contractName?: string, methodsSummar
 
       // DUMP: update only contract-level compileTime (does not touch methods)
       if (DUMP) {
-        const prev = perfRegressionJson[contractName];
+        const prev = perfRegressionJson[programName];
         const merged: PerfRegressionEntry = prev
           ? { ...prev, compileTime: time }
           : { compileTime: time, methods: {} };
 
-        perfRegressionJson[contractName] = merged;
+        perfRegressionJson[programName] = merged;
         fs.writeFileSync(FILE_PATH, JSON.stringify(perfRegressionJson, null, 2));
         maybeStop();
         return;
@@ -141,7 +173,7 @@ function createPerfSession(contractName?: string, methodsSummar
       }
       if (!Object.prototype.hasOwnProperty.call(cs, methodName)) {
         throw new Error(
-          `The method "${methodName}" does not exist in the analyzed constraint systems for "${contractName}". ` +
+          `The method "${methodName}" does not exist in the analyzed constraint systems for "${programName}". ` +
             `Available: ${csMethodNames.join(', ')}`
         );
       }
@@ -156,7 +188,7 @@ function createPerfSession(contractName?: string, methodsSummar
       if (CHECK) {
         checkAgainstBaseline({
           perfRegressionJson,
-          contractName,
+          programName,
           label: 'prove',
           methodName,
           digest: info.digest,
@@ -168,9 +200,9 @@ function createPerfSession(contractName?: string, methodsSummar
 
       // DUMP: update per-method rows/digest and proveTime; leave compileTime untouched
       if (DUMP) {
-        const prev = perfRegressionJson[contractName];
+        const prev = perfRegressionJson[programName];
         const merged: PerfRegressionEntry = prev
-          ? { ...prev, methods: { ...(prev.methods ?? {}) } }
+          ? { ...prev, methods: { ...prev.methods } }
           : { methods: {} };
 
         merged.methods[methodName] = {
@@ -179,7 +211,7 @@ function createPerfSession(contractName?: string, methodsSummar
           proveTime: time,
         };
 
-        perfRegressionJson[contractName] = merged;
+        perfRegressionJson[programName] = merged;
         fs.writeFileSync(FILE_PATH, JSON.stringify(perfRegressionJson, null, 2));
         maybeStop();
         return;
@@ -191,11 +223,22 @@ function createPerfSession(contractName?: string, methodsSummar
   };
 }
 
-/// Public API
 const Performance = {
-  //TODO add jsdoc
-  create(contractName: string, methodsSummary?: MethodsSummary) {
-    return createPerfSession(contractName, methodsSummary);
+  /**
+   * Initialize a new performance session.
+   *
+   * @param programName Optional identifier for the program or label.
+   * - When provided with a ZkProgram name and its `methodsSummary`, the session
+   *   benchmarks compile and prove phases, storing or checking results against
+   *   `perf-regression.json`.
+   * - When used without a ZkProgram, `programName` acts as a freeform label and
+   *   the session can be used like `console.time` / `console.timeEnd` to log
+   *   timestamps for arbitrary phases.
+   * @param methodsSummary Optional analysis of ZkProgram methods, required when
+   *   measuring prove performance.
+   */
+  create(programName?: string, methodsSummary?: MethodsSummary) {
+    return createPerfSession(programName, methodsSummary);
   },
 };
 
@@ -205,20 +248,23 @@ function flag(name: string) {
   return process.argv.includes(name);
 }
 
-// Compare against baseline; throw on mismatch/regression
+/**
+ * Compare a measured time/digest against stored baselines.
+ * Throws an error if regression exceeds tolerance.
+ */
 function checkAgainstBaseline(params: {
   perfRegressionJson: Record<string, PerfRegressionEntry>;
-  contractName: string;
+  programName: string;
   label: 'compile' | 'prove';
   methodName: string;
   digest: string;
   actualTime: number;
 }) {
-  const { perfRegressionJson, contractName, label, methodName, digest, actualTime } = params;
+  const { perfRegressionJson, programName, label, methodName, digest, actualTime } = params;
 
-  const baseline = perfRegressionJson[contractName];
+  const baseline = perfRegressionJson[programName];
   if (!baseline) {
-    throw new Error(`No baseline for "${contractName}". Seed it with --dump first.`);
+    throw new Error(`No baseline for "${programName}". Seed it with --dump first.`);
   }
 
   // tolerances (same as other file)
@@ -231,7 +277,7 @@ function checkAgainstBaseline(params: {
     const expected = baseline.compileTime;
     if (expected == null) {
       throw new Error(
-        `No baseline compileTime for "${contractName}". Run --dump (compile) to set it.`
+        `No baseline compileTime for "${programName}". Run --dump (compile) to set it.`
       );
     }
     const tol = expected < 5e-5 ? compileTiny : compileTol;
@@ -240,7 +286,7 @@ function checkAgainstBaseline(params: {
     if (actualTime > expected * tol) {
       const regressionPct = ((actualTime - expected) / expected) * 100;
       throw new Error(
-        `Compile regression for ${contractName}\n` +
+        `Compile regression for ${programName}\n` +
           ` Actual: ${actualTime.toFixed(6)}s\n` +
           ` Regression: +${regressionPct.toFixed(2)}% (allowed +${allowedPct.toFixed(0)}%)`
       );
@@ -252,20 +298,20 @@ function checkAgainstBaseline(params: {
     const baseMethod = baseline.methods?.[methodName];
     if (!baseMethod) {
       throw new Error(
-        `No baseline method entry for ${contractName}.${methodName}. Run --dump (prove) to add it.`
+        `No baseline method entry for ${programName}.${methodName}. Run --dump (prove) to add it.`
       );
     }
     if (baseMethod.digest !== digest) {
       throw new Error(
-        `Digest mismatch for ${contractName}.${methodName}\n` +
+        `Digest mismatch for ${programName}.${methodName}\n` +
           ` Actual: ${digest}\n` +
           ` Expected: ${baseMethod.digest}\n`
       );
     }
     const expected = baseMethod.proveTime;
     if (expected == null) {
       throw new Error(
-        `No baseline proveTime for ${contractName}.${methodName}. Run --dump (prove) to set it.`
+        `No baseline proveTime for ${programName}.${methodName}. Run --dump (prove) to set it.`
       );
     }
     const tol = expected < 0.2 ? proveTolSmall : proveTolDefault;
@@ -274,7 +320,7 @@ function checkAgainstBaseline(params: {
     if (actualTime > expected * tol) {
       const regressionPct = ((actualTime - expected) / expected) * 100;
       throw new Error(
-        `Prove regression for ${contractName}.${methodName}\n` +
+        `Prove regression for ${programName}.${methodName}\n` +
           ` Actual: ${actualTime.toFixed(3)}s\n` +
           ` Regression: +${regressionPct.toFixed(2)}% (allowed +${allowedPct.toFixed(0)}%)`
       );
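
For orientation, here is a minimal usage sketch of the renamed API. It assumes `Performance` is exported from `src/lib/testing/perf-regression.ts` and that a ZkProgram's method analysis can serve as the `MethodsSummary`; the import path, `MyProgram`, `myMethod`, and the `analyzeMethods()` call are illustrative placeholders rather than part of this commit. Only the `create`/`start`/`end` shape and the `--dump`/`--check` behavior come from the code above.

```ts
// Hedged usage sketch: placeholder names, not part of this commit.
import { Performance } from './perf-regression.js'; // assumed export and path

declare const MyProgram: any; // placeholder for an actual ZkProgram

// Assumed: the ZkProgram's method analysis doubles as the MethodsSummary,
// which is required for prove measurements (digest + proveTime checks).
const methodsSummary = await MyProgram.analyzeMethods();
const perf = Performance.create('MyProgram', methodsSummary);

// Compile phase: with --dump this seeds compileTime in perf-regression.json,
// with --check it is compared against the stored baseline.
perf.start('compile');
await MyProgram.compile();
perf.end();

// Prove phase: methodName is required so the session can look up the
// per-method digest and proveTime baseline.
perf.start('prove', 'myMethod');
await MyProgram.myMethod(/* ...args */);
perf.end();

// Freeform use: without a ZkProgram, the label just yields
// console.time / console.timeEnd style logging.
const timer = Performance.create('witness generation');
timer.start('witness');
// ... arbitrary work ...
timer.end();
```

In check mode a regression beyond the tolerance throws, so a run like this fails loudly in CI rather than only logging a warning.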
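
The baseline file itself is only partially visible in this diff. The sketch below shows the entry shape that the dump and check paths read and write: a top-level `compileTime` plus per-method `digest` and `proveTime`, keyed by program name. The program and method names, the numbers, and the `rows` field (taken from the "per-method rows/digest" comment) are illustrative assumptions.

```ts
// Illustrative entry for one program in tests/perf-regression/perf-regression.json;
// values are made up and the shape is inferred from the fields used above.
const exampleBaseline = {
  MyProgram: {
    compileTime: 1.234, // seconds, written by a --dump compile run
    methods: {
      myMethod: {
        rows: 512, // assumed constraint-row count (per the "rows/digest" comment)
        digest: 'abc123', // constraint-system digest; --check requires an exact match
        proveTime: 4.567, // seconds, written by a --dump prove run
      },
    },
  },
};
```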
