Commit 3670c8a

Add performance tests
1 parent 20833cf commit 3670c8a

3 files changed: 375 additions & 1 deletion

package.json

Lines changed: 4 additions & 1 deletion
@@ -65,10 +65,11 @@
     "build:src": "tsc && chmod +x ./dist/admin/admin-bin.js",
     "build:doc": "typedoc src/main.ts",
     "postbuild:doc": "touch ./typedoc/.nojekyll",
-    "test": "npm run build && npm run test:node && npm run test:browser",
+    "test": "npm run build && npm run test:node && npm run test:browser && npm run test:perf",
     "test:node": "NODE_EXTRA_CA_CERTS=./test/fixtures/test-ca.pem TS_NODE_FILES=true mocha -r ts-node/register 'test/**/*.spec.ts'",
     "test:browser": "npm run with-admin -- karma start",
     "test:browser:debug": "npm run with-admin -- karma start --single-run=false --browsers ChromeWithCert",
+    "test:perf": "NODE_EXTRA_CA_CERTS=./test/fixtures/test-ca.pem TS_NODE_FILES=true mocha -r ts-node/register 'test/performance/*.perf.ts'",
     "admin": "TS_NODE_FILES=true ts-node -e 'require(\"./src/main\").getAdminServer({ debug: true }).start()'",
     "with-admin": "TS_NODE_FILES=true ts-node ./src/admin/admin-bin.ts -c ",
     "ci-tests": "npm run test && catch-uncommitted",
@@ -95,6 +96,7 @@
   },
   "homepage": "https://github.com/httptoolkit/mockttp#readme",
   "devDependencies": {
+    "@types/autocannon": "^7.12.7",
     "@types/base64-arraybuffer": "0.1.0",
     "@types/body-parser": "1.19.1",
     "@types/chai": "4.2.21",
@@ -112,6 +114,7 @@
     "@types/source-map-support": "0.4.2",
     "@types/ws": "8.5.3",
     "assert": "^2.0.0",
+    "autocannon": "^8.0.0",
     "brotli-wasm": "^1.0.0",
     "browserify-zlib": "^0.2.0",
     "buffer": "^6.0.3",
Lines changed: 127 additions & 0 deletions
@@ -0,0 +1,127 @@
import * as autocannon from 'autocannon';
import { expect } from '../test-utils';

export interface PerformanceResult {
    throughput: number; // req/sec
    latency: {
        mean: number;
        p50: number;
        p75: number;
        p90: number;
        p99: number;
        p99_9: number;
    };
    duration: number;
    requests: {
        total: number;
        average: number;
    };
    errors: number;
    timeouts: number;
}

export interface PerformanceTestOptions {
    url: string;
    duration?: number; // seconds
    connections?: number;
    pipelining?: number;
    method?: 'GET' | 'POST' | 'PUT' | 'DELETE';
    body?: string | Buffer;
    headers?: Record<string, string>;
}

/**
 * Run a performance test and return formatted results
 */
export async function runPerformanceTest(
    options: PerformanceTestOptions
): Promise<PerformanceResult> {
    const result = await autocannon({
        url: options.url,
        duration: options.duration || 10,
        connections: options.connections || 10,
        pipelining: options.pipelining || 1,
        method: options.method || 'GET',
        body: options.body,
        headers: options.headers
    });

    return {
        throughput: result.requests.average,
        latency: {
            mean: result.latency.mean,
            p50: result.latency.p50,
            p75: result.latency.p75,
            p90: result.latency.p90,
            p99: result.latency.p99,
            p99_9: result.latency.p99_9
        },
        duration: result.duration,
        requests: {
            total: result.requests.total,
            average: result.requests.average
        },
        errors: result.errors,
        timeouts: result.timeouts
    };
}

/**
 * Print performance results in a readable format
 */
export function printResults(name: string, result: PerformanceResult): void {
    console.log(`
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
${name}
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

Throughput: ${result.throughput.toFixed(2)} req/sec
Total Reqs: ${result.requests.total}
Duration:   ${result.duration}s

Latency:
  Mean:  ${result.latency.mean.toFixed(2)}ms
  p50:   ${result.latency.p50.toFixed(2)}ms
  p75:   ${result.latency.p75.toFixed(2)}ms
  p90:   ${result.latency.p90.toFixed(2)}ms
  p99:   ${result.latency.p99.toFixed(2)}ms
  p99.9: ${result.latency.p99_9.toFixed(2)}ms

Errors:   ${result.errors}
Timeouts: ${result.timeouts}
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
`);
}

/**
 * Assert performance meets minimum thresholds
 */
export function assertPerformance(
    result: PerformanceResult,
    thresholds: {
        minThroughput?: number;
        maxP99Latency?: number;
        maxErrors?: number;
    }
): void {
    if (thresholds.minThroughput !== undefined) {
        expect(result.throughput).to.be.greaterThan(
            thresholds.minThroughput,
            `Throughput ${result.throughput.toFixed(2)} req/sec is below threshold ${thresholds.minThroughput} req/sec`
        );
    }

    if (thresholds.maxP99Latency !== undefined) {
        expect(result.latency.p99).to.be.lessThan(
            thresholds.maxP99Latency,
            `P99 latency ${result.latency.p99.toFixed(2)}ms exceeds threshold ${thresholds.maxP99Latency}ms`
        );
    }

    if (thresholds.maxErrors !== undefined) {
        expect(result.errors).to.be.lessThanOrEqual(
            thresholds.maxErrors,
            `Errors ${result.errors} exceeds threshold ${thresholds.maxErrors}`
        );
    }
}
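
The third changed file in this commit (not shown in this excerpt) is presumably a spec that consumes these helpers. As an illustrative sketch only, a file matching test/performance/*.perf.ts, which the new test:perf script would pick up, might use them roughly as follows; the helper import path, the mockttp getLocal() setup, and the threshold numbers are all assumptions, not taken from this commit.

// Hypothetical example spec, not part of this commit.
// The helper import path below is a guess, and the setup assumes
// mockttp's standard getLocal() test server API.
import * as mockttp from '../..';
import {
    runPerformanceTest,
    printResults,
    assertPerformance
} from './performance-test-utils'; // hypothetical filename

describe("Mock server performance", function () {
    // autocannon runs for several seconds, so raise mocha's default timeout
    this.timeout(60000);

    const server = mockttp.getLocal();

    beforeEach(() => server.start());
    afterEach(() => server.stop());

    it("serves a simple mocked GET endpoint under load", async () => {
        await server.forGet('/load-test').thenReply(200, 'ok');

        const result = await runPerformanceTest({
            url: `${server.url}/load-test`,
            duration: 10,
            connections: 10
        });

        printResults("Simple GET mock", result);

        // Example thresholds only; realistic values depend on the machine and CI runner
        assertPerformance(result, {
            minThroughput: 1000,
            maxP99Latency: 100,
            maxErrors: 0
        });
    });
});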
