
Commit daa99fb

Merge pull request #21046 from emberjs/nvp/tracerbench-script
Add benchmark infra with starting basic bench
2 parents: 4db9136 + f6e042f (commit daa99fb)

48 files changed: +4928 -2213 lines

.github/workflows/ci-jobs.yml

Lines changed: 13 additions & 0 deletions
@@ -192,3 +192,16 @@ jobs:
       - run: firefox --version
       - name: test
         run: pnpm ember test --path dist -c testem.ci-browsers.js
+
+  perf-check:
+    name: Perf script still works
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v6
+        with:
+          fetch-depth: 0
+      - uses: ./.github/actions/setup
+      - name: Check that the perf script works, so we don't regress
+        run: RUNS='2' pnpm bench
+        env:
+          GIT_LFS_SKIP_SMUDGE: 1
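
The job exercises the benchmark through pnpm bench with RUNS='2', so CI only pays for a quick two-sample sanity run instead of the default 20. The package.json change itself is not part of this excerpt; given the new bin/benchmark.mjs entry point added below, the script entry presumably maps straight to it (an assumption, not something shown in this diff):

{
  "scripts": {
    "bench": "node ./bin/benchmark.mjs"
  }
}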

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -1,3 +1,4 @@
+tracerbench-testing/
 *.bpkg
 *.gem
 *.rbc

.prettierignore

Lines changed: 1 addition & 0 deletions
@@ -12,3 +12,4 @@ package.json
 pnpm-lock.yaml
 glimmer-vm/**/*.md
 glimmer-vm/**/*.yaml
+tracerbench-testing/

bin/benchmark.mjs

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
/* eslint-disable no-console */
/* eslint-disable n/no-process-exit */

import { runBenchmark } from './benchmark/run.mjs';
import { hasFlag } from './benchmark/utils.mjs';

if (hasFlag(process.argv, '--help', '-h')) {
  console.log(`
Runs tracerbench compare between origin/main and your current working tree.

NOTE: only ember-source is linked, not other packages.

Output directory:
  tracerbench-testing/

Options:
  --force    delete cached directories before running
  --reuse    reuse existing apps and tarballs, if available (by default only the control app/tarball is reused)

Notes:
  - This script runs \`pnpm install\` and \`node ./bin/build-for-publishing.js\` in both repos.
  - build-for-publishing updates files in-place; it will modify your working tree.
  - Benchmark apps are built with \`vite build\` and served using \`vite preview\`.
`);
  process.exit(0);
}

const FORCE = hasFlag(process.argv, '--force');
const REUSE = hasFlag(process.argv, '--reuse');

try {
  const result = await runBenchmark({
    force: FORCE,
    reuse: REUSE,
  });

  console.log(`\nWrote report: ${result.msgFile}`);
} catch (error) {
  console.error(error);
  process.exit(1);
}
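
The CLI parses its flags with hasFlag from bin/benchmark/utils.mjs, which is part of this commit but not shown in this excerpt. Judging from the call sites above, the helper just checks whether any of the given aliases is present in argv; a minimal sketch under that assumption (not the actual utils.mjs code):

// Hypothetical sketch; the real hasFlag lives in bin/benchmark/utils.mjs,
// which is not included in the excerpt above.
export function hasFlag(argv, ...flags) {
  // true if any of the listed aliases (e.g. '--help', '-h') appears in argv
  return flags.some((flag) => argv.includes(flag));
}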

bin/benchmark/control.mjs

Lines changed: 39 additions & 0 deletions
@@ -0,0 +1,39 @@
import { join } from 'node:path';

import fs from 'fs-extra';

import { buildEmberSource, latestTarball, run } from './utils.mjs';

const { ensureDir, pathExists } = fs;

export async function getOrBuildControlTarball({ repoRoot, controlRepoDir, controlBranchName }) {
  try {
    return await latestTarball(controlRepoDir);
  } catch {
    // fall through; rebuild
  }

  await run('git', ['fetch', 'origin'], { cwd: repoRoot, quiet: true });
  const controlRef = (
    await run('git', ['rev-parse', `origin/${controlBranchName}`], {
      cwd: repoRoot,
      quiet: true,
    })
  ).stdout.trim();

  if (!(await pathExists(controlRepoDir))) {
    await ensureDir(controlRepoDir);
    // clone from the local .git directory (fast, avoids network)
    if (process.env.CI) {
      await run('git', ['clone', 'https://github.com/emberjs/ember.js.git', controlRepoDir]);
    } else {
      await run('git', ['clone', join(repoRoot, '.git'), controlRepoDir]);
    }
  } else {
    await run('git', ['fetch'], { cwd: controlRepoDir, quiet: true });
  }

  await run('git', ['checkout', '--force', controlRef], { cwd: controlRepoDir });
  await buildEmberSource(controlRepoDir);
  return await latestTarball(controlRepoDir);
}
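
getOrBuildControlTarball treats an existing tarball in the control checkout as a cache: latestTarball is tried first, and only a failure (no tarball yet) falls through to the fetch/checkout/build path, so repeated runs reuse the control build unless --force wiped the directory. latestTarball comes from utils.mjs, which is not shown in this excerpt; a hypothetical sketch of the behaviour the try/catch relies on (throwing when nothing has been packed yet):

// Hypothetical sketch only; the real latestTarball is in bin/benchmark/utils.mjs,
// which is not part of this excerpt.
import { readdir } from 'node:fs/promises';
import { join } from 'node:path';

export async function latestTarball(repoDir) {
  const tarballs = (await readdir(repoDir)).filter((name) => name.endsWith('.tgz'));

  if (tarballs.length === 0) {
    // callers' try/catch depends on this throwing when no build exists yet
    throw new Error(`No ember-source tarball found in ${repoDir}`);
  }

  // assumes the newest pack output sorts last by name; a real implementation
  // might compare file mtimes instead
  return join(repoDir, tarballs.sort().at(-1));
}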

bin/benchmark/experiment.mjs

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
import { latestTarball, buildEmberSource } from './utils.mjs';

export async function buildExperimentTarball({ repoDir, reuse = false }) {
  if (reuse) {
    try {
      return await latestTarball(repoDir);
    } catch {
      // fall through; rebuild
    }
  }

  await buildEmberSource(repoDir);
  return await latestTarball(repoDir);
}
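
The experiment side is the working tree itself (repoDir is the repo root), so unlike the control it is rebuilt on every run unless --reuse is passed. buildEmberSource is another utils.mjs helper not included in this excerpt; based on the steps the --help text describes (pnpm install plus node ./bin/build-for-publishing.js, ending with a tarball that latestTarball can pick up), a hypothetical sketch might look like:

// Hypothetical sketch; the real buildEmberSource lives in bin/benchmark/utils.mjs,
// which is not part of this excerpt. The final pack step is an assumption.
import { execFile } from 'node:child_process';
import { promisify } from 'node:util';

const exec = promisify(execFile);

export async function buildEmberSource(repoDir) {
  await exec('pnpm', ['install'], { cwd: repoDir });
  await exec('node', ['./bin/build-for-publishing.js'], { cwd: repoDir });
  await exec('pnpm', ['pack'], { cwd: repoDir }); // produces the ember-source-*.tgz tarball
}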

bin/benchmark/run.mjs

Lines changed: 201 additions & 0 deletions
@@ -0,0 +1,201 @@
/* eslint-disable no-console */
import { join } from 'node:path';
import { killPortProcess } from 'kill-port-process';

import fs from 'fs-extra';

import { getOrBuildControlTarball } from './control.mjs';
import { buildExperimentTarball } from './experiment.mjs';
import { run, prepareApp, sleep, startVitePreview, lsof } from './utils.mjs';

const { ensureDir, remove, writeFile } = fs;

function buildMarkersString(markers) {
  return markers
    .reduce((acc, marker) => {
      return acc + ',' + marker + 'Start,' + marker + 'End';
    }, '')
    .split(',')
    .map((s) => s.trim())
    .filter(Boolean)
    .join(',');
}

// Default configuration for runBenchmark
const DEFAULT_CONTROL_BRANCH_NAME = 'main';
const DEFAULT_CONTROL_APP_FROM_MAIN = false;
const DEFAULT_CONTROL_PORT = 4500;
const DEFAULT_EXPERIMENT_PORT = 4501;
const DEFAULT_FIDELITY = process.env['RUNS'] || '20';
const DEFAULT_THROTTLE = '1';
const DEFAULT_REGRESSION_THRESHOLD = '25';
const DEFAULT_SAMPLE_TIMEOUT = '60';
const DEFAULT_MARKERS = [
  // Copied from glimmer-vm/bin/setup-bench.mts (krausest benchmark)
  'render',
  'render1000Items1',
  'clearItems1',
  'render1000Items2',
  'clearItems2',
  'render5000Items1',
  'clearManyItems1',
  'render5000Items2',
  'clearManyItems2',
  'render1000Items3',
  'append1000Items1',
  'append1000Items2',
  'updateEvery10thItem1',
  'updateEvery10thItem2',
  'selectFirstRow1',
  'selectSecondRow1',
  'removeFirstRow1',
  'removeSecondRow1',
  'swapRows1',
  'swapRows2',
  'clearItems4',
];

import { REPO_ROOT, BENCH_ROOT } from './utils.mjs';

export async function runBenchmark({ force = false, reuse = false } = {}) {
  // Use config constants directly; no local re-assignment

  await ensureDir(BENCH_ROOT);

  const CONTROL_DIRS = {
    repo: join(BENCH_ROOT, 'ember-source-control'),
    app: join(BENCH_ROOT, 'control'),
  };
  const EXPERIMENT_DIRS = {
    app: join(BENCH_ROOT, 'experiment'),
    repo: REPO_ROOT,
  };

  const controlUrl = `http://127.0.0.1:${DEFAULT_CONTROL_PORT}`;
  const experimentUrl = `http://127.0.0.1:${DEFAULT_EXPERIMENT_PORT}`;
  const markersString = buildMarkersString(DEFAULT_MARKERS);

  if (force) {
    await killPortProcess([DEFAULT_CONTROL_PORT, DEFAULT_EXPERIMENT_PORT]);
    await remove(CONTROL_DIRS.repo);
    await remove(CONTROL_DIRS.app);
    await remove(EXPERIMENT_DIRS.app);
  }

  await ensureDir(BENCH_ROOT);
  await ensureDir(EXPERIMENT_DIRS.app);
  await ensureDir(CONTROL_DIRS.app);

  const controlTarball = await getOrBuildControlTarball({
    repoRoot: REPO_ROOT,
    controlRepoDir: CONTROL_DIRS.repo,
    controlBranchName: DEFAULT_CONTROL_BRANCH_NAME,
  });

  const experimentTarball = await buildExperimentTarball({
    repoDir: EXPERIMENT_DIRS.repo,
    reuse,
  });

  const experimentAppSource = join(REPO_ROOT, 'smoke-tests/benchmark-app');
  const controlAppSource = DEFAULT_CONTROL_APP_FROM_MAIN
    ? join(CONTROL_DIRS.repo, 'smoke-tests/benchmark-app')
    : experimentAppSource;

  await Promise.all([
    prepareApp({
      sourceAppDir: controlAppSource,
      destAppDir: CONTROL_DIRS.app,
      emberSourceTarball: controlTarball,
      reuse,
    }),
    prepareApp({
      sourceAppDir: experimentAppSource,
      destAppDir: EXPERIMENT_DIRS.app,
      emberSourceTarball: experimentTarball,
      reuse,
    }),
  ]);

  // These will error if the ports are occupied (--strict-port)
  startVitePreview({ appDir: CONTROL_DIRS.app, port: DEFAULT_CONTROL_PORT });
  startVitePreview({
    appDir: EXPERIMENT_DIRS.app,
    port: DEFAULT_EXPERIMENT_PORT,
  });

  async function cleanup() {
    console.log(`\n\tCleaning up servers...`);

    await killPortProcess([DEFAULT_CONTROL_PORT, DEFAULT_EXPERIMENT_PORT]);
  }

  process.on('exit', cleanup);
  process.on('SIGINT', () => {
    cleanup();
    // eslint-disable-next-line n/no-process-exit
    process.exit(1);
  });

  // give servers a moment to start
  await sleep(5000);

  /**
   * We need to make sure both servers are running before starting the benchmark.
   */
  let controlLsof = await lsof(DEFAULT_CONTROL_PORT);
  let experimentLsof = await lsof(DEFAULT_EXPERIMENT_PORT);

  if (!controlLsof || !experimentLsof) {
    throw new Error(
      `One of the servers failed to start. Control server lsof:\n${controlLsof}\n\nExperiment server lsof:\n${experimentLsof}`
    );
  }

  const tracerbenchBin = join(REPO_ROOT, 'node_modules/tracerbench/bin/run');

  const args = [
    '--single-threaded-gc',
    tracerbenchBin,
    'compare',
    '--regressionThreshold',
    DEFAULT_REGRESSION_THRESHOLD,
    '--sampleTimeout',
    DEFAULT_SAMPLE_TIMEOUT,
    '--fidelity',
    DEFAULT_FIDELITY,
    '--controlURL',
    controlUrl,
    '--experimentURL',
    experimentUrl,
    '--report',
    '--headless',
    '--cpuThrottleRate',
    DEFAULT_THROTTLE,
    '--markers',
    markersString,
    '--debug',
    '--browserArgs',
    `"--incognito,--disable-gpu,--mute-audio,--log-level=3,--headless=new"`,
  ];

  const output = await run('node', args, { cwd: EXPERIMENT_DIRS.app });
  const msgFile = join(BENCH_ROOT, 'msg.txt');

  if (!process.env.CI) {
    await writeFile(
      msgFile,
      output.stdout.split('Benchmark Results Summary').pop() ?? output.stdout,
      'utf8'
    );
  }

  await cleanup();

  return {
    benchRoot: BENCH_ROOT,
    msgFile,
    controlUrl,
    experimentUrl,
  };
}
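
buildMarkersString expands each marker name into the Start/End pair that is passed to tracerbench's --markers flag; the leading empty entry created by the reduce seed is dropped by filter(Boolean). The function is module-private, so the call below is purely illustrative of its output:

// illustration only; buildMarkersString is not exported from run.mjs
buildMarkersString(['render', 'clearItems1']);
// => 'renderStart,renderEnd,clearItems1Start,clearItems1End'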
