2 changes: 1 addition & 1 deletion benchmark/http/headers.js
@@ -27,7 +27,7 @@ function main({ len, n, duration }) {
'Transfer-Encoding': 'chunked',
};

const Is = [...Array(n / len).keys()];
const Is = [...Array(parseInt(n / len)).keys()];
const Js = [...Array(len).keys()];

for (const i of Is) {
21 changes: 14 additions & 7 deletions benchmark/test_runner/global-concurrent-tests.js
@@ -2,13 +2,20 @@
const common = require('../common');
const { it } = require('node:test');

const bench = common.createBenchmark(main, {
n: [100, 1000, 1e4],
type: ['sync', 'async'],
}, {
// We don't want to test the reporter here
flags: ['--test-reporter=./benchmark/fixtures/empty-test-reporter.js'],
});
const bench = common.createBenchmark(
main,
{
n: [100, 1000, 1e4],
type: ['sync', 'async'],
},
{
// We don't want to test the reporter here
flags: [
'--test-reporter=./benchmark/fixtures/empty-test-reporter.js',
'--test-reporter-destination=stdout',
],
},
);

async function run(n, type) {
const promises = new Array(n);
22 changes: 14 additions & 8 deletions benchmark/test_runner/global-sequential-tests.js
@@ -2,14 +2,20 @@
const common = require('../common');
const { it } = require('node:test');


const bench = common.createBenchmark(main, {
n: [100, 1000, 1e4],
type: ['sync', 'async'],
}, {
// We don't want to test the reporter here
flags: ['--test-reporter=./benchmark/fixtures/empty-test-reporter.js'],
});
const bench = common.createBenchmark(
main,
{
n: [100, 1000, 1e4],
type: ['sync', 'async'],
},
{
// We don't want to test the reporter here
flags: [
'--test-reporter=./benchmark/fixtures/empty-test-reporter.js',
'--test-reporter-destination=stdout',
],
},
);

async function run(n, type) {
// eslint-disable-next-line no-unused-vars
21 changes: 14 additions & 7 deletions benchmark/test_runner/mock-fn.js
@@ -4,13 +4,20 @@ const common = require('../common');
const assert = require('node:assert');
const { test } = require('node:test');

const bench = common.createBenchmark(main, {
n: [1e6],
mode: ['define', 'execute'],
}, {
// We don't want to test the reporter here
flags: ['--test-reporter=./benchmark/fixtures/empty-test-reporter.js'],
});
const bench = common.createBenchmark(
main,
{
n: [1e6],
mode: ['define', 'execute'],
},
{
// We don't want to test the reporter here
flags: [
'--test-reporter=./benchmark/fixtures/empty-test-reporter.js',
'--test-reporter-destination=stdout',
],
},
);

const noop = () => {};

21 changes: 14 additions & 7 deletions benchmark/test_runner/run-single-test-file.js
@@ -31,13 +31,20 @@ function setup(numberOfTestFiles) {
* Specifically, it compares the performance of running tests in the
* same process versus creating multiple processes.
*/
const bench = common.createBenchmark(main, {
numberOfTestFiles: [1, 10, 100],
isolation: ['none', 'process'],
}, {
// We don't want to test the reporter here
flags: ['--test-reporter=./benchmark/fixtures/empty-test-reporter.js'],
});
const bench = common.createBenchmark(
main,
{
numberOfTestFiles: [1, 10, 100],
isolation: ['none', 'process'],
},
{
// We don't want to test the reporter here
flags: [
'--test-reporter=./benchmark/fixtures/empty-test-reporter.js',
'--test-reporter-destination=stdout',
],
},
);

async function runBenchmark({ numberOfTestFiles, isolation }) {
const dirPath = getTestDirPath(numberOfTestFiles);
25 changes: 16 additions & 9 deletions benchmark/test_runner/suite-tests.js
@@ -6,15 +6,22 @@ const reporter = require('../fixtures/empty-test-reporter');

const { describe, it } = require('node:test');

const bench = common.createBenchmark(main, {
numberOfSuites: [10, 100],
testsPerSuite: [10, 100, 1000],
testType: ['sync', 'async'],
concurrency: ['yes', 'no'],
}, {
// We don't want to test the reporter here
flags: ['--test-reporter=./benchmark/fixtures/empty-test-reporter.js'],
});
const bench = common.createBenchmark(
main,
{
numberOfSuites: [10, 100],
testsPerSuite: [10, 100, 1000],
testType: ['sync', 'async'],
concurrency: ['yes', 'no'],
},
{
// We don't want to test the reporter here
flags: [
'--test-reporter=./benchmark/fixtures/empty-test-reporter.js',
'--test-reporter-destination=stdout',
],
},
);

async function run({ numberOfSuites, testsPerSuite, testType, concurrency }) {
concurrency = concurrency === 'yes';
57 changes: 57 additions & 0 deletions doc/contributing/writing-and-running-benchmarks.md
@@ -700,6 +700,63 @@ Supported options keys are:
* `benchmarker` - benchmarker to use, defaults to the first available http
benchmarker

### Creating benchmark tests

When a new benchmark is introduced, it is recommended to create a matching test
file in `test/benchmark` so the benchmark can be exercised by the test suite.

When calling `runBenchmark`, provide the benchmark group name
(which is the folder name in the `benchmark/` folder) as the first parameter,
and optionally pass environment variables as the second parameter.

```js
'use strict';

require('../common'); // Loading the common module is required for all test files

const runBenchmark = require('../common/benchmark');

runBenchmark('buffers', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 });
```

The `NODEJS_BENCHMARK_ZERO_ALLOWED` environment variable is needed when a
benchmark executes so quickly under the test settings that it may report a rate
of zero, which the benchmark harness normally treats as an error. Setting this
variable allows such zero results to pass.

Test execution behavior depends on the `NODE_RUN_ALL_BENCH_TESTS` environment variable.
When the variable is set, every benchmark configuration runs with minimal
iterations (`n=1`, `roundtrips=1`). This bypasses meaningful performance
measurement and only verifies that each configuration can complete without failing.
Even with the minimal iterations, execution remains time-consuming
because all configurations must be exercised.

When `NODE_RUN_ALL_BENCH_TESTS` is not set,
only a single configuration per benchmark executes.
While this dramatically reduces execution time, it provides limited coverage
and cannot guarantee that all configurations function properly.
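
As a reference, this is roughly how `test/common/benchmark.js` chooses its
arguments based on the variable (a minimal sketch of the change shown later in
this diff):

```js
// When NODE_RUN_ALL_BENCH_TESTS is set, run every configuration once with
// minimal iterations; otherwise use the single-configuration `test` mode.
const argv = process.env.NODE_RUN_ALL_BENCH_TESTS ?
  ['--set', 'n=1', '--set', 'roundtrips=1'] :
  ['test'];
```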

This PR introduces a new environment variable, `NODE_RUN_ALL_BENCH_TESTS`, which can be
set to run all benchmark configurations in tests and thereby cover more scenarios where
benchmarks might fail. It also documents how to write benchmark tests and provides more
details about the following environment variables:

* `NODE_RUN_ALL_BENCH_TESTS`
* `NODEJS_BENCHMARK_ZERO_ALLOWED`

Benchmark tests were added for the following groups:

* `abort_controller`
* `error`
* `https`
* `perf_hooks`
* `permission`
* `sqlite`
* `test_runner`
* `websocket`

Additionally, some inconsistently named test files were renamed:

* `test/benchmark/test-benchmark-async-hooks.js` → `test/benchmark/test-benchmark-async_hooks.js`
* `test/benchmark/test-benchmark-child-process.js` → `test/benchmark/test-benchmark-child_process.js`

[autocannon]: https://github.com/mcollina/autocannon
[benchmark-ci]: https://github.com/nodejs/benchmarking/blob/HEAD/docs/core_benchmarks.md
[git-for-windows]: https://git-scm.com/download/win
10 changes: 10 additions & 0 deletions test/benchmark/test-benchmark-abort_controller.js
@@ -0,0 +1,10 @@
'use strict';

require('../common');

// Minimal test for the abort_controller benchmarks. This makes sure the
// benchmarks aren't completely broken but nothing more than that.

const runBenchmark = require('../common/benchmark');

runBenchmark('abort_controller');
7 changes: 7 additions & 0 deletions test/benchmark/test-benchmark-error.js
@@ -0,0 +1,7 @@
'use strict';

require('../common');

const runBenchmark = require('../common/benchmark');

runBenchmark('error');
14 changes: 14 additions & 0 deletions test/benchmark/test-benchmark-https.js
@@ -0,0 +1,14 @@
'use strict';

const common = require('../common');

if (!common.enoughTestMem)
common.skip('Insufficient memory for HTTPS benchmark test');

// Because the https benchmarks use hardcoded ports, this should be in sequential
// rather than parallel to make sure it does not conflict with tests that choose
// random available ports.

const runBenchmark = require('../common/benchmark');

runBenchmark('https');
7 changes: 7 additions & 0 deletions test/benchmark/test-benchmark-perf_hooks.js
@@ -0,0 +1,7 @@
'use strict';

require('../common');

const runBenchmark = require('../common/benchmark');

runBenchmark('perf_hooks');
7 changes: 7 additions & 0 deletions test/benchmark/test-benchmark-permission.js
@@ -0,0 +1,7 @@
'use strict';

require('../common');

const runBenchmark = require('../common/benchmark');

runBenchmark('permission');
7 changes: 7 additions & 0 deletions test/benchmark/test-benchmark-sqlite.js
@@ -0,0 +1,7 @@
'use strict';

require('../common');

const runBenchmark = require('../common/benchmark');

runBenchmark('sqlite');
7 changes: 7 additions & 0 deletions test/benchmark/test-benchmark-test_runner.js
@@ -0,0 +1,7 @@
'use strict';

require('../common');

const runBenchmark = require('../common/benchmark');

runBenchmark('test_runner', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 });
9 changes: 9 additions & 0 deletions test/benchmark/test-benchmark-webstorage.js
@@ -0,0 +1,9 @@
'use strict';

const common = require('../common');
if (!common.enoughTestMem)
common.skip('Insufficient memory for Websocket benchmark test');

const runBenchmark = require('../common/benchmark');

runBenchmark('websocket', { NODEJS_BENCHMARK_ZERO_ALLOWED: 1 });
10 changes: 6 additions & 4 deletions test/common/benchmark.js
@@ -7,7 +7,7 @@ const path = require('path');
const runjs = path.join(__dirname, '..', '..', 'benchmark', 'run.js');

function runBenchmark(name, env) {
const argv = ['test'];
const argv = process.env.NODE_RUN_ALL_BENCH_TESTS ? ['--set', 'n=1', '--set', 'roundtrips=1'] : ['test'];

argv.push(name);

@@ -28,16 +28,18 @@ function runBenchmark(name, env) {
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);

// This bit makes sure that each benchmark file is being sent settings such
// that the benchmark file runs just one set of options. This helps keep the
// If NODE_RUN_ALL_BENCH_TESTS is passed it means all the configs need to be run
if (process.env.NODE_RUN_ALL_BENCH_TESTS) return;

// If NODE_RUN_ALL_BENCH_TESTS isn't passed, this bit makes sure that each
// benchmark file is being sent settings such that the benchmark file runs just
// one set of options. This helps keep the benchmark tests from taking a long
// time to run. Therefore, stdout should be composed as follows:
// The first and last lines should be empty.
// Each test should be separated by a blank line.
// The first line of each test should contain the test's name.
// The second line of each test should contain the configuration for the test.
// If the test configuration is not a group, there should be exactly two lines.
// Otherwise, it is possible to have more than two lines.
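//
// As a hypothetical illustration (not actual captured output), a run over a
// single benchmark file with one non-group configuration yields stdout shaped
// like:
//
//   <empty first line>
//   <test name>
//   <configuration line for that test>
//   <empty last line>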

const splitTests = stdout.split(/\n\s*\n/);

for (let testIdx = 1; testIdx < splitTests.length - 1; testIdx++) {