Commit 1f20f5a

feat: add setWorkers() method chain, benchmarks.js, updated docs
- Add setWorkers(n) to turbo executor for customizing worker count
- Add benchmarks.js for reproducible performance testing (10 runs avg)
- Update README with benchmark results (Bun 1.83x speedup, Node non-blocking)
- Show blocking vs non-blocking status for each mode
- Add tests for setWorkers() validation
1 parent 449786f commit 1f20f5a

4 files changed: +275 -0 lines changed

README.md

Lines changed: 62 additions & 0 deletions
@@ -503,6 +503,68 @@ const stream = beeThreads

---

## Benchmarks

Run the benchmark yourself:

```bash
bun benchmarks.js    # Bun
node benchmarks.js   # Node
```

### Results (1M items, heavy function, 12 CPUs, 10 runs avg)

**Bun** - Real parallel speedup:

| Mode | Time (±std) | Speedup | Main Thread |
|------|-------------|---------|-------------|
| main | 285±5ms | 1.00x | ❌ blocked |
| bee | 1138±51ms | 0.25x | ✅ free |
| turbo(4) | 255±7ms | 1.12x | ✅ free |
| turbo(8) | 180±8ms | **1.58x** | ✅ free |
| **turbo(12)** | **156±12ms** | **1.83x** | ✅ free |
| turbo(16) | 204±28ms | 1.40x | ✅ free |

**Node** - Non-blocking I/O (slower, but frees the main thread):

| Mode | Time (±std) | Speedup | Main Thread |
|------|-------------|---------|-------------|
| main | 368±13ms | 1.00x | ❌ blocked |
| bee | 5569±203ms | 0.07x | ✅ free |
| turbo(4) | 1793±85ms | 0.21x | ✅ free |
| turbo(8) | 1052±22ms | 0.35x | ✅ free |
| **turbo(12)** | **1017±57ms** | **0.36x** | ✅ free |
| turbo(16) | 1099±98ms | 0.34x | ✅ free |

### Key Insights

- **Bun + turbo(cpus)**: Up to **1.83x faster** than the main thread
- **bee/turbo**: Non-blocking - the main thread stays **free for HTTP/I/O** (see the sketch below)
- **Node + turbo**: Slower than the main thread, but useful for keeping servers responsive
- **bee vs turbo**: turbo is up to **7x faster** than bee for large arrays (Bun: 156ms vs 1138ms)
- **Default workers**: `cpus - 1` (safe for all systems)
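
The non-blocking claim is easy to verify by hand. The snippet below is illustrative and not part of this commit: it starts a `turbo().map()` over a large array and keeps a timer ticking on the main thread while the workers run. The `require('bee-threads')` specifier and the `checkResponsiveness` helper are assumptions made for the example; inside this repo, `benchmarks.js` loads `./dist/index.js` instead.

```js
// Hypothetical responsiveness check (package specifier assumed; use './dist/index.js' in-repo).
const { beeThreads } = require('bee-threads');

const heavyFn = (x) => {
  let v = x;
  for (let i = 0; i < 10; i++) v = Math.sqrt(Math.abs(Math.sin(v) * 1000));
  return v;
};

async function checkResponsiveness() {
  const arr = new Array(1_000_000).fill(0).map((_, i) => i);

  // Workers take the CPU-heavy map; we don't await it yet.
  const pending = beeThreads.turbo(arr, { force: true }).map(heavyFn);

  // Meanwhile the main thread keeps servicing its event loop (timers, HTTP, I/O).
  const tick = setInterval(() => console.log('main thread still responsive'), 50);

  const result = await pending;
  clearInterval(tick);
  console.log('turbo finished:', result.length, 'items');

  await beeThreads.shutdown();
}

checkResponsiveness().catch(console.error);
```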

### Customize Workers

```js
// Method chain
await beeThreads.turbo(data).setWorkers(12).map(fn)

// Or via options
await beeThreads.turbo(data, { workers: 12 }).map(fn)
```
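
One detail worth calling out here (this note is not part of the commit's README text, but it matches the validation added in `src/turbo.ts` and covered in `test.ts` below): `setWorkers()` accepts only a positive integer and throws a `TypeError` for anything else. A small sketch, assuming `beeThreads` is already in scope:

```js
// Illustrative only; mirrors the validation added in src/turbo.ts and the tests in test.ts.
async function setWorkersExamples() {
  const data = [1, 2, 3, 4, 5];

  // Valid: setWorkers(2) returns a new executor configured for 2 workers.
  const doubled = await beeThreads
    .turbo(data, { force: true })
    .setWorkers(2)
    .map((x) => x * 2);
  console.log(doubled); // [2, 4, 6, 8, 10]

  // Invalid: zero, negative, and fractional counts all throw synchronously.
  for (const bad of [0, -1, 1.5]) {
    try {
      beeThreads.turbo(data).setWorkers(bad);
    } catch (err) {
      console.log(`setWorkers(${bad}) ->`, err instanceof TypeError); // true
    }
  }
}
```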

### When to Use

| Scenario | Recommendation |
|----------|----------------|
| Bun + heavy function | `turbo(cpus)` → real speedup |
| Node + HTTP server | `turbo()` → non-blocking I/O |
| Light function (`x*x`) | Main thread → overhead not worth it |
| CLI/batch processing | `turbo(cpus + 4)` → max throughput |
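
For the "Light function" row, the overhead question is easy to settle empirically: time both paths on your actual workload before offloading. The `worthOffloading` helper below is a hypothetical sketch (not from this commit) that follows the same timing approach as `benchmarks.js` and assumes `beeThreads` is in scope:

```js
// Compare a main-thread map against a turbo map for one workload and report which wins.
async function worthOffloading(data, fn) {
  let t = performance.now();
  data.map(fn);
  const mainMs = performance.now() - t;

  t = performance.now();
  await beeThreads.turbo(data, { force: true }).map(fn);
  const turboMs = performance.now() - t;

  // For light functions like x => x * x, worker overhead usually makes turboMs larger.
  return { mainMs, turboMs, useTurbo: turboMs < mainMs };
}
```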

---

## Why bee-threads?

- **Zero dependencies** - Lightweight and secure

benchmarks.js

Lines changed: 174 additions & 0 deletions
@@ -0,0 +1,174 @@
/**
 * 🐝 bee-threads Benchmark Suite
 *
 * Run with:
 *   bun benchmarks.js
 *   node benchmarks.js
 *
 * Compares: main thread vs bee vs turbo
 * Measures: execution time, CPU usage
 */

const os = require('os');
const { bee, beeThreads } = require('./dist/index.js');

const cpus = os.cpus().length;
const runtime = typeof Bun !== 'undefined' ? 'Bun' : 'Node';

// Config - Adjust based on your system
const SIZE = 1_000_000;
const RUNS = 10; // Number of runs for averaging

// Heavy function (CPU intensive)
const heavyFn = (x) => {
  let v = x;
  for (let i = 0; i < 10; i++) {
    v = Math.sqrt(Math.abs(Math.sin(v) * 1000));
  }
  return v;
};

// CPU usage measurement
function getCpuUsage() {
  const usage = process.cpuUsage();
  return {
    user: usage.user / 1000,
    system: usage.system / 1000
  };
}

async function benchmark(name, fn) {
  const times = [];
  const cpuTimes = [];

  // Warmup run
  await fn();

  // Measured runs
  for (let run = 0; run < RUNS; run++) {
    const cpuStart = getCpuUsage();
    const start = performance.now();

    await fn();

    const elapsed = performance.now() - start;
    const cpuEnd = getCpuUsage();
    const cpuUsed = (cpuEnd.user - cpuStart.user) + (cpuEnd.system - cpuStart.system);

    times.push(elapsed);
    cpuTimes.push(cpuUsed);
  }

  // Calculate stats
  const avg = arr => arr.reduce((a, b) => a + b, 0) / arr.length;
  const stdDev = arr => {
    const mean = avg(arr);
    return Math.sqrt(arr.reduce((sum, x) => sum + (x - mean) ** 2, 0) / arr.length);
  };

  const ms = avg(times);
  const msStd = stdDev(times);
  const cpu = avg(cpuTimes);
  // CPU usage as percentage: (total CPU time / elapsed time) * 100
  // >100% means multiple cores were used
  const cpuUsage = (cpu / ms) * 100;

  return { name, ms, msStd, cpu, cpuUsage };
}

async function main() {
  console.log(`
╔═══════════════════════════════════════════════════════════════╗
║ 🐝 bee-threads Benchmark Suite ║
╠═══════════════════════════════════════════════════════════════╣
║ Runtime: ${runtime.padEnd(10)} │ CPUs: ${String(cpus).padEnd(4)} │ Array: ${(SIZE/1e6).toFixed(1)}M items ║
║ Function: Heavy (Math.sqrt + Math.sin × 10 iterations) ║
╚═══════════════════════════════════════════════════════════════╝
`);

  const arr = new Array(SIZE).fill(0).map((_, i) => i);
  const results = [];

  // 1) Main thread
  console.log('⏳ Testing main thread...');
  results.push(await benchmark('main', () => {
    arr.map(heavyFn);
  }));

  // 2) bee() - single worker
  console.log('⏳ Testing bee()...');
  try {
    const beeResult = await Promise.race([
      benchmark('bee', async () => {
        await bee((data) => {
          return data.map(x => {
            let v = x;
            for (let i = 0; i < 10; i++) v = Math.sqrt(Math.abs(Math.sin(v) * 1000));
            return v;
          });
        })(arr);
      }),
      new Promise((_, rej) => setTimeout(() => rej(new Error('timeout')), 120000))
    ]);
    results.push(beeResult);
  } catch (e) {
    console.log(' ⚠️ bee() timed out or failed');
    results.push({ name: 'bee', ms: Infinity, msStd: 0, cpu: 0, cpuUsage: 0 });
  }

  // 3) turbo with different worker counts
  const workerConfigs = [4, 8, cpus];
  if (cpus > 8) workerConfigs.push(cpus + 4);
  for (const workers of workerConfigs) {
    console.log(`⏳ Testing turbo(${workers})...`);
    try {
      const result = await Promise.race([
        benchmark(`turbo(${workers})`, async () => {
          await beeThreads.turbo(arr, { workers, force: true }).map(heavyFn);
        }),
        new Promise((_, rej) => setTimeout(() => rej(new Error('timeout')), 60000))
      ]);
      results.push(result);
    } catch (e) {
      console.log(` ⚠️ turbo(${workers}) timed out`);
      results.push({ name: `turbo(${workers})`, ms: Infinity, msStd: 0, cpu: 0, cpuUsage: 0 });
    }
  }

  // Print results
  const mainMs = results[0].ms;

  console.log(`
┌─────────────┬────────────────┬─────────┬─────────────┐
│ Mode        │ Time (±std)    │ vs Main │ Main Thread │
├─────────────┼────────────────┼─────────┼─────────────┤`);

  for (const r of results) {
    const speedup = (mainMs / r.ms).toFixed(2);
    const marker = parseFloat(speedup) >= 1 ? '✅' : ' ';
    const timeStr = `${r.ms.toFixed(0)}±${r.msStd.toFixed(0)}ms`;
    const blocking = r.name === 'main' ? '❌ blocked' : '✅ free';
    console.log(`│ ${r.name.padEnd(11)}${timeStr.padStart(14)}${speedup.padStart(5)}x ${marker}${blocking.padEnd(11)} │`);
  }

  console.log(`└─────────────┴────────────────┴─────────┴─────────────┘`);
  console.log(`\n 📈 Stats: ${RUNS} runs per config (+ 1 warmup)`);

  // Summary
  const best = results.slice(1).reduce((a, b) => a.ms < b.ms ? a : b);
  const bestSpeedup = (mainMs / best.ms).toFixed(2);

  console.log(`
📊 Summary:
  • Best turbo config: ${best.name} (${bestSpeedup}x vs main)
  • Recommended: turbo(${cpus}) for this system

💡 Customize workers:
  beeThreads.turbo(arr).setWorkers(${cpus}).map(fn)
`);

  await beeThreads.shutdown();
}

main().catch(console.error);

src/turbo.ts

Lines changed: 16 additions & 0 deletions
@@ -106,6 +106,8 @@ function isTypedArray(value: unknown): value is NumericTypedArray {
 // ============================================================================

 export interface TurboExecutor<TItem> {
+  /** Set the number of workers to use. Returns a new executor. */
+  setWorkers(count: number): TurboExecutor<TItem>;
   map<TResult>(fn: (item: TItem, index: number) => TResult): Promise<TResult[]>;
   mapWithStats<TResult>(fn: (item: TItem, index: number) => TResult): Promise<TurboResult<TResult>>;
   filter(fn: (item: TItem, index: number) => boolean): Promise<TItem[]>;
@@ -132,6 +134,13 @@ export function createTurboExecutor<TItem>(
 ): TurboExecutor<TItem> {
   // V8: Monomorphic object shape - all methods declared upfront
   const executor: TurboExecutor<TItem> = {
+    setWorkers(count: number): TurboExecutor<TItem> {
+      if (!Number.isInteger(count) || count < 1) {
+        throw new TypeError('setWorkers() requires a positive integer');
+      }
+      return createTurboExecutor<TItem>(data, { ...options, workers: count });
+    },
+
     map<TResult>(fn: (item: TItem, index: number) => TResult): Promise<TResult[]> {
       const fnString = fn.toString();
       return executeTurboMap<TResult>(fnString, data as unknown[], options);
@@ -897,6 +906,13 @@ export function createMaxExecutor<TItem>(
 ): TurboExecutor<TItem> {
   // V8: Monomorphic object shape
   const executor: TurboExecutor<TItem> = {
+    setWorkers(count: number): TurboExecutor<TItem> {
+      if (!Number.isInteger(count) || count < 1) {
+        throw new TypeError('setWorkers() requires a positive integer');
+      }
+      return createMaxExecutor<TItem>(data, { ...options, workers: count });
+    },
+
     map<TResult>(fn: (item: TItem, index: number) => TResult): Promise<TResult[]> {
       const fnString = fn.toString();
       return executeMaxMap<TResult>(fnString, data as unknown[], options);
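
Because `setWorkers()` builds a fresh executor through `createTurboExecutor` / `createMaxExecutor` rather than mutating the shared `options`, a base executor can be reused with several worker counts. The `sweepWorkerCounts` helper below is a hypothetical sketch of that pattern (not part of the commit; it assumes `beeThreads` is in scope):

```js
// Reuse one base executor with different worker counts; `base` itself is never mutated.
async function sweepWorkerCounts(data, fn, counts = [4, 8, 12]) {
  const base = beeThreads.turbo(data, { force: true });
  const timings = [];

  for (const workers of counts) {
    const start = performance.now();
    await base.setWorkers(workers).map(fn); // each call returns its own executor
    timings.push({ workers, ms: performance.now() - start });
  }

  return timings;
}
```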

test.ts

Lines changed: 23 additions & 0 deletions
@@ -3246,6 +3246,29 @@ async function runTests(): Promise<void> {
     assert.deepStrictEqual(result, data, 'Order must be preserved');
   });

+  await test('turbo().setWorkers(n) customizes worker count', async () => {
+    const data = [1, 2, 3, 4, 5];
+    const result = await beeThreads.turbo(data, { force: true }).setWorkers(2).map((x: number) => x * 2);
+
+    assert.deepStrictEqual(result, [2, 4, 6, 8, 10], 'setWorkers should work');
+  });
+
+  await test('turbo().setWorkers() throws for invalid input', () => {
+    const data = [1, 2, 3];
+
+    assert.throws(() => {
+      beeThreads.turbo(data).setWorkers(0);
+    }, TypeError);
+
+    assert.throws(() => {
+      beeThreads.turbo(data).setWorkers(-1);
+    }, TypeError);
+
+    assert.throws(() => {
+      beeThreads.turbo(data).setWorkers(1.5);
+    }, TypeError);
+  });
+
   // ---------- TURBO BENCHMARKS ----------
   section('Turbo Mode - Benchmarks');
