Skip to content

Commit 16c7f55

Browse files
committed
adding in benchmarks
1 parent dd450e8 commit 16c7f55

File tree

4 files changed

+96
-24
lines changed

4 files changed

+96
-24
lines changed

packages/benchmark/memory-lru.ts

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
import { createBenchmark, getModuleName, printToConsole, generateAlphaNumeric } from "index.js";
2+
import { CacheableMemory } from "cacheable";
3+
import QuickLRU from 'quick-lru';
4+
import { createLRU } from 'lru.min';
5+
6+
const bench = createBenchmark("Memory LRU Benchmark", 100000);
7+
8+
// Cacheable Memory
9+
const cacheable = new CacheableMemory({lruSize: 80000 });
10+
let cacheableName = getModuleName("Cacheable Memory", "1.10.0");
11+
12+
// QuickLRU
13+
const quickLRU = new QuickLRU({maxSize: 80000});
14+
let quickLRUName = getModuleName("quick-lru");
15+
16+
// lru.min
17+
const lruMin = createLRU({ max: 80000 });
18+
let lruMinName = getModuleName("lru.min");
19+
20+
// Map
21+
const map = new Map<string, string>();
22+
let mapName = getModuleName("Map", "22");
23+
24+
bench.add(`${cacheableName} - set / get`, async () => {
25+
const alphaNumericData = generateAlphaNumeric();
26+
cacheable.set(alphaNumericData.key, alphaNumericData.value);
27+
cacheable.get(alphaNumericData.key);
28+
});
29+
30+
bench.add(`${quickLRUName} - set / get`, async () => {
31+
const alphaNumericData = generateAlphaNumeric();
32+
quickLRU.set(alphaNumericData.key, alphaNumericData.value);
33+
quickLRU.get(alphaNumericData.key);
34+
});
35+
36+
bench.add(`${lruMinName} - set / get`, async () => {
37+
const alphaNumericData = generateAlphaNumeric();
38+
lruMin.set(alphaNumericData.key, alphaNumericData.value);
39+
lruMin.get(alphaNumericData.key);
40+
});
41+
42+
bench.add(`${mapName} - set / get`, async () => {
43+
const alphaNumericData = generateAlphaNumeric();
44+
map.set(alphaNumericData.key, alphaNumericData.value);
45+
map.get(alphaNumericData.key);
46+
});
47+
48+
await bench.run();
49+
50+
console.log(`*${bench.name} Results:*`);
51+
printToConsole(bench);

packages/benchmark/memory.ts

Lines changed: 27 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,30 @@
11
import { createBenchmark, getModuleName, printToConsole, generateAlphaNumeric } from "index.js";
22
import { CacheableMemory } from "cacheable";
3-
import QuickLRU from 'quick-lru';
4-
import { createLRU } from 'lru.min';
3+
import NodeCache from 'node-cache';
4+
import { BentoCache, bentostore } from 'bentocache';
5+
import { memoryDriver } from 'bentocache/drivers/memory';
56

67
const bench = createBenchmark("Memory Benchmark", 100000);
78

89
// Cacheable Memory
910
const cacheable = new CacheableMemory();
10-
let cacheableName = getModuleName("Cacheable Memory", "1.9.0");
11-
12-
// QuickLRU
13-
const quickLRU = new QuickLRU({maxSize: 80000});
14-
let quickLRUName = getModuleName("quick-lru");
15-
16-
// lru.min
17-
const lruMin = createLRU({ max: 80000 });
18-
let lruMinName = getModuleName("lru.min");
11+
let cacheableName = getModuleName("Cacheable Memory", "1.10.0");
12+
13+
// Node Cache
14+
const nodeCache = new NodeCache();
15+
let nodeCacheName = getModuleName("Node Cache");
16+
17+
// BentoCache with Memory Driver
18+
const bento = new BentoCache({
19+
default: 'myCache',
20+
stores: {
21+
// A first cache store named "myCache" using
22+
// only L1 in-memory cache
23+
myCache: bentostore()
24+
.useL1Layer(memoryDriver({ maxSize: '10mb' }))
25+
}
26+
});
27+
let bentoName = getModuleName("BentoCache");
1928

2029
// Map
2130
const map = new Map<string, string>();
@@ -27,16 +36,16 @@ bench.add(`${cacheableName} - set / get`, async () => {
2736
cacheable.get(alphaNumericData.key);
2837
});
2938

30-
bench.add(`${quickLRUName} - set / get`, async () => {
39+
bench.add(`${nodeCacheName} - set / get`, async () => {
3140
const alphaNumericData = generateAlphaNumeric();
32-
quickLRU.set(alphaNumericData.key, alphaNumericData.value);
33-
quickLRU.get(alphaNumericData.key);
41+
nodeCache.set(alphaNumericData.key, alphaNumericData.value);
42+
nodeCache.get(alphaNumericData.key);
3443
});
3544

36-
bench.add(`${lruMinName} - set / get`, async () => {
45+
bench.add(`${bentoName} - set / get`, async () => {
3746
const alphaNumericData = generateAlphaNumeric();
38-
lruMin.set(alphaNumericData.key, alphaNumericData.value);
39-
lruMin.get(alphaNumericData.key);
47+
await bento.set({ key: alphaNumericData.key, value: alphaNumericData.value});
48+
await bento.get({ key: alphaNumericData.key});
4049
});
4150

4251
bench.add(`${mapName} - set / get`, async () => {
@@ -47,4 +56,5 @@ bench.add(`${mapName} - set / get`, async () => {
4756

4857
await bench.run();
4958

59+
console.log(`*${bench.name} Results:*`);
5060
printToConsole(bench);

packages/benchmark/package.json

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,12 +34,13 @@
3434
"prepublish": "pnpm build",
3535
"test": "echo 'no tests needed'",
3636
"test:ci": "echo 'no tests needed'",
37-
"benchmark:memory": "tsx ./memory.ts",
37+
"benchmark:memory": "tsx ./memory.ts && tsx ./memory-lru.ts",
3838
"clean": "rimraf ./dist ./coverage ./node_modules"
3939
},
4040
"dependencies": {
4141
"@faker-js/faker": "^9.8.0",
4242
"@monstermann/tinybench-pretty-printer": "^0.1.0",
43+
"bentocache": "^1.4.0",
4344
"cache-manager": "workspace:^",
4445
"cacheable": "workspace:^",
4546
"lru.min": "^1.1.2",
@@ -51,6 +52,7 @@
5152
"license"
5253
],
5354
"devDependencies": {
55+
"node-cache": "^5.1.2",
5456
"tsx": "^4.19.4"
5557
}
5658
}

packages/cacheable/README.md

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -471,12 +471,21 @@ NOTE: if you set the `lruSize` property to `0` after it was enabled the `hashSto
471471

472472
Our goal with `cacheable` and `CacheableMemory` is to provide a high-performance caching engine that is simple to use and has a robust API. We test it against other caching engines that are less feature-rich to make sure there is little difference. Here are some of the benchmarks we have run:
473473

474-
| name | summary | ops/sec | time/op | margin | samples |
475-
|-----------------------------------------|:---------:|----------:|----------:|:--------:|----------:|
476-
| quick-lru (v7.0.1) - set / get | 🥇 | 126K | 8µs | ±0.80% | 120K |
477-
| Cacheable Memory (v1.9.0) - set / get | -1.2% | 125K | 8µs | ±0.65% | 119K |
478-
| Map (v22) - set / get | -1.4% | 125K | 9µs | ±1.39% | 117K |
479-
| lru.min (v1.1.2) - set / get | -1.4% | 125K | 8µs | ±0.88% | 118K |
474+
*Memory Benchmark Results:*
475+
| name | summary | ops/sec | time/op | margin | samples |
476+
|------------------------------------------|:---------:|----------:|----------:|:--------:|----------:|
477+
| Map (v22) - set / get | 🥇 | 127K | 8µs | ±0.93% | 120K |
478+
| Cacheable Memory (v1.10.0) - set / get | -1.3% | 125K | 8µs | ±1.17% | 118K |
479+
| Node Cache - set / get | -4.8% | 121K | 9µs | ±1.24% | 114K |
480+
| bentocache (v1.4.0) - set / get | -20% | 101K | 10µs | ±0.73% | 100K |
481+
482+
*Memory LRU Benchmark Results:*
483+
| name | summary | ops/sec | time/op | margin | samples |
484+
|------------------------------------------|:---------:|----------:|----------:|:--------:|----------:|
485+
| Map (v22) - set / get | 🥇 | 127K | 8µs | ±1.30% | 120K |
486+
| quick-lru (v7.0.1) - set / get | -0.3% | 126K | 8µs | ±0.87% | 120K |
487+
| lru.min (v1.1.2) - set / get | -1.4% | 125K | 8µs | ±0.82% | 119K |
488+
| Cacheable Memory (v1.10.0) - set / get | -5.5% | 120K | 9µs | ±0.83% | 113K |
480489

481490
## CacheableMemory Options
482491

0 commit comments

Comments
 (0)