
Commit 958cd8f

Merge pull request #110 from alexdln/nic-107
nic-107 add layers support in widget and tools
2 parents bc1241b + fe826fb commit 958cd8f

19 files changed: +285 -116 lines changed

examples/redis-cache/cache-handler.js

Lines changed: 3 additions & 1 deletion

@@ -2,4 +2,6 @@
 // eslint-disable-next-line @typescript-eslint/no-require-imports
 const { CacheHandler } = require("@nimpl/cache-redis/cache-handler");
 
-module.exports = new CacheHandler({ redisOptions: { connectionStrategy: "wait-exit" } });
+global.cacheHandler ||= new CacheHandler({ redisOptions: { connectionStrategy: "wait-exit" } });
+
+module.exports = global.cacheHandler;

examples/redis-cache/src/app/page.tsx

Lines changed: 1 addition & 1 deletion

@@ -4,7 +4,7 @@ import { ClientTime } from "./client-time";
 
 export default async function Home() {
     "use cache";
-    cacheLife({ stale: 5, revalidate: 10, expire: 10000 });
+    cacheLife({ stale: 30, revalidate: 60, expire: 300 });
 
     const revalidateStateDate = new Date();
     await new Promise((resolve) => setTimeout(resolve, 5000));

packages/cache-redis/README.md

Lines changed: 14 additions & 1 deletion

@@ -130,9 +130,11 @@ You can initialize the cache handler with custom configuration in an independent
 // cache-handlers/redis.js
 import { CacheHandler } from "@nimpl/cache-redis/cache-handler";
 
-module.exports = new CacheHandler(/* Options */);
+global.cacheHandler ||= new CacheHandler(/* Options */);
 ```
 
+> **Note**: Writing the instance to `global` is recommended; otherwise it would be created separately for Next.js and for your independent usage (for example, the cache widget or your internal utilities), so in-memory entries would diverge and be duplicated.
+
 ```ts
 import { type NextConfig } from "next/types";
 
@@ -206,6 +208,17 @@ Currently in Next.js background revalidation doesn't work correctly with dynamic
 
 In serverless environments, the `CacheHandler` is initialized on each request, which makes the in-memory LRU cache layer less usable since it's reset between invocations. The cache handler will still function correctly but will primarily rely on Redis for caching in these environments.
 
+## Examples
+
+- **[Base Example](https://github.com/alexdln/nimpl-cache/tree/main/examples/redis-cache)**
+    - Minimal Next.js example demonstrating the Redis cache handler and widget setup
+
+- **[React Router Example](https://router-bsky.contection.dev/)** - [View source code](https://github.com/alexdln/contection/tree/main/examples/react-router-bsky)
+    - Demonstrates cache widget integration with React Router 7 and the Redis cache handler
+
+- **[Next.js Example](https://bsky.contection.dev/)** - [View source code](https://github.com/alexdln/contection/tree/main/examples/nextjs-bsky)
+    - Shows cache widget usage in a Next.js cacheComponents application with the Redis cache handler
+
 ## License
 
 [MIT](https://github.com/alexdln/nimpl-cache/blob/main/LICENSE)
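
The note above explains the `global.cacheHandler ||=` pattern used throughout this commit: Next.js loads the handler file through its own module graph, while the widget route or internal utilities may import it through another, so without the guard each graph would construct its own instance and its own in-memory LRU. A minimal TypeScript sketch of the pattern (the file name and the exported accessor are illustrative, not part of this commit):

```ts
// cache-handlers/redis.ts (illustrative sketch of the recommended pattern)
import { CacheHandler } from "@nimpl/cache-redis/cache-handler";

declare global {
    // eslint-disable-next-line no-var
    var cacheHandler: CacheHandler | undefined;
}

// Reuse one instance across every module graph that loads this file,
// so the in-memory layer is shared instead of duplicated.
global.cacheHandler ||= new CacheHandler(/* Options */);

export const cacheHandler = global.cacheHandler;
```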

packages/cache-redis/src/cache-handler.ts

Lines changed: 28 additions & 19 deletions

@@ -1,18 +1,19 @@
-import { type Durations, type Logger, type Entry, type LogData, type Options } from "./types";
+import { type Durations, type Logger, type Entry, type LogData, type Options, type CacheEntry } from "./types";
 import { logger as defaultLogger } from "./lib/logger";
 import { RedisLayer } from "./layers/redis-layer";
 import { LruLayer } from "./layers/lru-layer";
 import { PendingsLayer } from "./layers/pendings-layer";
 import { CacheError } from "./lib/error";
+import { calculateStreamSize } from "./lib/stream";
 
 export class CacheHandler {
     ephemeralLayer: LruLayer;
 
     persistentLayer: RedisLayer;
 
-    private pendingGetsLayer = new PendingsLayer<Entry | undefined | null>();
+    private pendingGetsLayer = new PendingsLayer<CacheEntry | undefined | null>();
 
-    private pendingSetsLayer = new PendingsLayer<Entry | undefined | null>();
+    private pendingSetsLayer = new PendingsLayer<CacheEntry | undefined | null>();
 
     private logger: Logger;
 
@@ -34,24 +35,24 @@ export class CacheHandler {
         this.logger({ type, status, source, key, message });
     }
 
-    async get(key: string) {
+    async getEntry(key: string): Promise<CacheEntry | undefined | null> {
         const pendingSet = await this.pendingSetsLayer.get(key);
         if (pendingSet === null) return undefined;
         if (pendingSet) {
             this.logOperation("GET", "REVALIDATED", "NEW", key);
-            const [cacheStream, responseStream] = pendingSet.value.tee();
-            pendingSet.value = cacheStream;
-            return { ...pendingSet, value: responseStream };
+            const [cacheStream, responseStream] = pendingSet.entry.value.tee();
+            pendingSet.entry.value = cacheStream;
+            return { entry: { ...pendingSet.entry, value: responseStream }, size: pendingSet.size, status: "valid" };
         }
 
-        const ephemeralCache = await this.ephemeralLayer.get(key);
+        const ephemeralCache = await this.ephemeralLayer.getEntry(key);
         if (ephemeralCache) {
             if (ephemeralCache.status === "revalidate") {
                 this.logOperation("GET", "REVALIDATING", "MEMORY", key);
                 return undefined;
             }
             this.logOperation("GET", "HIT", "MEMORY", key);
-            return ephemeralCache.entry;
+            return ephemeralCache;
         }
 
         const pendingGet = await this.pendingGetsLayer.get(key);
@@ -61,15 +62,15 @@
         }
         if (pendingGet) {
             this.logOperation("GET", "HIT", "REDIS", key);
-            const [cacheStream, responseStream] = pendingGet.value.tee();
-            pendingGet.value = cacheStream;
-            return { ...pendingGet, value: responseStream };
+            const [cacheStream, responseStream] = pendingGet.entry.value.tee();
+            pendingGet.entry.value = cacheStream;
+            return { entry: { ...pendingGet.entry, value: responseStream }, size: pendingGet.size, status: "valid" };
        }
 
        const resolvePending = this.pendingGetsLayer.set(key);
 
        try {
-            const persistentCache = await this.persistentLayer.get(key);
+            const persistentCache = await this.persistentLayer.getEntry(key);
 
            if (persistentCache === null) {
                await this.persistentLayer.delete(key);
@@ -87,7 +88,7 @@
                return undefined;
            }
 
-            const { entry, status } = persistentCache;
+            const { entry, size, status } = persistentCache;
            const [cacheStream, responseStream] = entry.value.tee();
            entry.value = cacheStream;
 
@@ -99,9 +100,9 @@
                resolvePending(undefined);
                return undefined;
            }
-            resolvePending(responseEntry);
+            resolvePending({ entry: responseEntry, size, status: "valid" });
            this.logOperation("GET", "HIT", "REDIS", key);
-            return responseEntry;
+            return { entry: responseEntry, size, status: "valid" };
        } catch (error) {
            this.logOperation("GET", "ERROR", "REDIS", key, error instanceof Error ? error.message : undefined);
            resolvePending(null);
@@ -110,6 +111,11 @@
        }
    }
 
+    async get(key: string): Promise<Entry | undefined | null> {
+        const cacheEntry = await this.getEntry(key);
+        return cacheEntry ? cacheEntry.entry : undefined;
+    }
+
    async set(key: string, pendingEntry: Promise<Entry>) {
        const resolvePending = this.pendingSetsLayer.set(key);
 
@@ -123,7 +129,10 @@
        try {
            await this.persistentLayer.set(key, { ...entry, value: cacheStreamPersistent });
 
-            resolvePending(entry);
+            const [responseStreamSize, responseStreamMain] = responseStream.tee();
+            entry.value = responseStreamMain;
+            const size = await calculateStreamSize(responseStreamSize);
+            resolvePending({ entry, size, status: "valid" });
            this.logOperation("SET", "REVALIDATED", "NEW", key);
        } catch (error) {
            resolvePending(undefined);
@@ -173,11 +182,11 @@
        return ephemeralReady && persistentReady;
    }
 
-    async keys(): Promise<{ ephemeralKeys: string[]; persistentKeys: string[] }> {
+    async keys(): Promise<string[]> {
        const [ephemeralKeys, persistentKeys] = await Promise.all([
            this.ephemeralLayer.keys(),
            this.persistentLayer.keys(),
        ]);
-        return { ephemeralKeys, persistentKeys };
+        return Array.from(new Set([...ephemeralKeys, ...persistentKeys]));
    }
 }
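
Taken together, the reshaped `CacheHandler` surface is: `getEntry` returns the full `CacheEntry` wrapper (`{ entry, size, status }`), `get` stays as a thin compatibility wrapper that unwraps it, and `keys` now returns a single deduplicated list spanning both layers. A consumer sketch under those assumptions (construction options and logging are illustrative):

```ts
import { CacheHandler } from "@nimpl/cache-redis/cache-handler";

const cacheHandler = new CacheHandler(/* Options */);

export async function inspectKey(key: string) {
    // Full wrapper: the entry plus its byte size and validity status.
    const cacheEntry = await cacheHandler.getEntry(key);
    if (cacheEntry) {
        console.log(key, cacheEntry.size, cacheEntry.status);
    }

    // Backwards-compatible accessor: just the Entry (or undefined).
    const entry = await cacheHandler.get(key);

    // One deduplicated key list across the in-memory and Redis layers.
    const allKeys = await cacheHandler.keys();

    return { entry, allKeys };
}
```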

packages/cache-redis/src/layers/lru-layer.ts

Lines changed: 20 additions & 16 deletions

@@ -3,6 +3,7 @@ import { LRUCache } from "lru-cache";
 import { type Options, type Logger, type Durations, type Entry, type CacheEntry } from "../types";
 import { DEFAULT_LRU_MAX_SIZE, DEFAULT_LRU_TTL } from "../lib/constants";
 import { getCacheStatus, getUpdatedMetadata } from "../lib/helpers";
+import { calculateStreamSize } from "../lib/stream";
 
 export class LruLayer {
     private lruClient: LRUCache<string, CacheEntry, unknown>;
@@ -12,27 +13,32 @@ export class LruLayer {
     private lruTtl: number | "auto";
 
     constructor(options: Options["lruOptions"], logger: Logger) {
-        const { ttl, ...lruOptions } = options || {};
-        this.lruTtl = (ttl ?? (process.env.LRU_TTL && parseInt(process.env.LRU_TTL)) ?? DEFAULT_LRU_TTL) || 0;
+        const { ttl, maxSize, ...lruOptions } = options || {};
+        const lruTtl = ttl ?? (process.env.LRU_TTL && parseInt(process.env.LRU_TTL)) ?? DEFAULT_LRU_TTL;
+        if (typeof lruTtl === "number") {
+            this.lruTtl = lruTtl;
+        } else {
+            this.lruTtl = "auto";
+        }
+
         this.logger = logger;
 
         this.lruClient = new LRUCache<string, CacheEntry, unknown>({
             maxSize:
-                options?.maxSize ||
-                (process.env.LRU_MAX_SIZE && parseInt(process.env.LRU_MAX_SIZE)) ||
-                DEFAULT_LRU_MAX_SIZE,
+                maxSize || (process.env.LRU_MAX_SIZE && parseInt(process.env.LRU_MAX_SIZE)) || DEFAULT_LRU_MAX_SIZE,
             sizeCalculation: (entry) => entry.size,
             ttlAutopurge: true,
-            ...(lruOptions || {}),
+            ...lruOptions,
         });
     }
 
     private calculateLruTtl(expire: number): number {
         return this.lruTtl === "auto" ? expire * 1000 : this.lruTtl * 1000;
     }
 
-    async get(key: string): Promise<CacheEntry | undefined | null> {
+    async getEntry(key: string): Promise<CacheEntry | undefined | null> {
         const memoryEntry = this.lruClient.get(key);
+
         if (!memoryEntry) return undefined;
 
         const { entry, size } = memoryEntry;
@@ -52,14 +58,16 @@
         };
     }
 
+    async get(key: string): Promise<Entry | undefined | null> {
+        const cacheEntry = await this.getEntry(key);
+        return cacheEntry && cacheEntry.status === "valid" ? cacheEntry.entry : undefined;
+    }
+
     async set(key: string, pendingEntry: Promise<Entry> | Entry) {
         const entry = await pendingEntry;
         const [cacheStream, responseStream] = entry.value.tee();
         entry.value = responseStream;
-        let size = 0;
-        for await (const chunk of cacheStream) {
-            size += Buffer.byteLength(chunk);
-        }
+        const size = await calculateStreamSize(cacheStream);
         this.lruClient.set(
             key,
             { entry, size: size || 1, status: "valid" },
@@ -87,10 +95,6 @@
     }
 
     async keys(): Promise<string[]> {
-        const keys: string[] = [];
-        this.lruClient.forEach((_, key) => {
-            keys.push(key);
-        });
-        return keys;
+        return Array.from(this.lruClient.keys());
     }
 }
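
The constructor now strips `ttl` and `maxSize` out of `lruOptions` before spreading the remainder into `lru-cache`, and any non-numeric `ttl` resolves to `"auto"`, i.e. each entry's TTL is derived from its own `expire`. A configuration sketch, assuming `lruOptions` accepts these fields as the code above implies:

```ts
import { CacheHandler } from "@nimpl/cache-redis/cache-handler";

export const cacheHandler = new CacheHandler({
    lruOptions: {
        // A non-numeric ttl resolves to "auto": each entry keeps a TTL derived
        // from its own expire value instead of one fixed lifetime in seconds.
        ttl: "auto",
        // Upper bound (in bytes) for the in-memory layer; entries are weighed
        // by the size computed with calculateStreamSize.
        maxSize: 50 * 1024 * 1024,
    },
});
```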

packages/cache-redis/src/layers/redis-layer.ts

Lines changed: 8 additions & 8 deletions

@@ -11,7 +11,7 @@ import {
 } from "../types";
 import { PREFIX_META } from "../lib/constants";
 import { getCacheKeys, getCacheStatus, getUpdatedMetadata } from "../lib/helpers";
-import { bufferToStream } from "../lib/stream";
+import { bufferToStream, streamToBuffer } from "../lib/stream";
 import { PendingsLayer } from "./pendings-layer";
 import { CacheConnectionError, CacheError } from "../lib/error";
 
@@ -147,7 +147,7 @@
     return isConnected;
     }
 
-    async get(key: string): Promise<CacheEntry | undefined | null> {
+    async getEntry(key: string): Promise<CacheEntry | undefined | null> {
         const connected = await this.connect();
         if (!connected) return undefined;
 
@@ -192,20 +192,20 @@
         return cacheEntry;
     }
 
+    async get(key: string): Promise<Entry | undefined | null> {
+        const cacheEntry = await this.getEntry(key);
+        return cacheEntry && cacheEntry.status === "valid" ? cacheEntry.entry : undefined;
+    }
+
     async set(key: string, pendingEntry: Promise<Entry> | Entry) {
         const connected = await this.connect();
         if (!connected) return;
 
         const entry = await pendingEntry;
         const { cacheKey, metaKey } = getCacheKeys(key, this.keyPrefix);
         const pipeline = this.redisClient.pipeline();
-        const chunks: Uint8Array[] = [];
-
-        for await (const chunk of entry.value) {
-            chunks.push(chunk);
-        }
 
-        pipeline.set(cacheKey, Buffer.concat(chunks), "EX", entry.expire);
+        pipeline.set(cacheKey, await streamToBuffer(entry.value), "EX", entry.expire);
         pipeline.set(
             metaKey,
             JSON.stringify({
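
The inline chunk-collecting loop is replaced by the `streamToBuffer` helper from `lib/stream`, the counterpart of the existing `bufferToStream`; judging by its use in `pipeline.set`, it resolves to a `Buffer`. A round-trip sketch under that assumption (the import path and payload are illustrative):

```ts
import { bufferToStream, streamToBuffer } from "./lib/stream";

// Serialize a payload into the stream form the cache layers pass around...
const stream = bufferToStream(Buffer.from("cached page payload"));

// ...and drain it back into the single Buffer form that Redis stores.
const buffer = await streamToBuffer(stream);
console.log(buffer.toString()); // "cached page payload"
```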

packages/cache-redis/src/lib/stream.ts

Lines changed: 10 additions & 0 deletions

@@ -22,3 +22,13 @@ export const bufferToStream = (buffer: Buffer): ReadableStream => {
         },
     });
 };
+
+export const calculateStreamSize = async (
+    stream: ReadableStream<Uint8Array> | WebReadableStream<Uint8Array>,
+): Promise<number> => {
+    let size = 0;
+    for await (const chunk of stream) {
+        size += chunk.byteLength;
+    }
+    return size;
+};
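
`calculateStreamSize` fully consumes the stream it is given, which is why the callers above tee first and measure only one branch while the other stays readable. A minimal sketch of that pattern (the sample payload and import path are illustrative):

```ts
import { calculateStreamSize } from "./lib/stream";

const original = new Blob(["cached payload"]).stream();

// Measure one branch; the other stays untouched and can still be
// stored in the cache or returned to the caller.
const [measureStream, cacheStream] = original.tee();
const size = await calculateStreamSize(measureStream);

console.log(size); // byte length of "cached payload"
// cacheStream is still a readable ReadableStream<Uint8Array> here.
```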

packages/cache-tools/README.md

Lines changed: 14 additions & 2 deletions

@@ -57,8 +57,9 @@ Call `getCachedFeed` instead of the raw fetcher to reuse cached payloads across
 // app/api/cache-widget/route.ts (React Router)
 import { getCacheData } from "@/cache-handler";
 
-export const loader = async ({ params }: { params: { id?: string } }) => {
-    const data = await getCacheData(params.id ? [params.id] : undefined);
+export const loader = async ({ params }: { params: { "*"?: string } }) => {
+    const segments = params["*"]?.split("/").filter(Boolean) ?? [];
+    const data = await getCacheData(segments);
 
     if (!data) return new Response("", { status: 404 });
 
@@ -88,6 +89,17 @@ export const GET = async (
 
 Use `getCacheData` as the single entry point for the [widget](https://www.npmjs.com/package/@nimpl/cache-widget).
 
+## Examples
+
+- **[Base Example](https://github.com/alexdln/nimpl-cache/tree/main/examples/redis-cache)**
+    - Minimal Next.js example demonstrating the Redis cache handler and widget setup
+
+- **[React Router Example](https://router-bsky.contection.dev/)** - [View source code](https://github.com/alexdln/contection/tree/main/examples/react-router-bsky)
+    - Demonstrates cache widget integration with React Router 7 and the Redis cache handler
+
+- **[Next.js Example](https://bsky.contection.dev/)** - [View source code](https://github.com/alexdln/contection/tree/main/examples/nextjs-bsky)
+    - Shows cache widget usage in a Next.js cacheComponents application with the Redis cache handler
+
 ## License
 
 MIT
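
The loader now reads the catch-all `"*"` param, so the route has to be registered as a splat route. One way to do that in React Router 7's framework mode, assuming the loader above lives in `app/routes/cache-widget.ts` (the file names and routes config are assumptions, not part of this commit):

```ts
// app/routes.ts
import { type RouteConfig, route } from "@react-router/dev/routes";

export default [
    // The trailing "*" exposes everything after the prefix to the loader
    // as params["*"], which the loader above splits into segments.
    route("api/cache-widget/*", "routes/cache-widget.ts"),
] satisfies RouteConfig;
```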
