-
Notifications
You must be signed in to change notification settings - Fork 52
fix(cache): correctness fixes and stale window bump #1124
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -155,6 +155,9 @@ caches?.open("loader") | |||||||||||||||||||||
| .catch(() => maybeCache = undefined); | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| const MAX_AGE_S = parseInt(Deno.env.get("CACHE_MAX_AGE_S") ?? "60"); // 60 seconds | ||||||||||||||||||||||
| const CACHE_MAX_ENTRY_SIZE = parseInt( | ||||||||||||||||||||||
| Deno.env.get("CACHE_MAX_ENTRY_SIZE") ?? "2097152", // 2 MB | ||||||||||||||||||||||
| ) || 2097152; | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| // Reuse TextEncoder instance to avoid repeated instantiation | ||||||||||||||||||||||
| const textEncoder = new TextEncoder(); | ||||||||||||||||||||||
|
|
@@ -248,7 +251,14 @@ const wrapLoader = ( | |||||||||||||||||||||
| !shouldNotCache && ctx.vary?.push(cacheKeyValue); | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| status = "bypass"; | ||||||||||||||||||||||
| stats.cache.add(1, { status, loader }); | ||||||||||||||||||||||
| const bypassReason = isCacheNoStore | ||||||||||||||||||||||
| ? "no-store" | ||||||||||||||||||||||
| : isCacheNoCache | ||||||||||||||||||||||
| ? "no-cache" | ||||||||||||||||||||||
| : isCacheKeyNull | ||||||||||||||||||||||
| ? "null-key" | ||||||||||||||||||||||
| : "disabled"; | ||||||||||||||||||||||
| stats.cache.add(1, { status, loader, reason: bypassReason }); | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| RequestContext?.signal?.throwIfAborted(); | ||||||||||||||||||||||
| return await handler(props, req, ctx); | ||||||||||||||||||||||
|
|
@@ -297,6 +307,15 @@ const wrapLoader = ( | |||||||||||||||||||||
| // Serialize and encode once on the main thread. | ||||||||||||||||||||||
| const jsonStringEncoded = textEncoder.encode(JSON.stringify(json)); | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| // Skip caching oversized entries to protect disk and memory. | ||||||||||||||||||||||
| // Also evict any existing stale entry so it doesn't stay pinned forever. | ||||||||||||||||||||||
| if (jsonStringEncoded.length > CACHE_MAX_ENTRY_SIZE) { | ||||||||||||||||||||||
| cache.delete(request).catch((error) => | ||||||||||||||||||||||
| logger.error(`loader error ${error}`) | ||||||||||||||||||||||
| ); | ||||||||||||||||||||||
| return json; | ||||||||||||||||||||||
|
Comment on lines
+322
to
+326
There was a problem hiding this comment. Choose a reason for hiding this comment — the reason will be displayed to describe this comment to others. Learn more. Await the eviction in the oversized branch: this path returns before the delete settles. In the stale-while-revalidate flow, another request can still hit the old stale entry even though this refresh already decided it must be removed. 🐛 Proposed fix if (jsonStringEncoded.length > CACHE_MAX_ENTRY_SIZE) {
- cache.delete(request).catch((error) =>
+ await cache.delete(request).catch((error) =>
logger.error(`loader error ${error}`)
);
return json;
}📝 Committable suggestion
Suggested change
🤖 Prompt for AI Agents |
||||||||||||||||||||||
| } | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| const expires = new Date(Date.now() + (cacheMaxAge * 1e3)) | ||||||||||||||||||||||
| .toUTCString(); | ||||||||||||||||||||||
| const headerPairs: [string, string][] = [ | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| Original file line number | Diff line number | Diff line change | ||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -18,10 +18,11 @@ const CACHE_TTL_AUTOPURGE = Deno.env.get("CACHE_TTL_AUTOPURGE") === "true"; // c | |||||||||||||
| const CACHE_TTL_RESOLUTION = parseInt( | ||||||||||||||
| Deno.env.get("CACHE_TTL_RESOLUTION") ?? "1000", | ||||||||||||||
| ); // updates the lru cache timer every 1 second | ||||||||||||||
| // Additional time-to-live increment in milliseconds to extend the cache expiration beyond the response's Expires header. | ||||||||||||||
| // If not set, the cache will use only the expiration timestamp from response headers | ||||||||||||||
| // How long stale content remains serveable (and stays on disk) beyond its expires header. | ||||||||||||||
| // Default: 1 hour — long enough for low-traffic sites to keep serving cached content across | ||||||||||||||
| // quiet periods while background revalidation catches up. | ||||||||||||||
| const STALE_TTL_PERIOD = parseInt( | ||||||||||||||
| Deno.env.get("STALE_TTL_PERIOD") ?? "30000", | ||||||||||||||
| Deno.env.get("STALE_TTL_PERIOD") ?? "3600000", // 1h | ||||||||||||||
| ); | ||||||||||||||
|
|
||||||||||||||
| const cacheOptions = (cache: Cache) => ( | ||||||||||||||
|
|
@@ -37,11 +38,14 @@ const cacheOptions = (cache: Cache) => ( | |||||||||||||
| ); | ||||||||||||||
|
|
||||||||||||||
| function createLruCacheStorage(cacheStorageInner: CacheStorage): CacheStorage { | ||||||||||||||
| const openedCachesByName = new Map<string, Promise<Cache>>(); | ||||||||||||||
| const caches = createBaseCacheStorage( | ||||||||||||||
| cacheStorageInner, | ||||||||||||||
| (_cacheName, cacheInner, requestURLSHA1) => { | ||||||||||||||
| const existing = openedCachesByName.get(_cacheName); | ||||||||||||||
| if (existing) return existing; | ||||||||||||||
| const fileCache = new LRUCache(cacheOptions(cacheInner)); | ||||||||||||||
| return Promise.resolve({ | ||||||||||||||
| const cache = Promise.resolve({ | ||||||||||||||
| ...baseCache, | ||||||||||||||
| delete: async ( | ||||||||||||||
| request: RequestInfo | URL, | ||||||||||||||
|
|
@@ -58,8 +62,7 @@ function createLruCacheStorage(cacheStorageInner: CacheStorage): CacheStorage { | |||||||||||||
| assertNoOptions(options); | ||||||||||||||
| const cacheKey = await requestURLSHA1(request); | ||||||||||||||
| if (fileCache.has(cacheKey)) { | ||||||||||||||
| const result = cacheInner.match(cacheKey); | ||||||||||||||
| return result; | ||||||||||||||
| return cacheInner.match(cacheKey); | ||||||||||||||
|
Comment on lines
116
to
+117
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 🧩 Analysis chain — 🌐 Web query:
💡 Result: For npm:lru-cache@10.2.0, cache.has(key) does NOT update recency / LRU ordering by default. Use cache.get(key) when read hits must keep entries hot. Citations:
Use `fileCache.get(cacheKey)` instead of `fileCache.has(cacheKey)` here, so that read hits refresh LRU recency — in lru-cache@10.2.0, `has(key)` does not update recency ordering by default, so hot entries served through this path can still be evicted as least-recently-used.
♻️ Proposed fix- if (fileCache.has(cacheKey)) {
+ if (fileCache.get(cacheKey) !== undefined) {
return cacheInner.match(cacheKey);
}📝 Committable suggestion
Suggested change
🤖 Prompt for AI Agents |
||||||||||||||
| } | ||||||||||||||
| return undefined; | ||||||||||||||
| }, | ||||||||||||||
|
|
@@ -96,6 +99,8 @@ function createLruCacheStorage(cacheStorageInner: CacheStorage): CacheStorage { | |||||||||||||
| return cacheInner.put(cacheKey, response); | ||||||||||||||
| }, | ||||||||||||||
| }); | ||||||||||||||
| openedCachesByName.set(_cacheName, cache); | ||||||||||||||
| return cache; | ||||||||||||||
| }, | ||||||||||||||
| ); | ||||||||||||||
| return caches; | ||||||||||||||
|
|
||||||||||||||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Keep
CACHE_MAX_ENTRY_SIZE=0distinguishable from "unset".parseInt(...) || 2097152turns an explicit0into the default, so this knob can't intentionally disable writes or force the oversized path in staging. Use an explicit parse-failure check instead.♻️ Proposed fix
📝 Committable suggestion
🤖 Prompt for AI Agents