Skip to content

Commit 3a4724f

Browse files
rosaclaude
authored and committed
Remove maxSize option in favor of maxEntries + maxEntrySize
The combination of maxEntries and maxEntrySize provides a guaranteed upper bound (maxEntries × maxEntrySize) without the complexity of tracking aggregate cache size. This simplifies the implementation by removing stats tracking in IndexedDB. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent f040c2a commit 3a4724f

File tree

5 files changed

+5
-152
lines changed

5 files changed

+5
-152
lines changed

src/offline/cache_registry.js

Lines changed: 0 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -66,20 +66,6 @@ class CacheRegistryDatabase {
6666
return this.#performOperation(STORE_NAME, countOp, "readonly")
6767
}
6868

69-
getTotalSize(cacheName) {
70-
const sumOp = (store) => {
71-
const index = store.index("cacheNameAndTimestamp")
72-
const range = IDBKeyRange.bound(
73-
[cacheName, 0],
74-
[cacheName, Infinity]
75-
)
76-
const cursorRequest = index.openCursor(range)
77-
78-
return this.#sumSizesFromCursor(cursorRequest)
79-
}
80-
return this.#performOperation(STORE_NAME, sumOp, "readonly")
81-
}
82-
8369
getOldestEntries(cacheName, limit) {
8470
const getOldestOp = (store) => {
8571
const index = store.index("cacheNameAndTimestamp")
@@ -94,20 +80,6 @@ class CacheRegistryDatabase {
9480
return this.#performOperation(STORE_NAME, getOldestOp, "readonly")
9581
}
9682

97-
getEntriesForSizeReduction(cacheName, targetReduction) {
98-
const getEntriesOp = (store) => {
99-
const index = store.index("cacheNameAndTimestamp")
100-
const range = IDBKeyRange.bound(
101-
[cacheName, 0],
102-
[cacheName, Infinity]
103-
)
104-
const cursorRequest = index.openCursor(range)
105-
106-
return this.#getEntriesUntilSizeReached(cursorRequest, targetReduction)
107-
}
108-
return this.#performOperation(STORE_NAME, getEntriesOp, "readonly")
109-
}
110-
11183
delete(key) {
11284
const deleteOp = (store) => this.#requestToPromise(store.delete(key))
11385
return this.#performOperation(STORE_NAME, deleteOp, "readwrite")
@@ -173,44 +145,6 @@ class CacheRegistryDatabase {
173145
request.onerror = () => reject(request.error)
174146
})
175147
}
176-
177-
#sumSizesFromCursor(request) {
178-
return new Promise((resolve, reject) => {
179-
let total = 0
180-
181-
request.onsuccess = (event) => {
182-
const cursor = event.target.result
183-
if (cursor) {
184-
total += cursor.value.size ?? 0
185-
cursor.continue()
186-
} else {
187-
resolve(total)
188-
}
189-
}
190-
191-
request.onerror = () => reject(request.error)
192-
})
193-
}
194-
195-
#getEntriesUntilSizeReached(request, targetSize) {
196-
return new Promise((resolve, reject) => {
197-
const results = []
198-
let accumulated = 0
199-
200-
request.onsuccess = (event) => {
201-
const cursor = event.target.result
202-
if (cursor && accumulated < targetSize) {
203-
results.push(cursor.value)
204-
accumulated += cursor.value.size ?? 0
205-
cursor.continue()
206-
} else {
207-
resolve(results)
208-
}
209-
}
210-
211-
request.onerror = () => reject(request.error)
212-
})
213-
}
214148
}
215149

216150
let cacheRegistryDatabase = null
@@ -252,18 +186,10 @@ export class CacheRegistry {
252186
return this.database.getEntryCount(this.cacheName)
253187
}
254188

255-
getTotalSize() {
256-
return this.database.getTotalSize(this.cacheName)
257-
}
258-
259189
getOldestEntries(limit) {
260190
return this.database.getOldestEntries(this.cacheName, limit)
261191
}
262192

263-
getEntriesForSizeReduction(targetReduction) {
264-
return this.database.getEntriesForSizeReduction(this.cacheName, targetReduction)
265-
}
266-
267193
delete(key) {
268194
return this.database.delete(key)
269195
}

src/offline/cache_trimmer.js

Lines changed: 3 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -26,23 +26,18 @@ export class CacheTrimmer {
2626
}
2727

2828
#shouldTrim() {
29-
const { maxAge, maxEntries, maxSize } = this.options
30-
return (maxAge && maxAge > 0) ||
31-
(maxEntries && maxEntries > 0) ||
32-
(maxSize && maxSize > 0)
29+
const { maxAge, maxEntries } = this.options
30+
return (maxAge && maxAge > 0) || (maxEntries && maxEntries > 0)
3331
}
3432

3533
async deleteEntries() {
36-
// Order: age first count → size (each reduces the set for subsequent operations)
34+
// Order: age first, then count (age reduces the set for count trimming)
3735
if (this.options.maxAge) {
3836
await this.deleteEntriesByAge()
3937
}
4038
if (this.options.maxEntries) {
4139
await this.deleteEntriesByCount()
4240
}
43-
if (this.options.maxSize) {
44-
await this.deleteEntriesBySize()
45-
}
4641
}
4742

4843
async deleteEntriesByAge() {
@@ -79,25 +74,6 @@ export class CacheTrimmer {
7974
console.debug(`Successfully trimmed ${entriesToDelete.length} entries from cache "${this.cacheName}"`)
8075
}
8176

82-
async deleteEntriesBySize() {
83-
const currentSize = await this.cacheRegistry.getTotalSize()
84-
const excess = currentSize - this.options.maxSize
85-
86-
if (excess <= 0) {
87-
return
88-
}
89-
90-
const entriesToDelete = await this.cacheRegistry.getEntriesForSizeReduction(excess)
91-
92-
if (entriesToDelete.length === 0) {
93-
return
94-
}
95-
96-
console.debug(`Trimming ${entriesToDelete.length} entries (size limit) from cache "${this.cacheName}"`)
97-
await this.#deleteEntryList(entriesToDelete)
98-
console.debug(`Successfully trimmed ${entriesToDelete.length} entries from cache "${this.cacheName}"`)
99-
}
100-
10177
async #deleteEntryList(entries) {
10278
const cache = await caches.open(this.cacheName)
10379

src/offline/handlers/handler.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,14 @@ import { CacheTrimmer } from "../cache_trimmer"
33
import { buildPartialResponse } from "../range_request"
44

55
export class Handler {
6-
constructor({ cacheName, networkTimeout, maxAge, maxEntries, maxSize, maxEntrySize, fetchOptions }) {
6+
constructor({ cacheName, networkTimeout, maxAge, maxEntries, maxEntrySize, fetchOptions }) {
77
this.cacheName = cacheName
88
this.networkTimeout = networkTimeout
99
this.fetchOptions = fetchOptions || {}
1010
this.maxEntrySize = maxEntrySize
1111

1212
this.cacheRegistry = new CacheRegistry(cacheName)
13-
this.cacheTrimmer = new CacheTrimmer(cacheName, this.cacheRegistry, { maxAge, maxEntries, maxSize })
13+
this.cacheTrimmer = new CacheTrimmer(cacheName, this.cacheRegistry, { maxAge, maxEntries })
1414
}
1515

1616
async handle(request) {

src/tests/fixtures/service_workers/max_size.js

Lines changed: 0 additions & 20 deletions
This file was deleted.

src/tests/functional/offline_tests.js

Lines changed: 0 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -424,35 +424,6 @@ test("trims oldest entries when maxEntries is exceeded", async ({ page }) => {
424424
await assertCachedContent(page, "test-max-entries", urls[4], contents[4])
425425
})
426426

427-
test("trims oldest entries when maxSize is exceeded", async ({ page }) => {
428-
await registerServiceWorker(page, "/src/tests/fixtures/service_workers/max_size.js")
429-
await waitForServiceWorkerToControl(page)
430-
431-
// Cache multiple URLs - each response is ~60-70 bytes, maxSize is 150 bytes
432-
const urls = [
433-
"/__turbo/dynamic.txt?id=1",
434-
"/__turbo/dynamic.txt?id=2",
435-
"/__turbo/dynamic.txt?id=3"
436-
]
437-
438-
const contents = []
439-
for (const url of urls) {
440-
contents.push(await fetchContent(page, url))
441-
// Wait between requests to ensure distinct timestamps
442-
await page.waitForTimeout(100)
443-
}
444-
445-
// Wait for trimming to complete
446-
await page.waitForTimeout(300)
447-
448-
// With maxSize: 150 and ~70 byte responses, only ~2 should fit
449-
// The oldest entry should be removed
450-
await assertNotCached(page, "test-max-size", urls[0])
451-
452-
// At least the newest entry should remain
453-
await assertCachedContent(page, "test-max-size", urls[2], contents[2])
454-
})
455-
456427
test("rejects entries exceeding maxEntrySize", async ({ page }) => {
457428
await registerServiceWorker(page, "/src/tests/fixtures/service_workers/max_entry_size.js")
458429
await waitForServiceWorkerToControl(page)

0 commit comments

Comments
 (0)