import { unstable_cache } from "next/cache";
import superjson from "superjson";

// Keep each cached chunk safely under Next's 2 MB per-entry cache limit,
// leaving a buffer for the cache key and entry metadata.
const MAX_CACHE_SIZE_STRING = 2 * 1024 * 1024 - 510_000;

// One slice of the serialized payload, plus the total number of slices.
type ChunkedCacheResult = {
  chunk: string;
  chunksNumber: number;
};

// A cached fetcher: the original args plus an optional trailing chunk number.
type CacheFetcher<Args extends unknown[]> = (
  ...args: [...Args, number?]
) => Promise<ChunkedCacheResult>;

/**
 * Wraps an async function returning data of type T and caches the result in
 * chunks, so payloads larger than Next's per-entry cache limit can still be cached.
 * @param keyCacheBase - base cache key prefix passed to unstable_cache
 * @param fetchFullData - async function returning the full data T for the given args (without the chunk number)
 * @returns an async function taking the same args plus an optional chunk number, resolving to that chunk and the total chunk count
 */
export function createChunkedCacheFetcher<T, Args extends unknown[]>(
  keyCacheBase: string,
  fetchFullData: (...args: Args) => Promise<T>
): CacheFetcher<Args> {
  // The chunk number defaults to 0, i.e. the first chunk.
  return unstable_cache(
    async (...argsWithChunk: [...Args, number?]) => {
      // Split off the trailing chunk number. The last element is treated as
      // the chunk index only when more values were passed than fetchFullData
      // declares, which assumes fetchFullData has no default or rest params.
      const hasChunkArg = argsWithChunk.length > fetchFullData.length;
      const args = (hasChunkArg ? argsWithChunk.slice(0, -1) : argsWithChunk) as Args;
      const chunk = hasChunkArg ? (argsWithChunk.at(-1) as number) : 0;

      // Fetch fresh full data and serialize it. This could be memoized inside
      // the function, but we rely on Next's cache instead.
      const fullData = await fetchFullData(...args);
      const serialized = superjson.stringify(fullData);

      // Break the serialized payload into fixed-size chunks.
      const chunksNumber = Math.ceil(serialized.length / MAX_CACHE_SIZE_STRING);
      const chunks: string[] = [];
      for (let i = 0; i < chunksNumber; i++) {
        const start = i * MAX_CACHE_SIZE_STRING;
        chunks.push(serialized.slice(start, start + MAX_CACHE_SIZE_STRING));
      }

      return {
        chunk: chunks[chunk] ?? "",
        chunksNumber,
      };
    },
    [keyCacheBase],
    { revalidate: false }
  );
}

/**
 * Fetches every chunk and transparently reassembles the original value.
 * @param fetcher - cached fetcher returned by createChunkedCacheFetcher
 * @param args - args for the cached fetcher, without the chunk number
 */
export async function fetchAllChunks<T, Args extends unknown[]>(
  fetcher: CacheFetcher<Args>,
  ...args: Args
): Promise<T> {
  const firstChunkData = await fetcher(...args, 0);
  const { chunksNumber } = firstChunkData;
  if (chunksNumber <= 1) {
    return superjson.parse<T>(firstChunkData.chunk);
  }

  // Fetch the remaining chunks in parallel and stitch the string back together.
  const otherChunks = await Promise.all(
    Array.from({ length: chunksNumber - 1 }, (_, i) => fetcher(...args, i + 1))
  );

  const fullString =
    firstChunkData.chunk + otherChunks.map(({ chunk }) => chunk).join("");

  return superjson.parse<T>(fullString);
}
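
// --- Usage sketch (illustrative; `getReport`, `reportId`, and `loadReport`
// are assumed names, not part of the original commit). Any async loader whose
// result superjson can serialize works the same way. ---

type Report = { id: string; rows: number[] };

// Hypothetical loader standing in for a real data source.
async function getReport(reportId: string): Promise<Report> {
  const res = await fetch(`https://example.com/api/reports/${reportId}`);
  return (await res.json()) as Report;
}

// One chunk-aware cached fetcher per data source.
const getReportChunk = createChunkedCacheFetcher("report", getReport);

export async function loadReport(reportId: string): Promise<Report> {
  // Low level: ask for a single chunk plus the total chunk count.
  const { chunksNumber } = await getReportChunk(reportId, 0);
  console.log(`report ${reportId} is cached in ${chunksNumber} chunk(s)`);

  // High level: fetch every chunk and reassemble the original value.
  return fetchAllChunks<Report, [string]>(getReportChunk, reportId);
}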