-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcacheLoaders.ts
More file actions
314 lines (286 loc) · 9.59 KB
/
cacheLoaders.ts
File metadata and controls
314 lines (286 loc) · 9.59 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
/**
* Cache loading utilities for Notion data fetching
*
* Provides generic caching infrastructure with:
* - LRU prefetch cache integration
* - In-flight request deduplication
* - Cache hit/miss tracking
* - Progress logging
*/
import chalk from "chalk";
import type { LRUCache } from "./cacheStrategies";
import { buildCacheKey } from "./cacheStrategies";
import { fetchNotionBlocks } from "../fetchNotionData";
import { n2m } from "../notionClient";
import { isUrlExpiringSoon } from "./imageReplacer";
/**
 * Emits a throttled progress line for batch fetch operations.
 *
 * To keep console output manageable for large batches, a line is printed
 * only when the item is the first, the last, or a multiple of ten
 * (multiples of ten that coincide with the final item are covered by the
 * "last" case).
 */
export function logProgress(
  index: number,
  total: number,
  prefix: string,
  title: string
): void {
  if (total <= 0) {
    return;
  }
  const position = index + 1;
  const isFirst = index === 0;
  const isLast = index === total - 1;
  const isTenth = position % 10 === 0 && position < total;
  if (isFirst || isLast || isTenth) {
    console.log(chalk.gray(` ${prefix} ${position}/${total} for "${title}"`));
  }
}
/**
 * Configuration for the generic cache loader (see loadWithCache).
 *
 * Counters are wrapped in `{ value: number }` objects so a single mutable
 * counter can be shared across many loader invocations.
 */
export interface CacheLoaderConfig<T> {
  /** Durable per-page cache keyed by page id; `key` encodes the page's last_edited_time. */
  mainMap: Map<string, { key: string; data: T }>;
  /** LRU prefetch cache keyed by the combined page-id / edit-time cache key. */
  prefetchCache: LRUCache<T>;
  /** In-flight promises by cache key, used to deduplicate concurrent fetches. */
  inFlightMap: Map<string, Promise<T>>;
  /** Mutable counter incremented on every cache hit (main map or prefetch). */
  cacheHits: { value: number };
  /** Mutable counter incremented each time a new fetch is started. */
  fetchCount: { value: number };
  /** Fetches fresh data for a page when no cache entry applies. */
  fetchFn: (pageId: string) => Promise<T>;
  /** Coerces a raw fetch result into the cached shape T. */
  normalizeResult: (result: any) => T;
  /** Optional validator; returning false triggers a retry (up to 3 attempts). */
  validateResult?: (result: T) => boolean;
  /** Prefix used in throttled progress log lines (see logProgress). */
  logPrefix: string;
}
/**
 * Recursively scans a value for strings containing soon-to-expire S3 URLs.
 *
 * Walks plain objects, arrays, Maps, and Sets directly instead of
 * serializing, avoiding JSON.stringify overhead and regex-on-huge-string
 * DoS risk. A WeakSet of visited objects guards against circular
 * structures.
 */
export function containsExpiringUrls(
  data: any,
  visited = new WeakSet()
): boolean {
  // null and undefined can never contain a URL.
  if (data == null) {
    return false;
  }
  // Strings are the only leaves that can hold a URL.
  if (typeof data === "string") {
    return isUrlExpiringSoon(data);
  }
  // Numbers, booleans, symbols, etc. cannot contain URLs.
  if (typeof data !== "object") {
    return false;
  }
  // Break cycles: an already-visited object was (or is being) scanned.
  if (visited.has(data)) {
    return false;
  }
  visited.add(data);

  // Select the child values to recurse into based on the container type.
  let children: Iterable<any>;
  if (data instanceof Map || data instanceof Set) {
    children = data.values();
  } else if (Array.isArray(data)) {
    children = data;
  } else {
    children = Object.values(data);
  }

  for (const child of children) {
    if (containsExpiringUrls(child, visited)) {
      return true;
    }
  }
  return false;
}
/**
 * Generic cache loader for per-page Notion data.
 *
 * Resolution order:
 *   1. Main map cache — hit when the stored key still matches the
 *      last_edited_time-based cache key.
 *   2. LRU prefetch cache — promoted into the main map on hit (entries are
 *      stored already normalized).
 *   3. An already in-flight fetch for the same cache key (deduplication).
 *   4. A fresh fetch via config.fetchFn, with linear-backoff retry (up to
 *      3 attempts) when config.validateResult rejects the result.
 *
 * On fetch failure the prefetch entry is removed and the error propagates;
 * the in-flight entry is always cleared once the fetch settles.
 *
 * @param pageRecord Notion page record; `id` and `last_edited_time` are read.
 * @param pageIndex  Zero-based index, used only for throttled progress logs.
 * @param totalCount Total number of pages, for progress logs.
 * @param title      Human-readable page title, for log messages.
 * @param config     Cache maps, counters, and fetch/normalize/validate hooks.
 * @returns The normalized data plus whether it came from cache or a fetch.
 */
export async function loadWithCache<T>(
  pageRecord: Record<string, any>,
  pageIndex: number,
  totalCount: number,
  title: string,
  config: CacheLoaderConfig<T>
): Promise<{ data: T; source: "cache" | "fetched" }> {
  const pageId = pageRecord?.id;
  if (!pageId) {
    // Nothing to fetch without an id; report an empty, normalized payload.
    return { data: config.normalizeResult([]), source: "cache" };
  }

  const cacheKey = buildCacheKey(pageId, pageRecord?.last_edited_time);

  // 1. Main map cache — valid only while the stored key is still current.
  const existing = config.mainMap.get(pageId);
  if (existing && existing.key === cacheKey) {
    config.cacheHits.value += 1;
    return { data: existing.data, source: "cache" };
  }

  // 2. Prefetch cache — entries were normalized before being stored, so
  //    they can be promoted into the main map as-is.
  if (config.prefetchCache.has(cacheKey)) {
    config.cacheHits.value += 1;
    const cached = config.prefetchCache.get(cacheKey);
    config.mainMap.set(pageId, { key: cacheKey, data: cached });
    return { data: cached, source: "cache" };
  }

  // 3. Deduplicate concurrent fetches for the same cache key.
  let inFlight = config.inFlightMap.get(cacheKey);
  if (!inFlight) {
    config.fetchCount.value += 1;
    logProgress(pageIndex, totalCount, config.logPrefix, title);

    inFlight = (async () => {
      const MAX_ATTEMPTS = 3;
      for (let attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
        // Fetch errors propagate immediately to the .catch below; only
        // validation failures are retried here (fetchFn is assumed to
        // handle its own transient-failure resiliency).
        const result = await config.fetchFn(pageId);
        const normalized = config.normalizeResult(result);

        if (config.validateResult && !config.validateResult(normalized)) {
          if (attempt === MAX_ATTEMPTS) {
            // Out of retries: keep the latest result rather than failing.
            console.warn(
              chalk.yellow(
                ` ⚠️ Content validation failed for "${title}" after ${MAX_ATTEMPTS} attempts; using latest result.`
              )
            );
            config.prefetchCache.set(cacheKey, normalized);
            return normalized;
          }
          const delay = attempt * 1000; // Linear backoff: 1s, 2s
          console.warn(
            chalk.yellow(
              ` ⚠️ Content validation failed for "${title}" (attempt ${attempt}/${MAX_ATTEMPTS}), retrying in ${delay}ms...`
            )
          );
          await new Promise((resolve) => setTimeout(resolve, delay));
          continue;
        }

        // Validation passed (or no validator configured): cache and return.
        config.prefetchCache.set(cacheKey, normalized);
        return normalized;
      }
      throw new Error("Unexpected end of retry loop");
    })()
      .catch((error) => {
        // Don't let a failed fetch leave a stale prefetch entry behind.
        config.prefetchCache.delete(cacheKey);
        throw error;
      })
      .finally(() => {
        config.inFlightMap.delete(cacheKey);
      });

    config.inFlightMap.set(cacheKey, inFlight);
  }

  const result = await inFlight;
  // Result is already normalized by the in-flight promise.
  config.mainMap.set(pageId, { key: cacheKey, data: result });
  return { data: result, source: "fetched" };
}
/**
* Specialized loader for fetching raw Notion blocks with caching
*/
export async function loadBlocksForPage(
pageRecord: Record<string, any>,
pageIndex: number,
totalCount: number,
title: string,
blocksMap: Map<string, { key: string; data: any[] }>,
blockPrefetchCache: LRUCache<any[]>,
inFlightBlockFetches: Map<string, Promise<any[]>>,
blockCacheHits: { value: number },
blockFetchCount: { value: number }
): Promise<{ data: any[]; source: "cache" | "fetched" }> {
return loadWithCache<any[]>(pageRecord, pageIndex, totalCount, title, {
mainMap: blocksMap,
prefetchCache: blockPrefetchCache,
inFlightMap: inFlightBlockFetches,
cacheHits: blockCacheHits,
fetchCount: blockFetchCount,
fetchFn: fetchNotionBlocks,
normalizeResult: (result) => (Array.isArray(result) ? result : []),
validateResult: (blocks) => !containsExpiringUrls(blocks),
logPrefix: "Fetching blocks",
});
}
/**
* Specialized loader for fetching markdown from Notion pages with caching
*/
export async function loadMarkdownForPage(
pageRecord: Record<string, any>,
pageIndex: number,
totalCount: number,
title: string,
markdownMap: Map<string, { key: string; data: any }>,
markdownPrefetchCache: LRUCache<any>,
inFlightMarkdownFetches: Map<string, Promise<any>>,
markdownCacheHits: { value: number },
markdownFetchCount: { value: number }
): Promise<{ data: any; source: "cache" | "fetched" }> {
return loadWithCache<any>(pageRecord, pageIndex, totalCount, title, {
mainMap: markdownMap,
prefetchCache: markdownPrefetchCache,
inFlightMap: inFlightMarkdownFetches,
cacheHits: markdownCacheHits,
fetchCount: markdownFetchCount,
fetchFn: (pageId) => n2m.pageToMarkdown(pageId),
normalizeResult: (result) =>
Array.isArray(result) ? result : (result ?? []),
validateResult: (markdown) => !containsExpiringUrls(markdown),
logPrefix: "Converting markdown",
});
}