9 changes: 7 additions & 2 deletions packages/gitbook/openNext/customWorkers/defaultWrangler.jsonc
@@ -16,7 +16,8 @@
"env": {
"dev": {
"vars": {
"STAGE": "dev"
"STAGE": "dev",
"OPEN_NEXT_REQUEST_ID_HEADER": "true"
},
"r2_buckets": [
{
@@ -84,6 +85,9 @@
}
},
"staging": {
"vars": {
"OPEN_NEXT_REQUEST_ID_HEADER": "true"
},
"r2_buckets": [
{
"binding": "NEXT_INC_CACHE_R2_BUCKET",
@@ -125,7 +129,8 @@
"vars": {
// This is a bit misleading, but it means that we can have 500 concurrent revalidations
// This means that we'll have up to 100 durable objects instance running at the same time
"MAX_REVALIDATE_CONCURRENCY": "100"
"MAX_REVALIDATE_CONCURRENCY": "100",
"OPEN_NEXT_REQUEST_ID_HEADER": "true"
},
"r2_buckets": [
{
@@ -21,7 +21,8 @@
"dev": {
"vars": {
"STAGE": "dev",
"NEXT_PRIVATE_DEBUG_CACHE": "true"
"NEXT_PRIVATE_DEBUG_CACHE": "true",
"OPEN_NEXT_REQUEST_ID_HEADER": "true"
},
"r2_buckets": [
{
@@ -85,7 +86,8 @@
"staging": {
"vars": {
"STAGE": "staging",
"WORKER_VERSION_ID": "TO_REPLACE"
"WORKER_VERSION_ID": "TO_REPLACE",
"OPEN_NEXT_REQUEST_ID_HEADER": "true"
},
"routes": [
{
@@ -153,7 +155,8 @@
// TODO: remove this once the issue is fixed
"DEBUG_CLOUDFLARE": "true",
"WORKER_VERSION_ID": "TO_REPLACE",
"STAGE": "production"
"STAGE": "production",
"OPEN_NEXT_REQUEST_ID_HEADER": "true"
},
"routes": [
{
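The only functional change in these wrangler configs is the new `OPEN_NEXT_REQUEST_ID_HEADER: "true"` var added to each environment. A minimal sketch of how a flag like this can be consumed in a worker, assuming it simply toggles whether a per-request ID is attached to responses — the actual behaviour lives inside `@opennextjs/cloudflare`, and the header name and `handleNextRequest` helper below are placeholders:

```ts
// Illustrative only — not the @opennextjs/cloudflare implementation.
// Assumes OPEN_NEXT_REQUEST_ID_HEADER merely toggles echoing a request ID header.
interface Env {
    OPEN_NEXT_REQUEST_ID_HEADER?: string;
}

// Placeholder for the actual Next.js request handler provided by the adapter.
declare function handleNextRequest(request: Request, env: Env): Promise<Response>;

export default {
    async fetch(request: Request, env: Env): Promise<Response> {
        const response = await handleNextRequest(request, env);

        if (env.OPEN_NEXT_REQUEST_ID_HEADER === 'true') {
            // Attach an ID so a response can be correlated with worker logs.
            const headers = new Headers(response.headers);
            headers.set('x-request-id', crypto.randomUUID()); // header name is an assumption
            return new Response(response.body, {
                status: response.status,
                statusText: response.statusText,
                headers,
            });
        }
        return response;
    },
};
```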
173 changes: 73 additions & 100 deletions packages/gitbook/openNext/incrementalCache.ts
@@ -1,6 +1,4 @@
import { createHash } from 'node:crypto';

import { trace } from '@/lib/tracing';
import type {
CacheEntryType,
CacheValue,
@@ -34,41 +32,31 @@ class GitbookIncrementalCache implements IncrementalCache {
cacheType?: CacheType
): Promise<WithLastModified<CacheValue<CacheType>> | null> {
const cacheKey = this.getR2Key(key, cacheType);
return trace(
{
operation: 'openNextIncrementalCacheGet',
name: cacheKey,
},
async (span) => {
span.setAttribute('cacheType', cacheType ?? 'cache');
const r2 = getCloudflareContext().env[BINDING_NAME];
const localCache = await this.getCacheInstance();
if (!r2) throw new Error('No R2 bucket');
try {
// Check local cache first if available
const localCacheEntry = await localCache.match(this.getCacheUrlKey(cacheKey));
if (localCacheEntry) {
span.setAttribute('cacheHit', 'local');
const result = (await localCacheEntry.json()) as WithLastModified<
CacheValue<CacheType>
>;
return this.returnNullOn404(result);
}

const r2Object = await r2.get(cacheKey);
if (!r2Object) return null;

span.setAttribute('cacheHit', 'r2');
return this.returnNullOn404({
value: await r2Object.json(),
lastModified: r2Object.uploaded.getTime(),
});
} catch (e) {
console.error('Failed to get from cache', e);
return null;
}
const r2 = getCloudflareContext().env[BINDING_NAME];
const localCache = await this.getCacheInstance();
if (!r2) throw new Error('No R2 bucket');
try {
// Check local cache first if available
const localCacheEntry = await localCache.match(this.getCacheUrlKey(cacheKey));
if (localCacheEntry) {
const result = (await localCacheEntry.json()) as WithLastModified<
CacheValue<CacheType>
>;
return this.returnNullOn404(result);
}
);

const r2Object = await r2.get(cacheKey);
if (!r2Object) return null;

return this.returnNullOn404({
value: await r2Object.json(),
lastModified: r2Object.uploaded.getTime(),
});
} catch (e) {
console.error('Failed to get from cache', e);
return null;
}
}

//TODO: This is a workaround to handle 404 responses in the cache.
@@ -89,75 +77,60 @@ class GitbookIncrementalCache implements IncrementalCache {
cacheType?: CacheType
): Promise<void> {
const cacheKey = this.getR2Key(key, cacheType);
return trace(
{
operation: 'openNextIncrementalCacheSet',
name: cacheKey,
},
async (span) => {
span.setAttribute('cacheType', cacheType ?? 'cache');
const localCache = await this.getCacheInstance();

try {
await this.writeToR2(cacheKey, JSON.stringify(value));

//TODO: Check if there is any places where we don't have tags
// Ideally we should always have tags, but in case we don't, we need to decide how to handle it
// For now we default to a build ID tag, which allow us to invalidate the cache in case something is wrong in this deployment
const tags = this.getTagsFromCacheEntry(value) ?? [
`build_id/${process.env.NEXT_BUILD_ID}`,
];

// We consider R2 as the source of truth, so we update the local cache
// only after a successful R2 write
await localCache.put(
this.getCacheUrlKey(cacheKey),
new Response(
JSON.stringify({
value,
// Note: `Date.now()` returns the time of the last IO rather than the actual time.
// See https://developers.cloudflare.com/workers/reference/security-model/
lastModified: Date.now(),
}),
{
headers: {
// Cache-Control default to 30 minutes, will be overridden by `revalidate`
// In theory we should always get the `revalidate` value
'cache-control': `max-age=${value.revalidate ?? 60 * 30}`,
'cache-tag': tags.join(','),
},
}
)
);
} catch (e) {
console.error('Failed to set to cache', e);
}
}
);

const localCache = await this.getCacheInstance();

try {
await this.writeToR2(cacheKey, JSON.stringify(value));

//TODO: Check if there is any places where we don't have tags
// Ideally we should always have tags, but in case we don't, we need to decide how to handle it
// For now we default to a build ID tag, which allow us to invalidate the cache in case something is wrong in this deployment
const tags = this.getTagsFromCacheEntry(value) ?? [
`build_id/${process.env.NEXT_BUILD_ID}`,
];

// We consider R2 as the source of truth, so we update the local cache
// only after a successful R2 write
await localCache.put(
this.getCacheUrlKey(cacheKey),
new Response(
JSON.stringify({
value,
// Note: `Date.now()` returns the time of the last IO rather than the actual time.
// See https://developers.cloudflare.com/workers/reference/security-model/
lastModified: Date.now(),
}),
{
headers: {
// Cache-Control default to 30 minutes, will be overridden by `revalidate`
// In theory we should always get the `revalidate` value
'cache-control': `max-age=${value.revalidate ?? 60 * 30}`,
'cache-tag': tags.join(','),
},
}
)
);
} catch (e) {
console.error('Failed to set to cache', e);
}
}

async delete(key: string): Promise<void> {
const cacheKey = this.getR2Key(key);
return trace(
{
operation: 'openNextIncrementalCacheDelete',
name: cacheKey,
},
async () => {
const r2 = getCloudflareContext().env[BINDING_NAME];
const localCache = await this.getCacheInstance();
if (!r2) throw new Error('No R2 bucket');

try {
await r2.delete(cacheKey);

// Here again R2 is the source of truth, so we delete from local cache first
await localCache.delete(this.getCacheUrlKey(cacheKey));
} catch (e) {
console.error('Failed to delete from cache', e);
}
}
);

const r2 = getCloudflareContext().env[BINDING_NAME];
const localCache = await this.getCacheInstance();
if (!r2) throw new Error('No R2 bucket');

try {
await r2.delete(cacheKey);

// Here again R2 is the source of truth, so we delete from local cache first
await localCache.delete(this.getCacheUrlKey(cacheKey));
} catch (e) {
console.error('Failed to delete from cache', e);
}
}

async writeToR2(key: string, value: string): Promise<void> {
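With the `trace` wrappers gone, the read path above keeps its original shape: the Workers Cache API acts as a local read-through layer in front of R2, and R2 remains the source of truth for writes and deletes. A condensed sketch of that read pattern — simplified key handling, and the real class additionally maps cached 404 markers to `null` via `returnNullOn404`:

```ts
// Condensed sketch of the local-cache-then-R2 read-through used by get() above.
// Cache and R2Bucket are the ambient types from @cloudflare/workers-types.
async function readThrough<T>(
    cacheKey: string,
    localCache: Cache,
    r2: R2Bucket
): Promise<{ value: T; lastModified: number } | null> {
    // 1. Cheap lookup in the per-colo Workers cache.
    const local = await localCache.match(`https://local.cache/${cacheKey}`);
    if (local) {
        return (await local.json()) as { value: T; lastModified: number };
    }

    // 2. Fall back to R2, the source of truth.
    const object = await r2.get(cacheKey);
    if (!object) return null;

    return {
        value: (await object.json()) as T,
        lastModified: object.uploaded.getTime(),
    };
}
```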
7 changes: 2 additions & 5 deletions packages/gitbook/openNext/queue/middleware.ts
@@ -1,14 +1,11 @@
import { trace } from '@/lib/tracing';
import type { Queue } from '@opennextjs/aws/types/overrides.js';
import { getCloudflareContext } from '@opennextjs/cloudflare';
import doQueue from '@opennextjs/cloudflare/overrides/queue/do-queue';

export default {
name: 'GitbookISRQueue',
send: async (msg) => {
return trace({ operation: 'gitbookISRQueueSend', name: msg.MessageBody.url }, async () => {
const { ctx } = getCloudflareContext();
ctx.waitUntil(doQueue.send(msg));
});
const { ctx } = getCloudflareContext();
ctx.waitUntil(doQueue.send(msg));
},
} satisfies Queue;
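Behaviour is unchanged apart from the removed trace span: the ISR message is still handed to `doQueue.send` inside `ctx.waitUntil`, so enqueuing the revalidation does not block the response. A minimal illustration of that deferral pattern in isolation — the `fetch` handler and `sendToQueue` stand-in below are hypothetical, not the OpenNext queue contract:

```ts
// Minimal illustration of deferring background work with ctx.waitUntil.
export default {
    async fetch(request: Request, _env: unknown, ctx: ExecutionContext): Promise<Response> {
        // The response is returned immediately; the enqueue keeps running afterwards.
        ctx.waitUntil(sendToQueue({ url: request.url }));
        return new Response('ok');
    },
};

// Hypothetical stand-in for doQueue.send(msg).
async function sendToQueue(msg: { url: string }): Promise<void> {
    console.log('queued revalidation for', msg.url);
}
```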
4 changes: 3 additions & 1 deletion packages/gitbook/openNext/queue/server.ts
@@ -1,9 +1,11 @@
import { getLogger } from '@/lib/logger';
import type { Queue } from '@opennextjs/aws/types/overrides.js';

export default {
name: 'GitbookISRQueue',
send: async (msg) => {
const logger = getLogger().subLogger('GitbookISRQueue');
// We should never reach this point in the server. If that's the case, we should log it.
console.warn('GitbookISRQueue: send called on server side, this should not happen.', msg);
logger.warn('send called on server side, this should not happen.', msg);
},
} satisfies Queue;
58 changes: 13 additions & 45 deletions packages/gitbook/openNext/tagCache/middleware.ts
@@ -1,4 +1,4 @@
import { trace } from '@/lib/tracing';
import { getLogger } from '@/lib/logger';
import type { NextModeTagCache } from '@opennextjs/aws/types/overrides.js';
import doShardedTagCache from '@opennextjs/cloudflare/overrides/tag-cache/do-sharded-tag-cache';
import { softTagFilter } from '@opennextjs/cloudflare/overrides/tag-cache/tag-cache-filter';
@@ -23,59 +23,27 @@ export default {
getLastRevalidated: async (tags: string[]) => {
const tagsToCheck = tags.filter(softTagFilter);
if (tagsToCheck.length === 0) {
// If we reach here, it probably means that there is an issue that we'll need to address.
console.warn(
'getLastRevalidated - No valid tags to check for last revalidation, original tags:',
tags
);
return 0; // If no tags to check, return 0
}
return trace(
{
operation: 'gitbookTagCacheGetLastRevalidated',
name: tagsToCheck.join(', '),
},
async () => {
return await originalTagCache.getLastRevalidated(tagsToCheck);
}
);

return await originalTagCache.getLastRevalidated(tagsToCheck);
},
hasBeenRevalidated: async (tags: string[], lastModified?: number) => {
const tagsToCheck = tags.filter(softTagFilter);
if (tagsToCheck.length === 0) {
// If we reach here, it probably means that there is an issue that we'll need to address.
console.warn(
'hasBeenRevalidated - No valid tags to check for revalidation, original tags:',
tags
);
return false; // If no tags to check, return false
}
return trace(
{
operation: 'gitbookTagCacheHasBeenRevalidated',
name: tagsToCheck.join(', '),
},
async () => {
const result = await originalTagCache.hasBeenRevalidated(tagsToCheck, lastModified);
return result;
}
);

return await originalTagCache.hasBeenRevalidated(tagsToCheck, lastModified);
},
writeTags: async (tags: string[]) => {
return trace(
{
operation: 'gitbookTagCacheWriteTags',
name: tags.join(', '),
},
async () => {
const tagsToWrite = tags.filter(softTagFilter);
if (tagsToWrite.length === 0) {
console.warn('writeTags - No valid tags to write');
return; // If no tags to write, exit early
}
// Write only the filtered tags
await originalTagCache.writeTags(tagsToWrite);
}
);
const tagsToWrite = tags.filter(softTagFilter);
if (tagsToWrite.length === 0) {
const logger = getLogger().subLogger('gitbookTagCache');
logger.warn('writeTags - No valid tags to write');
return; // If no tags to write, exit early
}
// Write only the filtered tags
await originalTagCache.writeTags(tagsToWrite);
},
} satisfies NextModeTagCache;
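All three methods now share the same guard: filter out soft tags with `softTagFilter`, warn and bail out early when nothing valid remains, then delegate to the sharded DO tag cache with the filtered list only. The repeated shape could be expressed with a small helper along these lines (hypothetical, not part of the PR):

```ts
import { softTagFilter } from '@opennextjs/cloudflare/overrides/tag-cache/tag-cache-filter';

// Hypothetical helper capturing the filter → warn → delegate guard used above.
async function withFilteredTags<T>(
    tags: string[],
    fallback: T,
    onEmpty: (originalTags: string[]) => void,
    delegate: (filtered: string[]) => Promise<T>
): Promise<T> {
    const filtered = tags.filter(softTagFilter);
    if (filtered.length === 0) {
        // Reaching this point usually means upstream code produced only soft tags.
        onEmpty(tags);
        return fallback;
    }
    return delegate(filtered);
}

// e.g. hasBeenRevalidated could be written as:
// withFilteredTags(tags, false, warnEmpty, (t) => originalTagCache.hasBeenRevalidated(t, lastModified));
```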
5 changes: 3 additions & 2 deletions packages/gitbook/src/app/~gitbook/revalidate/route.ts
@@ -1,5 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server';

import { getLogger } from '@/lib/logger';
import { withVerifySignature } from '@/lib/routes';
import { revalidateTag } from 'next/cache';

@@ -12,6 +13,7 @@ interface JsonBody {
* The body should be a JSON with { tags: string[] }
*/
export async function POST(req: NextRequest) {
const logger = getLogger().subLogger('revalidate');
return withVerifySignature<JsonBody>(req, async (body) => {
if (!body.tags || !Array.isArray(body.tags)) {
return NextResponse.json(
@@ -23,8 +25,7 @@ }
}

body.tags.forEach((tag) => {
// biome-ignore lint/suspicious/noConsole: we want to log here
console.log(`Revalidating tag: ${tag}`);
logger.log(`Revalidating tag: ${tag}`);
revalidateTag(tag);
});

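For reference, calling the route is unchanged: a signed POST to `/~gitbook/revalidate` with a JSON body of `{ tags: string[] }`. A sketch of a caller — the signature header name and scheme below are assumptions; the real contract is whatever `withVerifySignature` expects on the server:

```ts
// Illustrative caller; 'x-gitbook-signature' is a hypothetical header name,
// the real scheme is defined by withVerifySignature on the server.
async function revalidateTags(baseUrl: string, tags: string[], signature: string): Promise<void> {
    const response = await fetch(`${baseUrl}/~gitbook/revalidate`, {
        method: 'POST',
        headers: {
            'content-type': 'application/json',
            'x-gitbook-signature': signature, // assumption
        },
        body: JSON.stringify({ tags }),
    });
    if (!response.ok) {
        throw new Error(`Revalidation failed with status ${response.status}`);
    }
}
```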