
Commit b1791b3

debug channel in restartable render

1 parent: bd8c4e6

1 file changed (+96, −32 lines)

packages/next/src/server/app-render/app-render.tsx

Lines changed: 96 additions & 32 deletions
@@ -2275,13 +2275,28 @@ async function renderToStream(
   const environmentName = () =>
     requestStore.prerenderPhase === true ? 'Prerender' : 'Server'

+  const [resolveValidation, validationOutlet] = createValidationOutlet()
+  const debugChannel = setReactDebugChannel && createDebugChannel()
+
+  if (debugChannel) {
+    const [readableSsr, readableBrowser] =
+      debugChannel.clientSide.readable.tee()
+
+    reactDebugStream = readableSsr
+
+    setReactDebugChannel(
+      { readable: readableBrowser },
+      htmlRequestId,
+      requestId
+    )
+  }
+
   if (process.env.NEXT_RESTART_ON_CACHE_MISS !== '0') {
     // Try to render the page and see if there's any cache misses.
     // If there are, wait for caches to finish and restart the render.

-    const [resolveValidation, validationOutlet] = createValidationOutlet()
-
     const renderRestartable = async (
+      serverDebugChannel: DebugChannelServer | undefined,
       signal: AbortSignal | undefined,
       onPrerenderStageEnd: (() => void) | undefined
     ) => {
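
For readers less familiar with the Web Streams API used in the added block above, the following is a small illustrative sketch, not part of the commit: a single debug stream is `tee()`'d into two independent branches, one drained during SSR and one handed off to the browser. The helper name `splitDebugStream` and the logging are hypothetical.

async function splitDebugStream(debugReadable: ReadableStream<Uint8Array>) {
  // tee() produces two independent branches that each receive every chunk.
  const [readableSsr, readableBrowser] = debugReadable.tee()

  // One branch can be drained on the server, e.g. to surface debug info during SSR...
  void (async () => {
    const reader = readableSsr.getReader()
    for (;;) {
      const { done, value } = await reader.read()
      if (done) break
      console.debug('SSR debug chunk:', value.byteLength, 'bytes')
    }
  })()

  // ...while the other branch is handed to whatever transports it to the browser.
  return readableBrowser
}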
@@ -2305,8 +2320,7 @@ async function renderToStream(
           onError: serverComponentsErrorHandler,
           environmentName,
           filterStackFrame,
-          // TODO(restart-on-cache-miss): implement `debugChannel`
-          // debugChannel: debugChannel?.serverSide,
+          debugChannel: serverDebugChannel,
           signal,
         }
       )
@@ -2331,12 +2345,18 @@ async function renderToStream(
     requestStore.cacheSignal = cacheSignal

     const initialRenderReactController = new AbortController()
+    // We don't know if we'll use this render, so buffer debug channel writes until we find out.
+    const initialRenderServerDebugChannel = debugChannel
+      ? createBufferedServerDebugChannel()
+      : undefined
+
     const hadCacheMissInStaticStagePromise =
       createPromiseWithResolvers<boolean>()

     console.debug(`renderToStream (1) :: attempting render`)

     const reactServerStreamPromise = renderRestartable(
+      initialRenderServerDebugChannel?.channel,
       initialRenderReactController.signal,
       () => {
         console.debug(
@@ -2359,6 +2379,14 @@ async function renderToStream(

     if (!hasCacheMissInStaticStage) {
       // No cache misses. Use the stream as is.
+
+      // The debug info from this render should be written to the real debug channel.
+      if (debugChannel && initialRenderServerDebugChannel) {
+        void initialRenderServerDebugChannel.pipeToChannel(
+          debugChannel.serverSide
+        )
+      }
+
       reactServerResult = new ReactServerResult(
         await reactServerStreamPromise
       )
@@ -2391,14 +2419,23 @@ async function renderToStream(
       )
       requestStore.cacheSignal = null

+      // We know we'll use this render, so unlike the initial one,
+      // it can write into the debug channel directly instead of buffering.
+      const finalRenderServerDebugChannel = debugChannel?.serverSide
+
       reactServerResult = new ReactServerResult(
-        await renderRestartable(undefined, () => {
-          console.debug(
-            `renderToStream (2) :: end of static stage after restart. ${cacheSignal['count']} caches pending`
-          )
-        })
+        await renderRestartable(
+          finalRenderServerDebugChannel,
+          undefined,
+          () => {
+            console.debug(
+              `renderToStream (2) :: end of static stage after restart. ${cacheSignal['count']} caches pending`
+            )
+          }
+        )
       )
     }
+
     // TODO(restart-on-cache-miss):
     // This can probably be optimized to do less work,
     // because we've already made sure that we have warm caches.
@@ -2415,25 +2452,8 @@ async function renderToStream(
     )
   } else {
     const rscPayload = await getPayload()
-
-    const [resolveValidation, validationOutlet] = createValidationOutlet()
     rscPayload._validation = validationOutlet

-    const debugChannel = setReactDebugChannel && createDebugChannel()
-
-    if (debugChannel) {
-      const [readableSsr, readableBrowser] =
-        debugChannel.clientSide.readable.tee()
-
-      reactDebugStream = readableSsr
-
-      setReactDebugChannel(
-        { readable: readableBrowser },
-        htmlRequestId,
-        requestId
-      )
-    }
-
     const reactServerStream = await workUnitAsyncStorage.run(
       requestStore,
       scheduleInSequentialTasks,
@@ -2822,12 +2842,21 @@ async function renderToStream(
   }
 }

-function createDebugChannel():
-  | {
-      serverSide: { readable?: ReadableStream; writable: WritableStream }
-      clientSide: { readable: ReadableStream; writable?: WritableStream }
-    }
-  | undefined {
+type DebugChannelPair = {
+  serverSide: DebugChannelServer
+  clientSide: DebugChannelClient
+}
+
+type DebugChannelServer = {
+  readable?: ReadableStream<Uint8Array>
+  writable: WritableStream<Uint8Array>
+}
+type DebugChannelClient = {
+  readable: ReadableStream<Uint8Array>
+  writable?: WritableStream<Uint8Array>
+}
+
+function createDebugChannel(): DebugChannelPair | undefined {
   if (process.env.NODE_ENV === 'production') {
     return undefined
   }
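
The body of `createDebugChannel` is not part of this diff, only its new signature and the extracted types. As a hedged illustration of what a matching `DebugChannelServer`/`DebugChannelClient` pair can look like, here is one plausible construction using two `TransformStream`s; this is a sketch under that assumption, not the actual implementation.

function createDebugChannelPairSketch(): DebugChannelPair {
  // Server -> client direction: whatever the server writes, the client can read.
  const serverToClient = new TransformStream<Uint8Array, Uint8Array>()
  // Client -> server direction (optional, hence the optional readable/writable fields).
  const clientToServer = new TransformStream<Uint8Array, Uint8Array>()

  return {
    serverSide: {
      readable: clientToServer.readable,
      writable: serverToClient.writable,
    },
    clientSide: {
      readable: serverToClient.readable,
      writable: clientToServer.writable,
    },
  }
}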
@@ -2860,6 +2889,41 @@ function createDebugChannel():
   }
 }

+function createBufferedServerDebugChannel() {
+  // We buffer all chunks until we're connected to a real debug channel using `pipeToChannel()`.
+  const chunks: Uint8Array[] = []
+  let onWrite = function bufferChunk(chunk: Uint8Array) {
+    chunks.push(chunk)
+  }
+  let onClose: (() => Promise<void>) | undefined = undefined
+
+  const writable = new WritableStream<Uint8Array>({
+    write(chunk) {
+      onWrite(chunk)
+    },
+    close() {
+      return onClose?.()
+    },
+  })
+
+  return {
+    channel: { writable } as DebugChannelServer,
+    /** Attach this stream to a real debug channel. */
+    async pipeToChannel(debugChannel: DebugChannelServer) {
+      // Once we're committed to using this stream, write out the chunks we already have.
+      const writer = debugChannel.writable.getWriter()
+      for (const chunk of chunks) {
+        await writer.write(chunk)
+      }
+      chunks.length = 0
+
+      // After this point, we stop buffering, and future chunks will be written directly to the destination.
+      onWrite = writer.write.bind(writer)
+      onClose = writer.close.bind(writer)
+    },
+  }
+}
+
 function createValidationOutlet() {
   let resolveValidation: (value: React.ReactNode) => void
   let outlet = new Promise<React.ReactNode>((resolve) => {
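
To make the buffer-then-commit behaviour of the new helper concrete, here is a small usage sketch, assuming `createBufferedServerDebugChannel` and `DebugChannelServer` from the hunks above are in scope. The destination is backed by a plain `TransformStream` purely for illustration; it stands in for the real debug channel.

async function demoBufferedDebugChannel() {
  const buffered = createBufferedServerDebugChannel()

  // While the fate of the trial render is unknown, writes are only buffered in memory.
  const earlyWriter = buffered.channel.writable.getWriter()
  await earlyWriter.write(new TextEncoder().encode('debug info from the trial render'))
  earlyWriter.releaseLock()

  // Once we commit to this render, replay the buffered chunks into the real channel.
  const transport = new TransformStream<Uint8Array, Uint8Array>()
  await buffered.pipeToChannel({ writable: transport.writable })

  // Subsequent writes skip the buffer and go straight to the destination.
  const lateWriter = buffered.channel.writable.getWriter()
  await lateWriter.write(new TextEncoder().encode('debug info written after the commit point'))
}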
