Commit ce8222e
wip

1 parent d279c31

9 files changed: +476 −62 lines

packages/next/errors.json

Lines changed: 3 additions & 1 deletion

@@ -867,5 +867,7 @@
   "866": "Both \"%s\" and \"%s\" files are detected. Please use \"%s\" instead.",
   "867": "The %s \"%s\" must export a %s or a \\`default\\` function",
   "868": "No reference found for param: %s in reference: %s",
-  "869": "No reference found for segment: %s with reference: %s"
+  "869": "No reference found for segment: %s with reference: %s",
+  "870": "`pipelineInSequentialTasks` should not be called in edge runtime.",
+  "871": "dynamicInDevStagedRendering should only be used in development mode and when Cache Components is enabled."
 }

packages/next/src/server/app-render/app-render-render-utils.ts

Lines changed: 37 additions & 0 deletions

@@ -29,3 +29,40 @@ export function scheduleInSequentialTasks<R>(
     })
   }
 }
+
+/**
+ * This is a utility function to make scheduling sequential tasks that run back to back easier.
+ * We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.
+ * The function that runs in the second task gets access to the first task's result.
+ */
+export function pipelineInSequentialTasks<A, B>(
+  render: () => A,
+  followup: (a: A) => B | Promise<B>
+): Promise<B> {
+  if (process.env.NEXT_RUNTIME === 'edge') {
+    throw new InvariantError(
+      '`pipelineInSequentialTasks` should not be called in edge runtime.'
+    )
+  } else {
+    return new Promise((resolve, reject) => {
+      let renderResult: A | undefined = undefined
+      setTimeout(() => {
+        try {
+          renderResult = render()
+        } catch (err) {
+          clearTimeout(followupId)
+          reject(err)
+        }
+      }, 0)
+      const followupId = setTimeout(() => {
+        // if `render` threw, then the `followup` timeout would've been cleared,
+        // so if we got here, we're guaranteed to have a `renderResult`.
+        try {
+          resolve(followup(renderResult!))
+        } catch (err) {
+          reject(err)
+        }
+      }, 0)
+    })
+  }
+}
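
For orientation, a minimal usage sketch of the new helper follows. `fakeRender`, `fakeFollowup`, and `demo` are hypothetical stand-ins invented for this illustration; they are not part of the commit.

import { pipelineInSequentialTasks } from './app-render-render-utils'

// The first callback runs in one setTimeout task; the second runs in the
// very next task on the same queue and receives the first task's result.
function fakeRender(): number {
  return 42
}

async function fakeFollowup(value: number): Promise<string> {
  return `render produced ${value}`
}

async function demo(): Promise<void> {
  const result = await pipelineInSequentialTasks(fakeRender, fakeFollowup)
  console.log(result) // "render produced 42"
}

void demo()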

packages/next/src/server/app-render/app-render.tsx

Lines changed: 191 additions & 39 deletions

@@ -168,7 +168,10 @@ import {
   prerenderAndAbortInSequentialTasks,
 } from './app-render-prerender-utils'
 import { printDebugThrownValueForProspectiveRender } from './prospective-render-utils'
-import { scheduleInSequentialTasks } from './app-render-render-utils'
+import {
+  pipelineInSequentialTasks,
+  scheduleInSequentialTasks,
+} from './app-render-render-utils'
 import { waitAtLeastOneReactRenderTask } from '../../lib/scheduler'
 import {
   workUnitAsyncStorage,
@@ -197,6 +200,7 @@ import {
   trackPendingChunkLoad,
   trackPendingImport,
   trackPendingModules,
+  trackPendingModulesInRender,
 } from './module-loading/track-module-loading.external'
 import { isReactLargeShellError } from './react-large-shell-error'
 import type { GlobalErrorComponent } from '../../client/components/builtin/global-error'
@@ -2143,57 +2147,198 @@ async function renderToStream(
     // We only have a Prerender environment for projects opted into cacheComponents
     experimental.cacheComponents
   ) {
-    // This is a dynamic render. We don't do dynamic tracking because we're not prerendering
-    const RSCPayload: InitialRSCPayload & {
+    type RSCPayloadWithValidation = InitialRSCPayload & {
       /** Only available during cacheComponents development builds. Used for logging errors. */
       _validation?: Promise<React.ReactNode>
-    } = await workUnitAsyncStorage.run(
-      requestStore,
-      getRSCPayload,
-      tree,
-      ctx,
-      res.statusCode === 404
-    )
+    }
+
     const [resolveValidation, validationOutlet] = createValidationOutlet()
-    RSCPayload._validation = validationOutlet
 
-    const debugChannel = setReactDebugChannel && createDebugChannel()
+    const getPayload = async (): Promise<RSCPayloadWithValidation> => {
+      const payload: RSCPayloadWithValidation =
+        await workUnitAsyncStorage.run(
+          requestStore,
+          getRSCPayload,
+          tree,
+          ctx,
+          res.statusCode === 404
+        )
+      // Placing the validation outlet in the payload is safe
+      // even if we end up discarding a render and restarting,
+      // because we're not going to wait for the stream to complete,
+      // so leaving the validation unresolved is fine.
+      payload._validation = validationOutlet
+      return payload
+    }
 
-    if (debugChannel) {
+    const setDebugChannelForClientRender = (
+      debugChannel: DebugChannelPair
+    ) => {
       const [readableSsr, readableBrowser] =
         debugChannel.clientSide.readable.tee()
 
       reactDebugStream = readableSsr
 
-      setReactDebugChannel(
+      setReactDebugChannel!(
        { readable: readableBrowser },
        htmlRequestId,
        requestId
      )
    }
 
-    const reactServerStream = await workUnitAsyncStorage.run(
+    const environmentName = () =>
+      requestStore.prerenderPhase === true ? 'Prerender' : 'Server'
+
+    // Try to render the page and see if there are any cache misses.
+    // If there are, wait for caches to finish and restart the render.
+
+    // This render might end up being used as a prospective render (if there are cache misses),
+    // so we need to set it up for filling caches.
+    const cacheSignal = new CacheSignal()
+
+    // If we encounter async modules that delay rendering, we'll also need to restart.
+    // TODO(restart-on-cache-miss): technically, we only need to wait for pending *server* modules here,
+    // but `trackPendingModules` doesn't distinguish between client and server.
+    trackPendingModulesInRender(cacheSignal)
+
+    const prerenderResumeDataCache = createPrerenderResumeDataCache()
+
+    requestStore.prerenderResumeDataCache = prerenderResumeDataCache
+    // `getRenderResumeDataCache` will fall back to using `prerenderResumeDataCache` as `renderResumeDataCache`,
+    // so not having a resume data cache won't break any expectations in case we don't need to restart.
+    requestStore.renderResumeDataCache = null
+    requestStore.cacheSignal = cacheSignal
+
+    const initialRenderReactController = new AbortController()
+
+    const intialRenderDebugChannel =
+      setReactDebugChannel && createDebugChannel()
+
+    const initialRscPayload = await getPayload()
+    const maybeInitialServerStream = await workUnitAsyncStorage.run(
       requestStore,
-      scheduleInSequentialTasks,
-      () => {
-        requestStore.prerenderPhase = true
-        return ComponentMod.renderToReadableStream(
-          RSCPayload,
-          clientReferenceManifest.clientModules,
-          {
-            onError: serverComponentsErrorHandler,
-            environmentName: () =>
-              requestStore.prerenderPhase === true ? 'Prerender' : 'Server',
-            filterStackFrame,
-            debugChannel: debugChannel?.serverSide,
+      () =>
+        pipelineInSequentialTasks(
+          () => {
+            // Static stage
+            requestStore.prerenderPhase = true
+            return ComponentMod.renderToReadableStream(
+              initialRscPayload,
+              clientReferenceManifest.clientModules,
+              {
+                onError: serverComponentsErrorHandler,
+                environmentName,
+                filterStackFrame,
+                debugChannel: intialRenderDebugChannel?.serverSide,
+                signal: initialRenderReactController.signal,
+              }
+            )
+          },
+          async (stream) => {
+            // Dynamic stage
+            // Note: if we had cache misses, things that would've happened statically otherwise
+            // may be marked as dynamic instead.
+            requestStore.prerenderPhase = false
+
+            // If all cache reads initiated in the static stage have completed,
+            // then all of the necessary caches have to be warm (or there are no caches on the page).
+            // On the other hand, if we still have pending cache reads, then we had a cache miss,
+            // and the static stage didn't render all the content that it normally would have.
+            const hadCacheMiss = cacheSignal.hasPendingReads()
+            if (!hadCacheMiss) {
+              // No cache misses. We can use the stream as is.
+              return stream
+            } else {
+              // Cache miss. We'll discard this stream, and render again.
+              return null
+            }
           }
         )
-      },
-      () => {
-        requestStore.prerenderPhase = false
-      }
     )
 
+    if (maybeInitialServerStream !== null) {
+      // No cache misses. We can use the stream as is.
+
+      // We're using this render, so we should pass its debug channel to the client render.
+      if (intialRenderDebugChannel) {
+        setDebugChannelForClientRender(intialRenderDebugChannel)
+      }
+
+      reactServerResult = new ReactServerResult(maybeInitialServerStream)
+    } else {
+      // Cache miss. We will use the initial render to fill caches, and discard its result.
+      // Then, we can render again with warm caches.
+
+      // TODO(restart-on-cache-miss):
+      // This might end up waiting for more caches than strictly necessary,
+      // because we can't abort the render yet, and we'll let runtime/dynamic APIs resolve.
+      // Ideally we'd only wait for caches that are needed in the static stage.
+      // This will be optimized in the future by not allowing runtime/dynamic APIs to resolve.
+
+      // During a render, React pings pending tasks using `setImmediate`,
+      // and only waiting for a single `cacheReady` can make us stop filling caches too soon.
+      // To avoid this, we await `cacheReady` repeatedly with an extra delay to let React try to render new content
+      // (and potentially discover more caches).
+      await cacheSignal.cacheReadyInRender()
+      initialRenderReactController.abort()
+
+      //===============================================
+
+      // The initial render acted as a prospective render to warm the caches.
+      // Now, we need to do another render.
+
+      // TODO(restart-on-cache-miss): we should use a separate request store for this instead
+
+      // We've filled the caches, so now we can render as usual.
+      requestStore.prerenderResumeDataCache = null
+      requestStore.renderResumeDataCache = createRenderResumeDataCache(
+        prerenderResumeDataCache
+      )
+      requestStore.cacheSignal = null
+
+      // Reset mutable fields.
+      requestStore.prerenderPhase = undefined
+      requestStore.usedDynamic = undefined
+
+      // The initial render already wrote to its debug channel. We're not using it,
+      // so we need to create a new one.
+      const finalRenderDebugChannel =
+        setReactDebugChannel && createDebugChannel()
+      // We know that we won't discard this render, so we can set the debug channel up immediately.
+      if (finalRenderDebugChannel) {
+        setDebugChannelForClientRender(finalRenderDebugChannel)
+      }
+
+      const finalRscPayload = await getPayload()
+      const finalServerStream = await workUnitAsyncStorage.run(
+        requestStore,
+        scheduleInSequentialTasks,
+        () => {
+          // Static stage
+          requestStore.prerenderPhase = true
+          return ComponentMod.renderToReadableStream(
+            finalRscPayload,
+            clientReferenceManifest.clientModules,
+            {
+              onError: serverComponentsErrorHandler,
+              environmentName,
+              filterStackFrame,
+              debugChannel: finalRenderDebugChannel?.serverSide,
            }
          )
        },
        () => {
          // Dynamic stage
          requestStore.prerenderPhase = false
        }
      )
+
+      reactServerResult = new ReactServerResult(finalServerStream)
+    }
+
+    // TODO(restart-on-cache-miss):
+    // This can probably be optimized to do less work,
+    // because we've already made sure that we have warm caches.
     consoleAsyncStorage.run(
       { dim: true },
       spawnDynamicValidationInDev,
@@ -2205,8 +2350,6 @@ async function renderToStream(
       requestStore,
       devValidatingFallbackParams
     )
-
-    reactServerResult = new ReactServerResult(reactServerStream)
   } else {
     // This is a dynamic render. We don't do dynamic tracking because we're not prerendering
     const RSCPayload = await workUnitAsyncStorage.run(
@@ -2558,12 +2701,21 @@ async function renderToStream(
   }
 }
 
-function createDebugChannel():
-  | {
-      serverSide: { readable?: ReadableStream; writable: WritableStream }
-      clientSide: { readable: ReadableStream; writable?: WritableStream }
-    }
-  | undefined {
+type DebugChannelPair = {
+  serverSide: DebugChannelServer
+  clientSide: DebugChannelClient
+}
+
+type DebugChannelServer = {
+  readable?: ReadableStream<Uint8Array>
+  writable: WritableStream<Uint8Array>
+}
+type DebugChannelClient = {
+  readable: ReadableStream<Uint8Array>
+  writable?: WritableStream<Uint8Array>
+}
+
+function createDebugChannel(): DebugChannelPair | undefined {
   if (process.env.NODE_ENV === 'production') {
     return undefined
   }
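
The control flow added by the large hunk above can be summarized with the sketch below. `renderWithCacheRestart`, `renderOnce`, and `CacheTracker` are hypothetical simplifications written for this note (assuming the real render is driven by `pipelineInSequentialTasks` and `CacheSignal` as in the diff); the sketch only illustrates the decision to reuse or discard the first stream.

type RscStream = ReadableStream<Uint8Array>

interface CacheTracker {
  hasPendingReads(): boolean
  cacheReadyInRender(): Promise<void>
}

async function renderWithCacheRestart(
  renderOnce: (signal: AbortSignal) => Promise<RscStream | null>,
  caches: CacheTracker
): Promise<RscStream> {
  // First attempt: doubles as the real render (warm caches) or as a
  // prospective, cache-filling render (cold caches).
  const firstAttempt = new AbortController()
  const maybeStream = await renderOnce(firstAttempt.signal)
  if (maybeStream !== null) {
    // No pending cache reads after the static stage: use this stream as is.
    return maybeStream
  }
  // Cache miss: let all discovered cache reads settle, abort the first
  // attempt, and render again with warm caches.
  await caches.cacheReadyInRender()
  firstAttempt.abort()
  const retry = await renderOnce(new AbortController().signal)
  if (retry === null) {
    throw new Error('Expected the retry render to produce a stream')
  }
  return retry
}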

packages/next/src/server/app-render/cache-signal.ts

Lines changed: 22 additions & 0 deletions

@@ -5,6 +5,7 @@
  * and should only be used in codepaths gated with this feature.
  */
 
+import { waitAtLeastOneReactRenderTask } from '../../lib/scheduler'
 import { InvariantError } from '../../shared/lib/invariant-error'
 
 export class CacheSignal {
@@ -79,6 +80,23 @@
     })
   }
 
+  /**
+   * Like `cacheReady`, but for use when rendering (not prerendering).
+   * React schedules work differently between these two, which affects the timing
+   * of waiting for all caches to be discovered.
+   **/
+  async cacheReadyInRender() {
+    // During a render, React pings pending tasks (that are waiting for something async to resolve) using `setImmediate`.
+    // This is unlike a prerender, where they are pinged in a microtask.
+    // This means that, if we're warming caches via a render (not a prerender),
+    // we need to give React more time to continue rendering after a cache has resolved
+    // in order to make sure we've discovered all the caches needed for the current render.
+    do {
+      await this.cacheReady()
+      await waitAtLeastOneReactRenderTask()
+    } while (this.hasPendingReads())
+  }
+
   beginRead() {
     this.count++
 
@@ -114,6 +132,10 @@
     }
   }
 
+  hasPendingReads(): boolean {
+    return this.count > 0
+  }
+
   trackRead<T>(promise: Promise<T>) {
     this.beginRead()
     // `promise.finally()` still rejects, so don't use it here to avoid unhandled rejections
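
A small illustration of why `cacheReadyInRender` loops instead of awaiting `cacheReady` once: resolving one tracked read can let React continue rendering and discover another. The chained timeouts below are a hypothetical simulation written for this note, not code from the commit.

import { CacheSignal } from './cache-signal'

async function simulateIncrementalCacheDiscovery(): Promise<void> {
  const cacheSignal = new CacheSignal()

  // The second "cache read" is only started once the first one resolves,
  // mimicking a render that discovers caches incrementally.
  cacheSignal.trackRead(
    new Promise<void>((resolve) =>
      setTimeout(() => {
        cacheSignal.trackRead(new Promise<void>((r) => setTimeout(r, 10)))
        resolve()
      }, 10)
    )
  )

  // Resolves only after both reads (including the late one) have settled.
  await cacheSignal.cacheReadyInRender()
  console.assert(cacheSignal.hasPendingReads() === false)
}

void simulateIncrementalCacheDiscovery()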

packages/next/src/server/app-render/module-loading/track-module-loading.external.ts

Lines changed: 7 additions & 1 deletion

@@ -6,6 +6,12 @@ import {
   trackPendingChunkLoad,
   trackPendingImport,
   trackPendingModules,
+  trackPendingModulesInRender,
 } from './track-module-loading.instance' with { 'turbopack-transition': 'next-shared' }
 
-export { trackPendingChunkLoad, trackPendingImport, trackPendingModules }
+export {
+  trackPendingChunkLoad,
+  trackPendingImport,
+  trackPendingModules,
+  trackPendingModulesInRender,
+}
