diff --git a/MAINTAINERS b/MAINTAINERS
index 5ad514035dd53..e48736bc1ca41 100644
--- a/MAINTAINERS
+++ b/MAINTAINERS
@@ -1,5 +1,6 @@
 acdlite
 eps1lon
+EugeneChoi4
 gaearon
 gnoff
 unstubbable
diff --git a/packages/react-client/src/ReactFlightClient.js b/packages/react-client/src/ReactFlightClient.js
index 74a412d6f4c0a..6e96f2f0378ec 100644
--- a/packages/react-client/src/ReactFlightClient.js
+++ b/packages/react-client/src/ReactFlightClient.js
@@ -1337,7 +1337,11 @@ function fulfillReference(
   const {response, handler, parentObject, key, map, path} = reference;

   for (let i = 1; i < path.length; i++) {
-    while (value.$$typeof === REACT_LAZY_TYPE) {
+    while (
+      typeof value === 'object' &&
+      value !== null &&
+      value.$$typeof === REACT_LAZY_TYPE
+    ) {
       // We never expect to see a Lazy node on this path because we encode those as
       // separate models. This must mean that we have inserted an extra lazy node
       // e.g. to replace a blocked element. We must instead look for it inside.
@@ -1408,6 +1412,39 @@
     }
     value = value[path[i]];
   }
+
+  while (
+    typeof value === 'object' &&
+    value !== null &&
+    value.$$typeof === REACT_LAZY_TYPE
+  ) {
+    // If what we're referencing is a Lazy it must be because we inserted one as a virtual node
+    // while it was blocked by other data. If it's no longer blocked, we can unwrap it.
+    const referencedChunk: SomeChunk<any> = value._payload;
+    if (referencedChunk === handler.chunk) {
+      // This is a reference to the thing we're currently blocking. We can peek
+      // inside of it to get the value.
+      value = handler.value;
+      continue;
+    } else {
+      switch (referencedChunk.status) {
+        case RESOLVED_MODEL:
+          initializeModelChunk(referencedChunk);
+          break;
+        case RESOLVED_MODULE:
+          initializeModuleChunk(referencedChunk);
+          break;
+      }
+      switch (referencedChunk.status) {
+        case INITIALIZED: {
+          value = referencedChunk.value;
+          continue;
+        }
+      }
+    }
+    break;
+  }
+
   const mappedValue = map(response, value, parentObject, key);
   parentObject[key] = mappedValue;

@@ -1855,7 +1892,11 @@ function getOutlinedModel(
     case INITIALIZED:
       let value = chunk.value;
       for (let i = 1; i < path.length; i++) {
-        while (value.$$typeof === REACT_LAZY_TYPE) {
+        while (
+          typeof value === 'object' &&
+          value !== null &&
+          value.$$typeof === REACT_LAZY_TYPE
+        ) {
           const referencedChunk: SomeChunk<any> = value._payload;
           switch (referencedChunk.status) {
             case RESOLVED_MODEL:
@@ -1924,6 +1965,32 @@
         }
         value = value[path[i]];
       }
+
+      while (
+        typeof value === 'object' &&
+        value !== null &&
+        value.$$typeof === REACT_LAZY_TYPE
+      ) {
+        // If what we're referencing is a Lazy it must be because we inserted one as a virtual node
+        // while it was blocked by other data. If it's no longer blocked, we can unwrap it.
+        const referencedChunk: SomeChunk<any> = value._payload;
+        switch (referencedChunk.status) {
+          case RESOLVED_MODEL:
+            initializeModelChunk(referencedChunk);
+            break;
+          case RESOLVED_MODULE:
+            initializeModuleChunk(referencedChunk);
+            break;
+        }
+        switch (referencedChunk.status) {
+          case INITIALIZED: {
+            value = referencedChunk.value;
+            continue;
+          }
+        }
+        break;
+      }
+
       const chunkValue = map(response, value, parentObject, key);
       if (
         parentObject[0] === REACT_ELEMENT_TYPE &&
diff --git a/packages/react-server-dom-turbopack/src/client/ReactFlightClientConfigBundlerTurbopack.js b/packages/react-server-dom-turbopack/src/client/ReactFlightClientConfigBundlerTurbopack.js
index ba1c220fa4f32..f061fa9816588 100644
--- a/packages/react-server-dom-turbopack/src/client/ReactFlightClientConfigBundlerTurbopack.js
+++ b/packages/react-server-dom-turbopack/src/client/ReactFlightClientConfigBundlerTurbopack.js
@@ -132,7 +132,19 @@ export function resolveServerReference(
       );
     }
   }
-  // TODO: This needs to return async: true if it's an async module.
+  if (resolvedModuleData.async) {
+    // If the module is marked as async in a Client Reference, we don't actually care.
+    // What matters is whether the consumer wants to unwrap it or not.
+    // For Server References, it is different because the consumer is completely internal
+    // to the bundler. So instead of passing it to each reference we can mark it in the
+    // manifest.
+    return [
+      resolvedModuleData.id,
+      resolvedModuleData.chunks,
+      name,
+      1 /* async */,
+    ];
+  }
   return [resolvedModuleData.id, resolvedModuleData.chunks, name];
 }

diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js
index 31bea759a0a89..66203af1fefdb 100644
--- a/packages/react-server/src/ReactFlightServer.js
+++ b/packages/react-server/src/ReactFlightServer.js
@@ -4702,6 +4702,70 @@ function renderDebugModel(
           element._store.validated,
         ];
       }
+      case REACT_LAZY_TYPE: {
+        // To avoid the side-effects of actually initializing a lazy, we make
+        // some assumptions about the structure of the payload even though
+        // that's not really part of the contract. In practice, this is really
+        // just coming from the React.lazy helper or Flight.
+        const lazy: LazyComponent<any, any> = (value: any);
+        const payload = lazy._payload;
+
+        if (payload !== null && typeof payload === 'object') {
+          // React.lazy constructor
+          switch (payload._status) {
+            case -1 /* Uninitialized */:
+            case 0 /* Pending */:
+              break;
+            case 1 /* Resolved */: {
+              const id = outlineDebugModel(request, counter, payload._result);
+              return serializeLazyID(id);
+            }
+            case 2 /* Rejected */: {
+              // We don't log these errors since they didn't actually throw into
+              // Flight.
+              const digest = '';
+              const id = request.nextChunkId++;
+              emitErrorChunk(request, id, digest, payload._result, true, null);
+              return serializeLazyID(id);
+            }
+          }
+
+          // React Flight
+          switch (payload.status) {
+            case 'pending':
+            case 'blocked':
+            case 'resolved_model':
+              // The value is an uninitialized model from the Flight client.
+              // It's not very useful to emit that.
+              break;
+            case 'resolved_module':
+              // The value is client reference metadata from the Flight client.
+              // It's likely for SSR, so we choose not to emit it.
+              break;
+            case 'fulfilled': {
+              const id = outlineDebugModel(request, counter, payload.value);
+              return serializeLazyID(id);
+            }
+            case 'rejected': {
+              // We don't log these errors since they didn't actually throw into
+              // Flight.
+              const digest = '';
+              const id = request.nextChunkId++;
+              emitErrorChunk(request, id, digest, payload.reason, true, null);
+              return serializeLazyID(id);
+            }
+          }
+        }
+
+        // We couldn't emit a resolved or rejected value synchronously. For now,
+        // we emit this as a halted chunk. TODO: We could maybe also handle
+        // pending lazy debug models like we do in serializeDebugThenable,
+        // if/when we determine that it's worth the added complexity.
+        request.pendingDebugChunks++;
+        const id = request.nextChunkId++;
+        emitDebugHaltChunk(request, id);
+        return serializeLazyID(id);
+      }
     }

     // $FlowFixMe[method-unbinding]
diff --git a/scripts/release/README.md b/scripts/release/README.md
index 475714ed7d172..77042a5dac8b6 100644
--- a/scripts/release/README.md
+++ b/scripts/release/README.md
@@ -37,7 +37,7 @@ The high level process of creating releases is [documented below](#process). Ind
 If your code lands in the main branch, it will be automatically published to the prerelease channels within the next weekday. However, if you want to immediately publish a prerelease, you can trigger the job to run immediately via the GitHub UI:

 1. Wait for the commit you want to release to finish its [(Runtime) Build and Test workflow](https://github.com/facebook/react/actions/workflows/runtime_build_and_test.yml), as the prerelease script needs to download the build from that workflow.
-2. Copy the git sha of whichever commit you are trying to release
+2. Copy the full git sha of whichever commit you are trying to release
 3. Go to https://github.com/facebook/react/actions/workflows/runtime_prereleases_manual.yml
 4. Paste the git sha into the "Run workflow" dropdown
 5. Let the job finish and it will be released on npm
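
Note (not part of the patch): a minimal standalone sketch of why the guard added around the `$$typeof` checks above matters. A value reached while walking a reference path can be `null` or a primitive, and `null.$$typeof` throws a TypeError, so the code must confirm it has a non-null object before reading `.$$typeof`. The `isLazy` helper is hypothetical and only illustrates the guard; `REACT_LAZY_TYPE` is React's registered `Symbol.for('react.lazy')`.

```js
// Hypothetical helper illustrating the guard added in this diff.
const REACT_LAZY_TYPE = Symbol.for('react.lazy');

function isLazy(value) {
  return (
    typeof value === 'object' &&
    value !== null &&
    value.$$typeof === REACT_LAZY_TYPE
  );
}

// Without the guard, `null.$$typeof` would throw a TypeError,
// and primitives such as strings are never lazy wrappers anyway.
console.log(isLazy(null)); // false
console.log(isLazy('some text')); // false
console.log(isLazy({$$typeof: REACT_LAZY_TYPE, _payload: null, _init: p => p})); // true
```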