Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions MAINTAINERS
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
acdlite
eps1lon
EugeneChoi4
gaearon
gnoff
unstubbable
Expand Down
71 changes: 69 additions & 2 deletions packages/react-client/src/ReactFlightClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -1337,7 +1337,11 @@ function fulfillReference(
const {response, handler, parentObject, key, map, path} = reference;

for (let i = 1; i < path.length; i++) {
while (value.$$typeof === REACT_LAZY_TYPE) {
while (
typeof value === 'object' &&
value !== null &&
value.$$typeof === REACT_LAZY_TYPE
) {
// We never expect to see a Lazy node on this path because we encode those as
// separate models. This must mean that we have inserted an extra lazy node
// e.g. to replace a blocked element. We must instead look for it inside.
Expand Down Expand Up @@ -1408,6 +1412,39 @@ function fulfillReference(
}
value = value[path[i]];
}

while (
typeof value === 'object' &&
value !== null &&
value.$$typeof === REACT_LAZY_TYPE
) {
// If what we're referencing is a Lazy it must be because we inserted one as a virtual node
// while it was blocked by other data. If it's no longer blocked, we can unwrap it.
const referencedChunk: SomeChunk<any> = value._payload;
if (referencedChunk === handler.chunk) {
// This is a reference to the thing we're currently blocking. We can peek
// inside of it to get the value.
value = handler.value;
continue;
} else {
switch (referencedChunk.status) {
case RESOLVED_MODEL:
initializeModelChunk(referencedChunk);
break;
case RESOLVED_MODULE:
initializeModuleChunk(referencedChunk);
break;
}
switch (referencedChunk.status) {
case INITIALIZED: {
value = referencedChunk.value;
continue;
}
}
}
break;
}

const mappedValue = map(response, value, parentObject, key);
parentObject[key] = mappedValue;

Expand Down Expand Up @@ -1855,7 +1892,11 @@ function getOutlinedModel<T>(
case INITIALIZED:
let value = chunk.value;
for (let i = 1; i < path.length; i++) {
while (value.$$typeof === REACT_LAZY_TYPE) {
while (
typeof value === 'object' &&
value !== null &&
value.$$typeof === REACT_LAZY_TYPE
) {
const referencedChunk: SomeChunk<any> = value._payload;
switch (referencedChunk.status) {
case RESOLVED_MODEL:
Expand Down Expand Up @@ -1924,6 +1965,32 @@ function getOutlinedModel<T>(
}
value = value[path[i]];
}

while (
typeof value === 'object' &&
value !== null &&
value.$$typeof === REACT_LAZY_TYPE
) {
// If what we're referencing is a Lazy it must be because we inserted one as a virtual node
// while it was blocked by other data. If it's no longer blocked, we can unwrap it.
const referencedChunk: SomeChunk<any> = value._payload;
switch (referencedChunk.status) {
case RESOLVED_MODEL:
initializeModelChunk(referencedChunk);
break;
case RESOLVED_MODULE:
initializeModuleChunk(referencedChunk);
break;
}
switch (referencedChunk.status) {
case INITIALIZED: {
value = referencedChunk.value;
continue;
}
}
break;
}

const chunkValue = map(response, value, parentObject, key);
if (
parentObject[0] === REACT_ELEMENT_TYPE &&
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,19 @@ export function resolveServerReference<T>(
);
}
}
// TODO: This needs to return async: true if it's an async module.
if (resolvedModuleData.async) {
// If the module is marked as async in a Client Reference, we don't actually care.
// What matters is whether the consumer wants to unwrap it or not.
// For Server References, it is different because the consumer is completely internal
// to the bundler. So instead of passing it to each reference we can mark it in the
// manifest.
return [
resolvedModuleData.id,
resolvedModuleData.chunks,
name,
1 /* async */,
];
}
return [resolvedModuleData.id, resolvedModuleData.chunks, name];
}

Expand Down
64 changes: 64 additions & 0 deletions packages/react-server/src/ReactFlightServer.js
Original file line number Diff line number Diff line change
Expand Up @@ -4702,6 +4702,70 @@ function renderDebugModel(
element._store.validated,
];
}
case REACT_LAZY_TYPE: {
// To avoid actually initializing a lazy causing a side-effect, we make
// some assumptions about the structure of the payload even though
// that's not really part of the contract. In practice, this is really
// just coming from React.lazy helper or Flight.
const lazy: LazyComponent<any, any> = (value: any);
const payload = lazy._payload;

if (payload !== null && typeof payload === 'object') {
// React.lazy constructor
switch (payload._status) {
case -1 /* Uninitialized */:
case 0 /* Pending */:
break;
case 1 /* Resolved */: {
const id = outlineDebugModel(request, counter, payload._result);
return serializeLazyID(id);
}
case 2 /* Rejected */: {
// We don't log these errors since they didn't actually throw into
// Flight.
const digest = '';
const id = request.nextChunkId++;
emitErrorChunk(request, id, digest, payload._result, true, null);
return serializeLazyID(id);
}
}

// React Flight
switch (payload.status) {
case 'pending':
case 'blocked':
case 'resolved_model':
// The value is an uninitialized model from the Flight client.
// It's not very useful to emit that.
break;
case 'resolved_module':
// The value is client reference metadata from the Flight client.
// It's likely for SSR, so we choose not to emit it.
break;
case 'fulfilled': {
const id = outlineDebugModel(request, counter, payload.value);
return serializeLazyID(id);
}
case 'rejected': {
// We don't log these errors since they didn't actually throw into
// Flight.
const digest = '';
const id = request.nextChunkId++;
emitErrorChunk(request, id, digest, payload.reason, true, null);
return serializeLazyID(id);
}
}
}

// We couldn't emit a resolved or rejected value synchronously. For now,
// we emit this as a halted chunk. TODO: We could maybe also handle
// pending lazy debug models like we do in serializeDebugThenable,
// if/when we determine that it's worth the added complexity.
request.pendingDebugChunks++;
const id = request.nextChunkId++;
emitDebugHaltChunk(request, id);
return serializeLazyID(id);
}
}

// $FlowFixMe[method-unbinding]
Expand Down
2 changes: 1 addition & 1 deletion scripts/release/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ The high level process of creating releases is [documented below](#process). Ind
If your code lands in the main branch, it will be automatically published to the prerelease channels within the next weekday. However, if you want to immediately publish a prerelease, you can trigger the job to run immediately via the GitHub UI:

1. Wait for the commit you want to release to finish its [(Runtime) Build and Test workflow](https://github.com/facebook/react/actions/workflows/runtime_build_and_test.yml), as the prerelease script needs to download the build from that workflow.
2. Copy the git sha of whichever commit you are trying to release
2. Copy the full git sha of whichever commit you are trying to release
3. Go to https://github.com/facebook/react/actions/workflows/runtime_prereleases_manual.yml
4. Paste the git sha into the "Run workflow" dropdown
5. Let the job finish and it will be released on npm
Expand Down
Loading