Skip to content

Commit f7abc4a

Browse files
committed
[Flight] When halting omit any reference rather than refer to a shared missing chunk
When aborting a prerender we should leave references unfulfilled, not share a common unfulfilled reference. Functionally, today this doesn't matter because we don't have resuming, but the semantic is that the row was not available when the abort happened, and in a resume the row should fill in. But by pointing each task to a common unfulfilled chunk we lose the ability for these references to resolve to distinct values on resume. There are edge cases though, like if we're aborting while rendering and we have a model that needs to refer to some reference whose identity we don't know.
1 parent a960b92 commit f7abc4a

File tree

5 files changed

+162
-58
lines changed

5 files changed

+162
-58
lines changed

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js

Lines changed: 25 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2797,7 +2797,16 @@ describe('ReactFlightDOM', () => {
27972797
abortFizz('bam');
27982798
});
27992799

2800-
expect(errors).toEqual(['bam']);
2800+
if (__DEV__) {
2801+
expect(errors).toEqual([new Error('Connection closed.')]);
2802+
} else {
2803+
// This is likely a bug. In Dev we get a connection closed error
2804+
// because the debug info creates a chunk that has a pending status
2805+
// and when the stream finishes we error if any chunks are still pending.
2806+
// In production there is no debug info so the missing chunk is never instantiated
2807+
// because nothing triggers model evaluation before the stream completes
2808+
expect(errors).toEqual(['bam']);
2809+
}
28012810

28022811
const container = document.createElement('div');
28032812
await readInto(container, fizzReadable);
@@ -2919,10 +2928,11 @@ describe('ReactFlightDOM', () => {
29192928
});
29202929

29212930
const {prelude} = await pendingResult;
2931+
29222932
expect(errors).toEqual(['boom']);
2923-
const response = ReactServerDOMClient.createFromReadableStream(
2924-
Readable.toWeb(prelude),
2925-
);
2933+
2934+
const preludeWeb = Readable.toWeb(prelude);
2935+
const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);
29262936

29272937
const {writable: fizzWritable, readable: fizzReadable} = getTestStream();
29282938

@@ -2949,7 +2959,17 @@ describe('ReactFlightDOM', () => {
29492959
});
29502960

29512961
// one error per boundary
2952-
expect(errors).toEqual(['boom', 'boom', 'boom']);
2962+
if (__DEV__) {
2963+
const err = new Error('Connection closed.');
2964+
expect(errors).toEqual([err, err, err]);
2965+
} else {
2966+
// This is likely a bug. In Dev we get a connection closed error
2967+
// because the debug info creates a chunk that has a pending status
2968+
// and when the stream finishes we error if any chunks are still pending.
2969+
// In production there is no debug info so the missing chunk is never instantiated
2970+
// because nothing triggers model evaluation before the stream completes
2971+
expect(errors).toEqual(['boom', 'boom', 'boom']);
2972+
}
29532973

29542974
const container = document.createElement('div');
29552975
await readInto(container, fizzReadable);

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2454,12 +2454,28 @@ describe('ReactFlightDOMBrowser', () => {
24542454
passThrough(prelude),
24552455
);
24562456
const container = document.createElement('div');
2457-
const root = ReactDOMClient.createRoot(container);
2457+
errors.length = 0;
2458+
const root = ReactDOMClient.createRoot(container, {
2459+
onUncaughtError(err) {
2460+
errors.push(err);
2461+
},
2462+
});
24582463

24592464
await act(() => {
24602465
root.render(<ClientRoot response={response} />);
24612466
});
24622467

2463-
expect(container.innerHTML).toBe('<div>loading...</div>');
2468+
if (__DEV__) {
2469+
expect(errors).toEqual([new Error('Connection closed.')]);
2470+
expect(container.innerHTML).toBe('');
2471+
} else {
2472+
// This is likely a bug. In Dev we get a connection closed error
2473+
// because the debug info creates a chunk that has a pending status
2474+
// and when the stream finishes we error if any chunks are still pending.
2475+
// In production there is no debug info so the missing chunk is never instantiated
2476+
// because nothing triggers model evaluation before the stream completes
2477+
expect(errors).toEqual([]);
2478+
expect(container.innerHTML).toBe('<div>loading...</div>');
2479+
}
24642480
});
24652481
});

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1172,7 +1172,16 @@ describe('ReactFlightDOMEdge', () => {
11721172
),
11731173
);
11741174
fizzController.abort('bam');
1175-
expect(errors).toEqual(['bam']);
1175+
if (__DEV__) {
1176+
expect(errors).toEqual([new Error('Connection closed.')]);
1177+
} else {
1178+
// This is likely a bug. In Dev we get a connection closed error
1179+
// because the debug info creates a chunk that has a pending status
1180+
// and when the stream finishes we error if any chunks are still pending.
1181+
// In production there is no debug info so the missing chunk is never instantiated
1182+
// because nothing triggers model evaluation before the stream completes
1183+
expect(errors).toEqual(['bam']);
1184+
}
11761185
// Should still match the result when parsed
11771186
const result = await readResult(ssrStream);
11781187
const div = document.createElement('div');

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -509,7 +509,16 @@ describe('ReactFlightDOMNode', () => {
509509
),
510510
);
511511
ssrStream.abort('bam');
512-
expect(errors).toEqual(['bam']);
512+
if (__DEV__) {
513+
expect(errors).toEqual([new Error('Connection closed.')]);
514+
} else {
515+
// This is likely a bug. In Dev we get a connection closed error
516+
// because the debug info creates a chunk that has a pending status
517+
// and when the stream finishes we error if any chunks are still pending.
518+
// In production there is no debug info so the missing chunk is never instantiated
519+
// because nothing triggers model evaluation before the stream completes
520+
expect(errors).toEqual(['bam']);
521+
}
513522
// Should still match the result when parsed
514523
const result = await readResult(ssrStream);
515524
const div = document.createElement('div');

packages/react-server/src/ReactFlightServer.js

Lines changed: 99 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -649,9 +649,13 @@ function serializeThenable(
649649
// We can no longer accept any resolved values
650650
request.abortableTasks.delete(newTask);
651651
newTask.status = ABORTED;
652-
const errorId: number = (request.fatalError: any);
653-
const model = stringify(serializeByValueID(errorId));
654-
emitModelChunk(request, newTask.id, model);
652+
if (enableHalt && request.type === PRERENDER) {
653+
request.pendingChunks--;
654+
} else {
655+
const errorId: number = (request.fatalError: any);
656+
const model = stringify(serializeByValueID(errorId));
657+
emitModelChunk(request, newTask.id, model);
658+
}
655659
return newTask.id;
656660
}
657661
if (typeof thenable.status === 'string') {
@@ -1633,6 +1637,33 @@ function outlineTask(request: Request, task: Task): ReactJSONValue {
16331637
return serializeLazyID(newTask.id);
16341638
}
16351639

1640+
function outlineHaltedTask(
1641+
request: Request,
1642+
task: Task,
1643+
allowLazy: boolean,
1644+
): ReactJSONValue {
1645+
const newTask = createTask(
1646+
request,
1647+
task.model, // the currently rendering element
1648+
task.keyPath, // unlike outlineModel this one carries along context
1649+
task.implicitSlot,
1650+
request.abortableTasks,
1651+
__DEV__ ? task.debugOwner : null,
1652+
__DEV__ && enableOwnerStacks ? task.debugStack : null,
1653+
__DEV__ && enableOwnerStacks ? task.debugTask : null,
1654+
);
1655+
1656+
// We don't actually try the new task because it is being halted.
1657+
haltTask(newTask, request);
1658+
if (allowLazy) {
1659+
// We're halting in a position that can handle a lazy reference
1660+
return serializeLazyID(newTask.id);
1661+
} else {
1662+
// We're halting in a position that needs a value reference
1663+
return serializeByValueID(newTask.id);
1664+
}
1665+
}
1666+
16361667
function renderElement(
16371668
request: Request,
16381669
task: Task,
@@ -2278,6 +2309,20 @@ function renderModel(
22782309
((model: any).$$typeof === REACT_ELEMENT_TYPE ||
22792310
(model: any).$$typeof === REACT_LAZY_TYPE);
22802311

2312+
if (request.status === ABORTING) {
2313+
task.status = ABORTED;
2314+
if (enableHalt && request.type === PRERENDER) {
2315+
// This will create a new task and refer to it in this slot
2316+
// the new task won't be retried because we are aborting
2317+
return outlineHaltedTask(request, task, wasReactNode);
2318+
}
2319+
const errorId = (request.fatalError: any);
2320+
if (wasReactNode) {
2321+
return serializeLazyID(errorId);
2322+
}
2323+
return serializeByValueID(errorId);
2324+
}
2325+
22812326
const x =
22822327
thrownValue === SuspenseException
22832328
? // This is a special type of exception used for Suspense. For historical
@@ -2291,14 +2336,6 @@ function renderModel(
22912336
if (typeof x === 'object' && x !== null) {
22922337
// $FlowFixMe[method-unbinding]
22932338
if (typeof x.then === 'function') {
2294-
if (request.status === ABORTING) {
2295-
task.status = ABORTED;
2296-
const errorId: number = (request.fatalError: any);
2297-
if (wasReactNode) {
2298-
return serializeLazyID(errorId);
2299-
}
2300-
return serializeByValueID(errorId);
2301-
}
23022339
// Something suspended, we'll need to create a new task and resolve it later.
23032340
const newTask = createTask(
23042341
request,
@@ -2344,15 +2381,6 @@ function renderModel(
23442381
}
23452382
}
23462383

2347-
if (request.status === ABORTING) {
2348-
task.status = ABORTED;
2349-
const errorId: number = (request.fatalError: any);
2350-
if (wasReactNode) {
2351-
return serializeLazyID(errorId);
2352-
}
2353-
return serializeByValueID(errorId);
2354-
}
2355-
23562384
// Restore the context. We assume that this will be restored by the inner
23572385
// functions in case nothing throws so we don't use "finally" here.
23582386
task.keyPath = prevKeyPath;
@@ -3820,6 +3848,22 @@ function retryTask(request: Request, task: Task): void {
38203848
request.abortableTasks.delete(task);
38213849
task.status = COMPLETED;
38223850
} catch (thrownValue) {
3851+
if (request.status === ABORTING) {
3852+
request.abortableTasks.delete(task);
3853+
task.status = ABORTED;
3854+
if (enableHalt && request.type === PRERENDER) {
3855+
// When aborting a prerender with halt semantics we don't emit
3856+
// anything into the slot for a task that aborts, it remains unresolved
3857+
request.pendingChunks--;
3858+
} else {
3859+
// Otherwise we emit an error chunk into the task slot.
3860+
const errorId: number = (request.fatalError: any);
3861+
const model = stringify(serializeByValueID(errorId));
3862+
emitModelChunk(request, task.id, model);
3863+
}
3864+
return;
3865+
}
3866+
38233867
const x =
38243868
thrownValue === SuspenseException
38253869
? // This is a special type of exception used for Suspense. For historical
@@ -3832,14 +3876,6 @@ function retryTask(request: Request, task: Task): void {
38323876
if (typeof x === 'object' && x !== null) {
38333877
// $FlowFixMe[method-unbinding]
38343878
if (typeof x.then === 'function') {
3835-
if (request.status === ABORTING) {
3836-
request.abortableTasks.delete(task);
3837-
task.status = ABORTED;
3838-
const errorId: number = (request.fatalError: any);
3839-
const model = stringify(serializeByValueID(errorId));
3840-
emitModelChunk(request, task.id, model);
3841-
return;
3842-
}
38433879
// Something suspended again, let's pick it back up later.
38443880
task.status = PENDING;
38453881
task.thenableState = getThenableStateAfterSuspending();
@@ -3856,15 +3892,6 @@ function retryTask(request: Request, task: Task): void {
38563892
}
38573893
}
38583894

3859-
if (request.status === ABORTING) {
3860-
request.abortableTasks.delete(task);
3861-
task.status = ABORTED;
3862-
const errorId: number = (request.fatalError: any);
3863-
const model = stringify(serializeByValueID(errorId));
3864-
emitModelChunk(request, task.id, model);
3865-
return;
3866-
}
3867-
38683895
request.abortableTasks.delete(task);
38693896
task.status = ERRORED;
38703897
const digest = logRecoverableError(request, x, task);
@@ -3942,6 +3969,17 @@ function abortTask(task: Task, request: Request, errorId: number): void {
39423969
request.completedErrorChunks.push(processedChunk);
39433970
}
39443971

3972+
function haltTask(task: Task, request: Request): void {
3973+
if (task.status === RENDERING) {
3974+
// this task will be halted by the render
3975+
return;
3976+
}
3977+
task.status = ABORTED;
3978+
// We don't actually emit anything for this task id because we are intentionally
3979+
// leaving the reference unfulfilled.
3980+
request.pendingChunks--;
3981+
}
3982+
39453983
function flushCompletedChunks(
39463984
request: Request,
39473985
destination: Destination,
@@ -4087,12 +4125,6 @@ export function abort(request: Request, reason: mixed): void {
40874125
}
40884126
const abortableTasks = request.abortableTasks;
40894127
if (abortableTasks.size > 0) {
4090-
// We have tasks to abort. We'll emit one error row and then emit a reference
4091-
// to that row from every row that's still remaining if we are rendering. If we
4092-
// are prerendering (and halt semantics are enabled) we will refer to an error row
4093-
// but not actually emit it so the receiver can halt at that point rather than error.
4094-
const errorId = request.nextChunkId++;
4095-
request.fatalError = errorId;
40964128
if (
40974129
enablePostpone &&
40984130
typeof reason === 'object' &&
@@ -4101,10 +4133,20 @@ export function abort(request: Request, reason: mixed): void {
41014133
) {
41024134
const postponeInstance: Postpone = (reason: any);
41034135
logPostpone(request, postponeInstance.message, null);
4104-
if (!enableHalt || request.type === PRERENDER) {
4105-
// When prerendering with halt semantics we omit the referred to postpone.
4136+
if (enableHalt && request.type === PRERENDER) {
4137+
// When prerendering with halt semantics we simply halt the task
4138+
// and leave the reference unfulfilled.
4139+
abortableTasks.forEach(task => haltTask(task, request));
4140+
abortableTasks.clear();
4141+
} else {
4142+
// When rendering we produce a shared postpone chunk and then
4143+
// fulfill each task with a reference to that chunk.
4144+
const errorId = request.nextChunkId++;
4145+
request.fatalError = errorId;
41064146
request.pendingChunks++;
41074147
emitPostponeChunk(request, errorId, postponeInstance);
4148+
abortableTasks.forEach(task => abortTask(task, request, errorId));
4149+
abortableTasks.clear();
41084150
}
41094151
} else {
41104152
const error =
@@ -4120,14 +4162,22 @@ export function abort(request: Request, reason: mixed): void {
41204162
)
41214163
: reason;
41224164
const digest = logRecoverableError(request, error, null);
4123-
if (!enableHalt || request.type === RENDER) {
4124-
// When prerendering with halt semantics we omit the referred to error.
4165+
if (enableHalt && request.type === PRERENDER) {
4166+
// When prerendering with halt semantics we simply halt the task
4167+
// and leave the reference unfulfilled.
4168+
abortableTasks.forEach(task => haltTask(task, request));
4169+
abortableTasks.clear();
4170+
} else {
4171+
// When rendering we produce a shared error chunk and then
4172+
// fulfill each task with a reference to that chunk.
4173+
const errorId = request.nextChunkId++;
4174+
request.fatalError = errorId;
41254175
request.pendingChunks++;
41264176
emitErrorChunk(request, errorId, digest, error);
4177+
abortableTasks.forEach(task => abortTask(task, request, errorId));
4178+
abortableTasks.clear();
41274179
}
41284180
}
4129-
abortableTasks.forEach(task => abortTask(task, request, errorId));
4130-
abortableTasks.clear();
41314181
const onAllReady = request.onAllReady;
41324182
onAllReady();
41334183
}

0 commit comments

Comments
 (0)