Skip to content

Commit 92d26c8

Browse files
authored
[Flight] When halting omit any reference rather than refer to a shared missing chunk (#30750)
When aborting a prerender we should leave references unfulfilled, not share a common unfulfilled reference. Functionally, today this doesn't matter because we don't have resuming, but the semantic is that the row was not available when the abort happened, and in a resume the row should fill in. But by pointing each task to a common unfulfilled chunk we lose the ability for these references to resolve to distinct values on resume.
1 parent 2505bf9 commit 92d26c8

File tree

5 files changed

+153
-58
lines changed

5 files changed

+153
-58
lines changed

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js

Lines changed: 25 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2797,7 +2797,16 @@ describe('ReactFlightDOM', () => {
27972797
abortFizz('bam');
27982798
});
27992799

2800-
expect(errors).toEqual(['bam']);
2800+
if (__DEV__) {
2801+
expect(errors).toEqual([new Error('Connection closed.')]);
2802+
} else {
2803+
// This is likely a bug. In Dev we get a connection closed error
2804+
// because the debug info creates a chunk that has a pending status
2805+
// and when the stream finishes we error if any chunks are still pending.
2806+
// In production there is no debug info so the missing chunk is never instantiated
2807+
// because nothing triggers model evaluation before the stream completes
2808+
expect(errors).toEqual(['bam']);
2809+
}
28012810

28022811
const container = document.createElement('div');
28032812
await readInto(container, fizzReadable);
@@ -2919,10 +2928,11 @@ describe('ReactFlightDOM', () => {
29192928
});
29202929

29212930
const {prelude} = await pendingResult;
2931+
29222932
expect(errors).toEqual(['boom']);
2923-
const response = ReactServerDOMClient.createFromReadableStream(
2924-
Readable.toWeb(prelude),
2925-
);
2933+
2934+
const preludeWeb = Readable.toWeb(prelude);
2935+
const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);
29262936

29272937
const {writable: fizzWritable, readable: fizzReadable} = getTestStream();
29282938

@@ -2949,7 +2959,17 @@ describe('ReactFlightDOM', () => {
29492959
});
29502960

29512961
// one error per boundary
2952-
expect(errors).toEqual(['boom', 'boom', 'boom']);
2962+
if (__DEV__) {
2963+
const err = new Error('Connection closed.');
2964+
expect(errors).toEqual([err, err, err]);
2965+
} else {
2966+
// This is likely a bug. In Dev we get a connection closed error
2967+
// because the debug info creates a chunk that has a pending status
2968+
// and when the stream finishes we error if any chunks are still pending.
2969+
// In production there is no debug info so the missing chunk is never instantiated
2970+
// because nothing triggers model evaluation before the stream completes
2971+
expect(errors).toEqual(['boom', 'boom', 'boom']);
2972+
}
29532973

29542974
const container = document.createElement('div');
29552975
await readInto(container, fizzReadable);

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2454,12 +2454,28 @@ describe('ReactFlightDOMBrowser', () => {
24542454
passThrough(prelude),
24552455
);
24562456
const container = document.createElement('div');
2457-
const root = ReactDOMClient.createRoot(container);
2457+
errors.length = 0;
2458+
const root = ReactDOMClient.createRoot(container, {
2459+
onUncaughtError(err) {
2460+
errors.push(err);
2461+
},
2462+
});
24582463

24592464
await act(() => {
24602465
root.render(<ClientRoot response={response} />);
24612466
});
24622467

2463-
expect(container.innerHTML).toBe('<div>loading...</div>');
2468+
if (__DEV__) {
2469+
expect(errors).toEqual([new Error('Connection closed.')]);
2470+
expect(container.innerHTML).toBe('');
2471+
} else {
2472+
// This is likely a bug. In Dev we get a connection closed error
2473+
// because the debug info creates a chunk that has a pending status
2474+
// and when the stream finishes we error if any chunks are still pending.
2475+
// In production there is no debug info so the missing chunk is never instantiated
2476+
// because nothing triggers model evaluation before the stream completes
2477+
expect(errors).toEqual([]);
2478+
expect(container.innerHTML).toBe('<div>loading...</div>');
2479+
}
24642480
});
24652481
});

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1172,7 +1172,16 @@ describe('ReactFlightDOMEdge', () => {
11721172
),
11731173
);
11741174
fizzController.abort('bam');
1175-
expect(errors).toEqual(['bam']);
1175+
if (__DEV__) {
1176+
expect(errors).toEqual([new Error('Connection closed.')]);
1177+
} else {
1178+
// This is likely a bug. In Dev we get a connection closed error
1179+
// because the debug info creates a chunk that has a pending status
1180+
// and when the stream finishes we error if any chunks are still pending.
1181+
// In production there is no debug info so the missing chunk is never instantiated
1182+
// because nothing triggers model evaluation before the stream completes
1183+
expect(errors).toEqual(['bam']);
1184+
}
11761185
// Should still match the result when parsed
11771186
const result = await readResult(ssrStream);
11781187
const div = document.createElement('div');

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -509,7 +509,16 @@ describe('ReactFlightDOMNode', () => {
509509
),
510510
);
511511
ssrStream.abort('bam');
512-
expect(errors).toEqual(['bam']);
512+
if (__DEV__) {
513+
expect(errors).toEqual([new Error('Connection closed.')]);
514+
} else {
515+
// This is likely a bug. In Dev we get a connection closed error
516+
// because the debug info creates a chunk that has a pending status
517+
// and when the stream finishes we error if any chunks are still pending.
518+
// In production there is no debug info so the missing chunk is never instantiated
519+
// because nothing triggers model evaluation before the stream completes
520+
expect(errors).toEqual(['bam']);
521+
}
513522
// Should still match the result when parsed
514523
const result = await readResult(ssrStream);
515524
const div = document.createElement('div');

packages/react-server/src/ReactFlightServer.js

Lines changed: 90 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -649,9 +649,13 @@ function serializeThenable(
649649
// We can no longer accept any resolved values
650650
request.abortableTasks.delete(newTask);
651651
newTask.status = ABORTED;
652-
const errorId: number = (request.fatalError: any);
653-
const model = stringify(serializeByValueID(errorId));
654-
emitModelChunk(request, newTask.id, model);
652+
if (enableHalt && request.type === PRERENDER) {
653+
request.pendingChunks--;
654+
} else {
655+
const errorId: number = (request.fatalError: any);
656+
const model = stringify(serializeByValueID(errorId));
657+
emitModelChunk(request, newTask.id, model);
658+
}
655659
return newTask.id;
656660
}
657661
if (typeof thenable.status === 'string') {
@@ -1633,6 +1637,24 @@ function outlineTask(request: Request, task: Task): ReactJSONValue {
16331637
return serializeLazyID(newTask.id);
16341638
}
16351639

1640+
function outlineHaltedTask(
1641+
request: Request,
1642+
task: Task,
1643+
allowLazy: boolean,
1644+
): ReactJSONValue {
1645+
// In the future if we track task state for resuming we'll maybe need to
1646+
// construct an actual task here but since we're never going to retry it
1647+
// we just claim the id and serialize it according to the proper convention
1648+
const taskId = request.nextChunkId++;
1649+
if (allowLazy) {
1650+
// We're halting in a position that can handle a lazy reference
1651+
return serializeLazyID(taskId);
1652+
} else {
1653+
// We're halting in a position that needs a value reference
1654+
return serializeByValueID(taskId);
1655+
}
1656+
}
1657+
16361658
function renderElement(
16371659
request: Request,
16381660
task: Task,
@@ -2278,6 +2300,20 @@ function renderModel(
22782300
((model: any).$$typeof === REACT_ELEMENT_TYPE ||
22792301
(model: any).$$typeof === REACT_LAZY_TYPE);
22802302

2303+
if (request.status === ABORTING) {
2304+
task.status = ABORTED;
2305+
if (enableHalt && request.type === PRERENDER) {
2306+
// This will create a new task and refer to it in this slot
2307+
// the new task won't be retried because we are aborting
2308+
return outlineHaltedTask(request, task, wasReactNode);
2309+
}
2310+
const errorId = (request.fatalError: any);
2311+
if (wasReactNode) {
2312+
return serializeLazyID(errorId);
2313+
}
2314+
return serializeByValueID(errorId);
2315+
}
2316+
22812317
const x =
22822318
thrownValue === SuspenseException
22832319
? // This is a special type of exception used for Suspense. For historical
@@ -2291,14 +2327,6 @@ function renderModel(
22912327
if (typeof x === 'object' && x !== null) {
22922328
// $FlowFixMe[method-unbinding]
22932329
if (typeof x.then === 'function') {
2294-
if (request.status === ABORTING) {
2295-
task.status = ABORTED;
2296-
const errorId: number = (request.fatalError: any);
2297-
if (wasReactNode) {
2298-
return serializeLazyID(errorId);
2299-
}
2300-
return serializeByValueID(errorId);
2301-
}
23022330
// Something suspended, we'll need to create a new task and resolve it later.
23032331
const newTask = createTask(
23042332
request,
@@ -2344,15 +2372,6 @@ function renderModel(
23442372
}
23452373
}
23462374

2347-
if (request.status === ABORTING) {
2348-
task.status = ABORTED;
2349-
const errorId: number = (request.fatalError: any);
2350-
if (wasReactNode) {
2351-
return serializeLazyID(errorId);
2352-
}
2353-
return serializeByValueID(errorId);
2354-
}
2355-
23562375
// Restore the context. We assume that this will be restored by the inner
23572376
// functions in case nothing throws so we don't use "finally" here.
23582377
task.keyPath = prevKeyPath;
@@ -3820,6 +3839,22 @@ function retryTask(request: Request, task: Task): void {
38203839
request.abortableTasks.delete(task);
38213840
task.status = COMPLETED;
38223841
} catch (thrownValue) {
3842+
if (request.status === ABORTING) {
3843+
request.abortableTasks.delete(task);
3844+
task.status = ABORTED;
3845+
if (enableHalt && request.type === PRERENDER) {
3846+
// When aborting a prerender with halt semantics we don't emit
3847+
// anything into the slot for a task that aborts, it remains unresolved
3848+
request.pendingChunks--;
3849+
} else {
3850+
// Otherwise we emit an error chunk into the task slot.
3851+
const errorId: number = (request.fatalError: any);
3852+
const model = stringify(serializeByValueID(errorId));
3853+
emitModelChunk(request, task.id, model);
3854+
}
3855+
return;
3856+
}
3857+
38233858
const x =
38243859
thrownValue === SuspenseException
38253860
? // This is a special type of exception used for Suspense. For historical
@@ -3832,14 +3867,6 @@ function retryTask(request: Request, task: Task): void {
38323867
if (typeof x === 'object' && x !== null) {
38333868
// $FlowFixMe[method-unbinding]
38343869
if (typeof x.then === 'function') {
3835-
if (request.status === ABORTING) {
3836-
request.abortableTasks.delete(task);
3837-
task.status = ABORTED;
3838-
const errorId: number = (request.fatalError: any);
3839-
const model = stringify(serializeByValueID(errorId));
3840-
emitModelChunk(request, task.id, model);
3841-
return;
3842-
}
38433870
// Something suspended again, let's pick it back up later.
38443871
task.status = PENDING;
38453872
task.thenableState = getThenableStateAfterSuspending();
@@ -3856,15 +3883,6 @@ function retryTask(request: Request, task: Task): void {
38563883
}
38573884
}
38583885

3859-
if (request.status === ABORTING) {
3860-
request.abortableTasks.delete(task);
3861-
task.status = ABORTED;
3862-
const errorId: number = (request.fatalError: any);
3863-
const model = stringify(serializeByValueID(errorId));
3864-
emitModelChunk(request, task.id, model);
3865-
return;
3866-
}
3867-
38683886
request.abortableTasks.delete(task);
38693887
task.status = ERRORED;
38703888
const digest = logRecoverableError(request, x, task);
@@ -3942,6 +3960,17 @@ function abortTask(task: Task, request: Request, errorId: number): void {
39423960
request.completedErrorChunks.push(processedChunk);
39433961
}
39443962

3963+
function haltTask(task: Task, request: Request): void {
3964+
if (task.status === RENDERING) {
3965+
// this task will be halted by the render
3966+
return;
3967+
}
3968+
task.status = ABORTED;
3969+
// We don't actually emit anything for this task id because we are intentionally
3970+
// leaving the reference unfulfilled.
3971+
request.pendingChunks--;
3972+
}
3973+
39453974
function flushCompletedChunks(
39463975
request: Request,
39473976
destination: Destination,
@@ -4087,12 +4116,6 @@ export function abort(request: Request, reason: mixed): void {
40874116
}
40884117
const abortableTasks = request.abortableTasks;
40894118
if (abortableTasks.size > 0) {
4090-
// We have tasks to abort. We'll emit one error row and then emit a reference
4091-
// to that row from every row that's still remaining if we are rendering. If we
4092-
// are prerendering (and halt semantics are enabled) we will refer to an error row
4093-
// but not actually emit it so the receiver can halt at that point rather than error.
4094-
const errorId = request.nextChunkId++;
4095-
request.fatalError = errorId;
40964119
if (
40974120
enablePostpone &&
40984121
typeof reason === 'object' &&
@@ -4101,10 +4124,20 @@ export function abort(request: Request, reason: mixed): void {
41014124
) {
41024125
const postponeInstance: Postpone = (reason: any);
41034126
logPostpone(request, postponeInstance.message, null);
4104-
if (!enableHalt || request.type === PRERENDER) {
4105-
// When prerendering with halt semantics we omit the referred to postpone.
4127+
if (enableHalt && request.type === PRERENDER) {
4128+
// When prerendering with halt semantics we simply halt the task
4129+
// and leave the reference unfulfilled.
4130+
abortableTasks.forEach(task => haltTask(task, request));
4131+
abortableTasks.clear();
4132+
} else {
4133+
// When rendering we produce a shared postpone chunk and then
4134+
// fulfill each task with a reference to that chunk.
4135+
const errorId = request.nextChunkId++;
4136+
request.fatalError = errorId;
41064137
request.pendingChunks++;
41074138
emitPostponeChunk(request, errorId, postponeInstance);
4139+
abortableTasks.forEach(task => abortTask(task, request, errorId));
4140+
abortableTasks.clear();
41084141
}
41094142
} else {
41104143
const error =
@@ -4120,14 +4153,22 @@ export function abort(request: Request, reason: mixed): void {
41204153
)
41214154
: reason;
41224155
const digest = logRecoverableError(request, error, null);
4123-
if (!enableHalt || request.type === RENDER) {
4124-
// When prerendering with halt semantics we omit the referred to error.
4156+
if (enableHalt && request.type === PRERENDER) {
4157+
// When prerendering with halt semantics we simply halt the task
4158+
// and leave the reference unfulfilled.
4159+
abortableTasks.forEach(task => haltTask(task, request));
4160+
abortableTasks.clear();
4161+
} else {
4162+
// When rendering we produce a shared error chunk and then
4163+
// fulfill each task with a reference to that chunk.
4164+
const errorId = request.nextChunkId++;
4165+
request.fatalError = errorId;
41254166
request.pendingChunks++;
41264167
emitErrorChunk(request, errorId, digest, error);
4168+
abortableTasks.forEach(task => abortTask(task, request, errorId));
4169+
abortableTasks.clear();
41274170
}
41284171
}
4129-
abortableTasks.forEach(task => abortTask(task, request, errorId));
4130-
abortableTasks.clear();
41314172
const onAllReady = request.onAllReady;
41324173
onAllReady();
41334174
}

0 commit comments

Comments
 (0)