Skip to content

Commit c4cac62

Browse files
committed
[Flight] When halting omit any reference rather than refer to a shared missing chunk
When aborting a prerender we should leave references unfulfilled, not share a common unfulfilled reference. Functionally, today this doesn't matter because we don't have resuming, but the semantic is that the row was not available when the abort happened, and in a resume the row should fill in. By pointing each task to a common unfulfilled chunk we lose the ability for these references to resolve to distinct values on resume. There are edge cases, though, such as aborting while rendering when we have a model that needs to refer to some reference whose identity we don't yet know.
1 parent a960b92 commit c4cac62

File tree

5 files changed

+143
-43
lines changed

5 files changed

+143
-43
lines changed

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js

Lines changed: 25 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2797,7 +2797,16 @@ describe('ReactFlightDOM', () => {
27972797
abortFizz('bam');
27982798
});
27992799

2800-
expect(errors).toEqual(['bam']);
2800+
if (__DEV__) {
2801+
expect(errors).toEqual([new Error('Connection closed.')]);
2802+
} else {
2803+
// This is likely a bug. In Dev we get a connection closed error
2804+
// because the debug info creates a chunk that has a pending status
2805+
// and when the stream finishes we error if any chunks are still pending.
2806+
// In production there is no debug info so the missing chunk is never instantiated
2807+
// because nothing triggers model evaluation before the stream completes
2808+
expect(errors).toEqual(['bam']);
2809+
}
28012810

28022811
const container = document.createElement('div');
28032812
await readInto(container, fizzReadable);
@@ -2919,10 +2928,11 @@ describe('ReactFlightDOM', () => {
29192928
});
29202929

29212930
const {prelude} = await pendingResult;
2931+
29222932
expect(errors).toEqual(['boom']);
2923-
const response = ReactServerDOMClient.createFromReadableStream(
2924-
Readable.toWeb(prelude),
2925-
);
2933+
2934+
const preludeWeb = Readable.toWeb(prelude);
2935+
const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);
29262936

29272937
const {writable: fizzWritable, readable: fizzReadable} = getTestStream();
29282938

@@ -2949,7 +2959,17 @@ describe('ReactFlightDOM', () => {
29492959
});
29502960

29512961
// one error per boundary
2952-
expect(errors).toEqual(['boom', 'boom', 'boom']);
2962+
if (__DEV__) {
2963+
const err = new Error('Connection closed.');
2964+
expect(errors).toEqual([err, err, err]);
2965+
} else {
2966+
// This is likely a bug. In Dev we get a connection closed error
2967+
// because the debug info creates a chunk that has a pending status
2968+
// and when the stream finishes we error if any chunks are still pending.
2969+
// In production there is no debug info so the missing chunk is never instantiated
2970+
// because nothing triggers model evaluation before the stream completes
2971+
expect(errors).toEqual(['boom', 'boom', 'boom']);
2972+
}
29532973

29542974
const container = document.createElement('div');
29552975
await readInto(container, fizzReadable);

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2454,12 +2454,28 @@ describe('ReactFlightDOMBrowser', () => {
24542454
passThrough(prelude),
24552455
);
24562456
const container = document.createElement('div');
2457-
const root = ReactDOMClient.createRoot(container);
2457+
errors.length = 0;
2458+
const root = ReactDOMClient.createRoot(container, {
2459+
onUncaughtError(err) {
2460+
errors.push(err);
2461+
},
2462+
});
24582463

24592464
await act(() => {
24602465
root.render(<ClientRoot response={response} />);
24612466
});
24622467

2463-
expect(container.innerHTML).toBe('<div>loading...</div>');
2468+
if (__DEV__) {
2469+
expect(errors).toEqual([new Error('Connection closed.')]);
2470+
expect(container.innerHTML).toBe('');
2471+
} else {
2472+
// This is likely a bug. In Dev we get a connection closed error
2473+
// because the debug info creates a chunk that has a pending status
2474+
// and when the stream finishes we error if any chunks are still pending.
2475+
// In production there is no debug info so the missing chunk is never instantiated
2476+
// because nothing triggers model evaluation before the stream completes
2477+
expect(errors).toEqual([]);
2478+
expect(container.innerHTML).toBe('<div>loading...</div>');
2479+
}
24642480
});
24652481
});

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1172,7 +1172,16 @@ describe('ReactFlightDOMEdge', () => {
11721172
),
11731173
);
11741174
fizzController.abort('bam');
1175-
expect(errors).toEqual(['bam']);
1175+
if (__DEV__) {
1176+
expect(errors).toEqual([new Error('Connection closed.')]);
1177+
} else {
1178+
// This is likely a bug. In Dev we get a connection closed error
1179+
// because the debug info creates a chunk that has a pending status
1180+
// and when the stream finishes we error if any chunks are still pending.
1181+
// In production there is no debug info so the missing chunk is never instantiated
1182+
// because nothing triggers model evaluation before the stream completes
1183+
expect(errors).toEqual(['bam']);
1184+
}
11761185
// Should still match the result when parsed
11771186
const result = await readResult(ssrStream);
11781187
const div = document.createElement('div');

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -509,7 +509,16 @@ describe('ReactFlightDOMNode', () => {
509509
),
510510
);
511511
ssrStream.abort('bam');
512-
expect(errors).toEqual(['bam']);
512+
if (__DEV__) {
513+
expect(errors).toEqual([new Error('Connection closed.')]);
514+
} else {
515+
// This is likely a bug. In Dev we get a connection closed error
516+
// because the debug info creates a chunk that has a pending status
517+
// and when the stream finishes we error if any chunks are still pending.
518+
// In production there is no debug info so the missing chunk is never instantiated
519+
// because nothing triggers model evaluation before the stream completes
520+
expect(errors).toEqual(['bam']);
521+
}
513522
// Should still match the result when parsed
514523
const result = await readResult(ssrStream);
515524
const div = document.createElement('div');

packages/react-server/src/ReactFlightServer.js

Lines changed: 80 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -649,9 +649,13 @@ function serializeThenable(
649649
// We can no longer accept any resolved values
650650
request.abortableTasks.delete(newTask);
651651
newTask.status = ABORTED;
652-
const errorId: number = (request.fatalError: any);
653-
const model = stringify(serializeByValueID(errorId));
654-
emitModelChunk(request, newTask.id, model);
652+
if (enableHalt && request.type === PRERENDER) {
653+
request.pendingChunks--;
654+
} else {
655+
const errorId: number = (request.fatalError: any);
656+
const model = stringify(serializeByValueID(errorId));
657+
emitModelChunk(request, newTask.id, model);
658+
}
655659
return newTask.id;
656660
}
657661
if (typeof thenable.status === 'string') {
@@ -2293,7 +2297,17 @@ function renderModel(
22932297
if (typeof x.then === 'function') {
22942298
if (request.status === ABORTING) {
22952299
task.status = ABORTED;
2296-
const errorId: number = (request.fatalError: any);
2300+
let errorId: number;
2301+
if (enableHalt && request.type === PRERENDER) {
2302+
// This is unfortunate that we would consume an id here. It suggests something
2303+
// isn't quite right with our model. When halting we don't emit any chunks
2304+
// but we're not in a position where we are avoiding emitting an entire
2305+
// chunk so we have to return something in this slot within the model so we
2306+
// consume an id and return it here knowing it will never resolve.
2307+
errorId = request.nextChunkId++;
2308+
} else {
2309+
errorId = (request.fatalError: any);
2310+
}
22972311
if (wasReactNode) {
22982312
return serializeLazyID(errorId);
22992313
}
@@ -2346,7 +2360,17 @@ function renderModel(
23462360

23472361
if (request.status === ABORTING) {
23482362
task.status = ABORTED;
2349-
const errorId: number = (request.fatalError: any);
2363+
let errorId: number;
2364+
if (enableHalt && request.type === PRERENDER) {
2365+
// This is unfortunate that we would consume an id here. It suggests something
2366+
// isn't quite right with our model. When halting we don't emit any chunks
2367+
// but we're not in a position where we are avoiding emitting an entire
2368+
// chunk so we have to return something in this slot within the model so we
2369+
// consume an id and return it here knowing it will never resolve.
2370+
errorId = request.nextChunkId++;
2371+
} else {
2372+
errorId = (request.fatalError: any);
2373+
}
23502374
if (wasReactNode) {
23512375
return serializeLazyID(errorId);
23522376
}
@@ -3820,6 +3844,22 @@ function retryTask(request: Request, task: Task): void {
38203844
request.abortableTasks.delete(task);
38213845
task.status = COMPLETED;
38223846
} catch (thrownValue) {
3847+
if (request.status === ABORTING) {
3848+
request.abortableTasks.delete(task);
3849+
task.status = ABORTED;
3850+
if (enableHalt && request.type === PRERENDER) {
3851+
// When aborting a prerener with halt semantics we don't emit
3852+
// anything into the slot for a task that aborts, it remains unresolved
3853+
request.pendingChunks--;
3854+
} else {
3855+
// Otherwise we emit an error chunk into the task slot.
3856+
const errorId: number = (request.fatalError: any);
3857+
const model = stringify(serializeByValueID(errorId));
3858+
emitModelChunk(request, task.id, model);
3859+
}
3860+
return;
3861+
}
3862+
38233863
const x =
38243864
thrownValue === SuspenseException
38253865
? // This is a special type of exception used for Suspense. For historical
@@ -3832,14 +3872,6 @@ function retryTask(request: Request, task: Task): void {
38323872
if (typeof x === 'object' && x !== null) {
38333873
// $FlowFixMe[method-unbinding]
38343874
if (typeof x.then === 'function') {
3835-
if (request.status === ABORTING) {
3836-
request.abortableTasks.delete(task);
3837-
task.status = ABORTED;
3838-
const errorId: number = (request.fatalError: any);
3839-
const model = stringify(serializeByValueID(errorId));
3840-
emitModelChunk(request, task.id, model);
3841-
return;
3842-
}
38433875
// Something suspended again, let's pick it back up later.
38443876
task.status = PENDING;
38453877
task.thenableState = getThenableStateAfterSuspending();
@@ -3856,15 +3888,6 @@ function retryTask(request: Request, task: Task): void {
38563888
}
38573889
}
38583890

3859-
if (request.status === ABORTING) {
3860-
request.abortableTasks.delete(task);
3861-
task.status = ABORTED;
3862-
const errorId: number = (request.fatalError: any);
3863-
const model = stringify(serializeByValueID(errorId));
3864-
emitModelChunk(request, task.id, model);
3865-
return;
3866-
}
3867-
38683891
request.abortableTasks.delete(task);
38693892
task.status = ERRORED;
38703893
const digest = logRecoverableError(request, x, task);
@@ -3942,6 +3965,17 @@ function abortTask(task: Task, request: Request, errorId: number): void {
39423965
request.completedErrorChunks.push(processedChunk);
39433966
}
39443967

3968+
function haltTask(task: Task, request: Request): void {
3969+
if (task.status === RENDERING) {
3970+
// this task will be halted by the render
3971+
return;
3972+
}
3973+
task.status = ABORTED;
3974+
// We don't actually emit anything for this task id because we are intentionally
3975+
// leaving the reference unfulfilled.
3976+
request.pendingChunks--;
3977+
}
3978+
39453979
function flushCompletedChunks(
39463980
request: Request,
39473981
destination: Destination,
@@ -4087,12 +4121,6 @@ export function abort(request: Request, reason: mixed): void {
40874121
}
40884122
const abortableTasks = request.abortableTasks;
40894123
if (abortableTasks.size > 0) {
4090-
// We have tasks to abort. We'll emit one error row and then emit a reference
4091-
// to that row from every row that's still remaining if we are rendering. If we
4092-
// are prerendering (and halt semantics are enabled) we will refer to an error row
4093-
// but not actually emit it so the reciever can at that point rather than error.
4094-
const errorId = request.nextChunkId++;
4095-
request.fatalError = errorId;
40964124
if (
40974125
enablePostpone &&
40984126
typeof reason === 'object' &&
@@ -4101,10 +4129,20 @@ export function abort(request: Request, reason: mixed): void {
41014129
) {
41024130
const postponeInstance: Postpone = (reason: any);
41034131
logPostpone(request, postponeInstance.message, null);
4104-
if (!enableHalt || request.type === PRERENDER) {
4105-
// When prerendering with halt semantics we omit the referred to postpone.
4132+
if (enableHalt && request.type === PRERENDER) {
4133+
// When prerendering with halt semantics we simply halt the task
4134+
// and leave the reference unfulfilled.
4135+
abortableTasks.forEach(task => haltTask(task, request));
4136+
abortableTasks.clear();
4137+
} else {
4138+
// When rendering we produce a shared postpone chunk and then
4139+
// fulfill each task with a reference to that chunk.
4140+
const errorId = request.nextChunkId++;
4141+
request.fatalError = errorId;
41064142
request.pendingChunks++;
41074143
emitPostponeChunk(request, errorId, postponeInstance);
4144+
abortableTasks.forEach(task => abortTask(task, request, errorId));
4145+
abortableTasks.clear();
41084146
}
41094147
} else {
41104148
const error =
@@ -4120,14 +4158,22 @@ export function abort(request: Request, reason: mixed): void {
41204158
)
41214159
: reason;
41224160
const digest = logRecoverableError(request, error, null);
4123-
if (!enableHalt || request.type === RENDER) {
4124-
// When prerendering with halt semantics we omit the referred to error.
4161+
if (enableHalt && request.type === PRERENDER) {
4162+
// When prerendering with halt semantics we simply halt the task
4163+
// and leave the reference unfulfilled.
4164+
abortableTasks.forEach(task => haltTask(task, request));
4165+
abortableTasks.clear();
4166+
} else {
4167+
// When rendering we produce a shared error chunk and then
4168+
// fulfill each task with a reference to that chunk.
4169+
const errorId = request.nextChunkId++;
4170+
request.fatalError = errorId;
41254171
request.pendingChunks++;
41264172
emitErrorChunk(request, errorId, digest, error);
4173+
abortableTasks.forEach(task => abortTask(task, request, errorId));
4174+
abortableTasks.clear();
41274175
}
41284176
}
4129-
abortableTasks.forEach(task => abortTask(task, request, errorId));
4130-
abortableTasks.clear();
41314177
const onAllReady = request.onAllReady;
41324178
onAllReady();
41334179
}

0 commit comments

Comments
 (0)