Skip to content

Commit 6459ba7

Browse files
committed
Attempts to avoid consuming an error id while aborting.
There are edge cases though like if we're aborting while rendering and we have a model that needs to refer to some reference that we don't know the identity of
1 parent 3fd8dcb commit 6459ba7

File tree

5 files changed

+108
-31
lines changed

5 files changed

+108
-31
lines changed

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js

Lines changed: 25 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2797,7 +2797,16 @@ describe('ReactFlightDOM', () => {
27972797
abortFizz('bam');
27982798
});
27992799

2800-
expect(errors).toEqual(['bam']);
2800+
if (__DEV__) {
2801+
expect(errors).toEqual([new Error('Connection closed.')]);
2802+
} else {
2803+
// This is likely a bug. In Dev we get a connection closed error
2804+
// because the debug info creates a chunk that has a pending status
2805+
// and when the stream finishes we error if any chunks are still pending.
2806+
// In production there is no debug info so the missing chunk is never instantiated
2807+
// because nothing triggers model evaluation before the stream completes
2808+
expect(errors).toEqual(['bam']);
2809+
}
28012810

28022811
const container = document.createElement('div');
28032812
await readInto(container, fizzReadable);
@@ -2919,10 +2928,11 @@ describe('ReactFlightDOM', () => {
29192928
});
29202929

29212930
const {prelude} = await pendingResult;
2931+
29222932
expect(errors).toEqual(['boom']);
2923-
const response = ReactServerDOMClient.createFromReadableStream(
2924-
Readable.toWeb(prelude),
2925-
);
2933+
2934+
const preludeWeb = Readable.toWeb(prelude);
2935+
const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);
29262936

29272937
const {writable: fizzWritable, readable: fizzReadable} = getTestStream();
29282938

@@ -2949,7 +2959,17 @@ describe('ReactFlightDOM', () => {
29492959
});
29502960

29512961
// one error per boundary
2952-
expect(errors).toEqual(['boom', 'boom', 'boom']);
2962+
if (__DEV__) {
2963+
const err = new Error('Connection closed.');
2964+
expect(errors).toEqual([err, err, err]);
2965+
} else {
2966+
// This is likely a bug. In Dev we get a connection closed error
2967+
// because the debug info creates a chunk that has a pending status
2968+
// and when the stream finishes we error if any chunks are still pending.
2969+
// In production there is no debug info so the missing chunk is never instantiated
2970+
// because nothing triggers model evaluation before the stream completes
2971+
expect(errors).toEqual(['boom', 'boom', 'boom']);
2972+
}
29532973

29542974
const container = document.createElement('div');
29552975
await readInto(container, fizzReadable);

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2454,12 +2454,28 @@ describe('ReactFlightDOMBrowser', () => {
24542454
passThrough(prelude),
24552455
);
24562456
const container = document.createElement('div');
2457-
const root = ReactDOMClient.createRoot(container);
2457+
errors.length = 0;
2458+
const root = ReactDOMClient.createRoot(container, {
2459+
onUncaughtError(err) {
2460+
errors.push(err);
2461+
},
2462+
});
24582463

24592464
await act(() => {
24602465
root.render(<ClientRoot response={response} />);
24612466
});
24622467

2463-
expect(container.innerHTML).toBe('<div>loading...</div>');
2468+
if (__DEV__) {
2469+
expect(errors).toEqual([new Error('Connection closed.')]);
2470+
expect(container.innerHTML).toBe('');
2471+
} else {
2472+
// This is likely a bug. In Dev we get a connection closed error
2473+
// because the debug info creates a chunk that has a pending status
2474+
// and when the stream finishes we error if any chunks are still pending.
2475+
// In production there is no debug info so the missing chunk is never instantiated
2476+
// because nothing triggers model evaluation before the stream completes
2477+
expect(errors).toEqual([]);
2478+
expect(container.innerHTML).toBe('<div>loading...</div>');
2479+
}
24642480
});
24652481
});

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1172,7 +1172,16 @@ describe('ReactFlightDOMEdge', () => {
11721172
),
11731173
);
11741174
fizzController.abort('bam');
1175-
expect(errors).toEqual(['bam']);
1175+
if (__DEV__) {
1176+
expect(errors).toEqual([new Error('Connection closed.')]);
1177+
} else {
1178+
// This is likely a bug. In Dev we get a connection closed error
1179+
// because the debug info creates a chunk that has a pending status
1180+
// and when the stream finishes we error if any chunks are still pending.
1181+
// In production there is no debug info so the missing chunk is never instantiated
1182+
// because nothing triggers model evaluation before the stream completes
1183+
expect(errors).toEqual(['bam']);
1184+
}
11761185
// Should still match the result when parsed
11771186
const result = await readResult(ssrStream);
11781187
const div = document.createElement('div');

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -509,7 +509,16 @@ describe('ReactFlightDOMNode', () => {
509509
),
510510
);
511511
ssrStream.abort('bam');
512-
expect(errors).toEqual(['bam']);
512+
if (__DEV__) {
513+
expect(errors).toEqual([new Error('Connection closed.')]);
514+
} else {
515+
// This is likely a bug. In Dev we get a connection closed error
516+
// because the debug info creates a chunk that has a pending status
517+
// and when the stream finishes we error if any chunks are still pending.
518+
// In production there is no debug info so the missing chunk is never instantiated
519+
// because nothing triggers model evaluation before the stream completes
520+
expect(errors).toEqual(['bam']);
521+
}
513522
// Should still match the result when parsed
514523
const result = await readResult(ssrStream);
515524
const div = document.createElement('div');

packages/react-server/src/ReactFlightServer.js

Lines changed: 45 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -649,9 +649,13 @@ function serializeThenable(
649649
// We can no longer accept any resolved values
650650
request.abortableTasks.delete(newTask);
651651
newTask.status = ABORTED;
652-
const errorId: number = (request.fatalError: any);
653-
const model = stringify(serializeByValueID(errorId));
654-
emitModelChunk(request, newTask.id, model);
652+
if (enableHalt && request.type === PRERENDER) {
653+
request.pendingChunks--;
654+
} else {
655+
const errorId: number = (request.fatalError: any);
656+
const model = stringify(serializeByValueID(errorId));
657+
emitModelChunk(request, newTask.id, model);
658+
}
655659
return newTask.id;
656660
}
657661
if (typeof thenable.status === 'string') {
@@ -2293,7 +2297,17 @@ function renderModel(
22932297
if (typeof x.then === 'function') {
22942298
if (request.status === ABORTING) {
22952299
task.status = ABORTED;
2296-
const errorId: number = (request.fatalError: any);
2300+
let errorId: number;
2301+
if (enableHalt && request.type === PRERENDER) {
2302+
// It is unfortunate that we would consume an id here. It suggests something
2303+
// isn't quite right with our model. When halting we don't emit any chunks
2304+
// but we're not in a position where we are avoiding emitting an entire
2305+
// chunk so we have to return something in this slot within the model so we
2306+
// consume an id and return it here knowing it will never resolve.
2307+
errorId = request.nextChunkId++;
2308+
} else {
2309+
errorId = (request.fatalError: any);
2310+
}
22972311
if (wasReactNode) {
22982312
return serializeLazyID(errorId);
22992313
}
@@ -2346,7 +2360,17 @@ function renderModel(
23462360

23472361
if (request.status === ABORTING) {
23482362
task.status = ABORTED;
2349-
const errorId: number = (request.fatalError: any);
2363+
let errorId: number;
2364+
if (enableHalt && request.type === PRERENDER) {
2365+
// This is unfortunate that we would consume an id here. It suggests something
2366+
// isn't quite right with our model. When halting we don't emit any chunks
2367+
// but we're not in a position where we are avoiding emitting an entire
2368+
// chunk so we have to return something in this slot within the model so we
2369+
// consume an id and return it here knowing it will never resolve.
2370+
errorId = request.nextChunkId++;
2371+
} else {
2372+
errorId = (request.fatalError: any);
2373+
}
23502374
if (wasReactNode) {
23512375
return serializeLazyID(errorId);
23522376
}
@@ -3820,6 +3844,22 @@ function retryTask(request: Request, task: Task): void {
38203844
request.abortableTasks.delete(task);
38213845
task.status = COMPLETED;
38223846
} catch (thrownValue) {
3847+
if (request.status === ABORTING) {
3848+
request.abortableTasks.delete(task);
3849+
task.status = ABORTED;
3850+
if (enableHalt && request.type === PRERENDER) {
3851+
// When aborting a prerender with halt semantics we don't emit
3852+
// anything into the slot for a task that aborts; it remains unresolved
3853+
request.pendingChunks--;
3854+
} else {
3855+
// Otherwise we emit an error chunk into the task slot.
3856+
const errorId: number = (request.fatalError: any);
3857+
const model = stringify(serializeByValueID(errorId));
3858+
emitModelChunk(request, task.id, model);
3859+
}
3860+
return;
3861+
}
3862+
38233863
const x =
38243864
thrownValue === SuspenseException
38253865
? // This is a special type of exception used for Suspense. For historical
@@ -3832,14 +3872,6 @@ function retryTask(request: Request, task: Task): void {
38323872
if (typeof x === 'object' && x !== null) {
38333873
// $FlowFixMe[method-unbinding]
38343874
if (typeof x.then === 'function') {
3835-
if (request.status === ABORTING) {
3836-
request.abortableTasks.delete(task);
3837-
task.status = ABORTED;
3838-
const errorId: number = (request.fatalError: any);
3839-
const model = stringify(serializeByValueID(errorId));
3840-
emitModelChunk(request, task.id, model);
3841-
return;
3842-
}
38433875
// Something suspended again, let's pick it back up later.
38443876
task.status = PENDING;
38453877
task.thenableState = getThenableStateAfterSuspending();
@@ -3856,15 +3888,6 @@ function retryTask(request: Request, task: Task): void {
38563888
}
38573889
}
38583890

3859-
if (request.status === ABORTING) {
3860-
request.abortableTasks.delete(task);
3861-
task.status = ABORTED;
3862-
const errorId: number = (request.fatalError: any);
3863-
const model = stringify(serializeByValueID(errorId));
3864-
emitModelChunk(request, task.id, model);
3865-
return;
3866-
}
3867-
38683891
request.abortableTasks.delete(task);
38693892
task.status = ERRORED;
38703893
const digest = logRecoverableError(request, x, task);

0 commit comments

Comments
 (0)