diff --git a/packages/react-client/src/ReactFlightClient.js b/packages/react-client/src/ReactFlightClient.js
index d538e077abb1c..657597c1fc7f7 100644
--- a/packages/react-client/src/ReactFlightClient.js
+++ b/packages/react-client/src/ReactFlightClient.js
@@ -767,6 +767,10 @@ function initializeModuleChunk(chunk: ResolvedModuleChunk): void {
 // Report that any missing chunks in the model is now going to throw this
 // error upon read. Also notify any pending promises.
 export function reportGlobalError(response: Response, error: Error): void {
+  if (response._closed) {
+    // Already closed. Ignore further errors.
+    return;
+  }
   response._closed = true;
   response._closedReason = error;
   response._chunks.forEach(chunk => {
@@ -3698,6 +3702,26 @@ function processFullStringRow(
       resolveText(response, id, row);
       return;
     }
+    case 82 /* "R" */: {
+      startReadableStream(response, id, undefined);
+      return;
+    }
+    case 114 /* "r" */: {
+      startReadableStream(response, id, 'bytes');
+      return;
+    }
+    case 88 /* "X" */: {
+      startAsyncIterable(response, id, false);
+      return;
+    }
+    case 120 /* "x" */: {
+      startAsyncIterable(response, id, true);
+      return;
+    }
+    case 67 /* "C" */: {
+      stopStream(response, id, row);
+      return;
+    }
     case 78 /* "N" */: {
       if (enableProfilerTimer && enableComponentPerformanceTrack) {
         // Track the time origin for future debug info. We track it relative
@@ -3741,6 +3765,16 @@ function processFullStringRow(
       }
       // Fallthrough to share the error with Console entries.
     }
+    case 89 /* "Y" */: {
+      if (__DEV__) {
+        // In the case where debug info is coming through the same channel as the
+        // main channel, this indicates that the main channel is now done and
+        // anything remaining is just debug info.
+        closeMainChannel(response);
+        return;
+      }
+      // Fallthrough to share the error with Console entries.
+    }
     case 74 /* "J" */: {
       if (
         enableProfilerTimer &&
@@ -3763,30 +3797,6 @@ function processFullStringRow(
           'matching versions on the server and the client.',
       );
     }
-    case 82 /* "R" */: {
-      startReadableStream(response, id, undefined);
-      return;
-    }
-    // Fallthrough
-    case 114 /* "r" */: {
-      startReadableStream(response, id, 'bytes');
-      return;
-    }
-    // Fallthrough
-    case 88 /* "X" */: {
-      startAsyncIterable(response, id, false);
-      return;
-    }
-    // Fallthrough
-    case 120 /* "x" */: {
-      startAsyncIterable(response, id, true);
-      return;
-    }
-    // Fallthrough
-    case 67 /* "C" */: {
-      stopStream(response, id, row);
-      return;
-    }
     // Fallthrough
     case 80 /* "P" */: {
       if (enablePostpone) {
@@ -4100,6 +4110,24 @@ function createFromJSONCallback(response: Response) {
   };
 }
 
+export function closeMainChannel(response: Response) {
+  if (enableProfilerTimer && enableComponentPerformanceTrack) {
+    if (response._replayConsole) {
+      markAllTracksInOrder();
+      flushComponentPerformance(
+        response,
+        getChunk(response, 0),
+        0,
+        -Infinity,
+        -Infinity,
+      );
+    }
+    // We don't expect to get any more console entries. This also ensures that we
+    // don't call flushComponentPerformance again when the debug stream errors/closes.
+    response._replayConsole = false;
+  }
+}
+
 export function close(response: Response): void {
   // In case there are any remaining unresolved chunks, they won't
   // be resolved now. So we need to issue an error to those.
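Note on the client half: the new guard in reportGlobalError makes closing idempotent, so a late transport error can no longer clobber the reason recorded when the stream first closed. A minimal standalone sketch of that pattern, using hypothetical chunk and response shapes rather than React's actual internals:

function createResponse() {
  return {_closed: false, _closedReason: null, _chunks: new Map()};
}

function reportGlobalError(response, error) {
  if (response._closed) {
    // Already closed. Keep the first reason; ignore later errors.
    return;
  }
  response._closed = true;
  response._closedReason = error;
  response._chunks.forEach(chunk => {
    // Only still-pending chunks need to learn about the failure;
    // resolved chunks keep their values.
    if (chunk.status === 'pending') {
      chunk.status = 'rejected';
      chunk.reason = error;
    }
  });
}

With this guard, calling reportGlobalError twice (for example, once when the connection drops and again when a pending read later times out) leaves _closedReason pointing at the first error, which is the one readers should see.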
diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js
index 9ba53d4afb360..22d2c218ac653 100644
--- a/packages/react-server/src/ReactFlightServer.js
+++ b/packages/react-server/src/ReactFlightServer.js
@@ -274,7 +274,7 @@ function patchConsole(consoleInst: typeof console, methodName: string) {
       request,
       parseStackTrace(new Error('react-stack-top-frame'), 1),
     );
-    request.pendingChunks++;
+    request.pendingDebugChunks++;
     const owner: null | ReactComponentInfo = resolveOwner();
     const args = Array.from(arguments);
    // Extract the env if this is a console log that was replayed from another env.
@@ -464,6 +464,7 @@ export type Request = {
   timeOrigin: number,
   abortTime: number,
   // DEV-only
+  pendingDebugChunks: number,
   completedDebugChunks: Array<Chunk>,
   environmentName: () => string,
   filterStackFrame: (url: string, functionName: string) => boolean,
@@ -578,6 +579,7 @@ function RequestInstance(
   this.onFatalError = onFatalError;
 
   if (__DEV__) {
+    this.pendingDebugChunks = 0;
    this.completedDebugChunks = ([]: Array<Chunk>);
    this.environmentName =
      environmentName === undefined
@@ -716,7 +718,7 @@ function serializeDebugThenable(
   thenable: Thenable,
 ): string {
   // Like serializeThenable but for renderDebugModel
-  request.pendingChunks++;
+  request.pendingDebugChunks++;
   const id = request.nextChunkId++;
   const ref = serializePromiseID(id);
   request.writtenDebugObjects.set(thenable, ref);
@@ -728,20 +730,9 @@ function serializeDebugThenable(
     }
     case 'rejected': {
       const x = thenable.reason;
-      if (
-        enablePostpone &&
-        typeof x === 'object' &&
-        x !== null &&
-        (x: any).$$typeof === REACT_POSTPONE_TYPE
-      ) {
-        const postponeInstance: Postpone = (x: any);
-        // We don't log this postpone.
-        emitPostponeChunk(request, id, postponeInstance);
-      } else {
-        // We don't log these errors since they didn't actually throw into Flight.
-        const digest = '';
-        emitErrorChunk(request, id, digest, x, true);
-      }
+      // We don't log these errors since they didn't actually throw into Flight.
+      const digest = '';
+      emitErrorChunk(request, id, digest, x, true);
       return ref;
     }
   }
@@ -778,20 +769,9 @@ function serializeDebugThenable(
        enqueueFlush(request);
        return;
      }
-      if (
-        enablePostpone &&
-        typeof reason === 'object' &&
-        reason !== null &&
-        (reason: any).$$typeof === REACT_POSTPONE_TYPE
-      ) {
-        const postponeInstance: Postpone = (reason: any);
-        // We don't log this postpone.
-        emitPostponeChunk(request, id, postponeInstance);
-      } else {
-        // We don't log these errors since they didn't actually throw into Flight.
-        const digest = '';
-        emitErrorChunk(request, id, digest, reason, true);
-      }
+      // We don't log these errors since they didn't actually throw into Flight.
+      const digest = '';
+      emitErrorChunk(request, id, digest, reason, true);
      enqueueFlush(request);
    },
  );
@@ -1436,7 +1416,7 @@ function renderFunctionComponent<Props>(
     const componentName =
       (Component: any).displayName || Component.name || '';
     const componentEnv = (0, request.environmentName)();
-    request.pendingChunks++;
+    request.pendingDebugChunks++;
     componentDebugInfo = ({
       name: componentName,
       env: componentEnv,
@@ -2206,7 +2186,7 @@ function visitAsyncNode(
       const env = (0, request.environmentName)();
       advanceTaskTime(request, task, startTime);
       // Then emit a reference to us awaiting it in the current task.
-      request.pendingChunks++;
+      request.pendingDebugChunks++;
       emitDebugChunk(request, task.id, {
         awaited: ((ioNode: any): ReactIOInfo), // This is deduped by this reference.
         env: env,
@@ -2263,7 +2243,7 @@
   } else if (awaitedNode !== null) {
     // Nothing in user space (unfiltered stack) awaited this.
     serializeIONode(request, awaitedNode, awaitedNode.promise);
-    request.pendingChunks++;
+    request.pendingDebugChunks++;
     // We log the environment at the time when we ping which may be later than what the
     // environment was when we actually started awaiting.
     const env = (0, request.environmentName)();
@@ -2444,7 +2424,7 @@ function serializeDeferredObject(
   // This client supports a long lived connection. We can assign this object
   // an ID to be lazy loaded later.
   // This keeps the connection alive until we ask for it or release it.
-  request.pendingChunks++;
+  request.pendingDebugChunks++;
   const id = request.nextChunkId++;
   deferredDebugObjects.existing.set(value, id);
   deferredDebugObjects.retained.set(id, value);
@@ -2581,7 +2561,7 @@ function serializeDebugClientReference(
   try {
     const clientReferenceMetadata: ClientReferenceMetadata =
       resolveClientReferenceMetadata(request.bundlerConfig, clientReference);
-    request.pendingChunks++;
+    request.pendingDebugChunks++;
     const importId = request.nextChunkId++;
     emitImportChunk(request, importId, clientReferenceMetadata, true);
     if (parent[0] === REACT_ELEMENT_TYPE && parentPropertyName === '1') {
@@ -2594,7 +2574,7 @@
     }
     return serializeByValueID(importId);
   } catch (x) {
-    request.pendingChunks++;
+    request.pendingDebugChunks++;
     const errorId = request.nextChunkId++;
     const digest = logRecoverableError(request, x, null);
     emitErrorChunk(request, errorId, digest, x, true);
@@ -2697,7 +2677,7 @@ function serializeLargeTextString(request: Request, text: string): string {
 }
 
 function serializeDebugLargeTextString(request: Request, text: string): string {
-  request.pendingChunks++;
+  request.pendingDebugChunks++;
   const textId = request.nextChunkId++;
   emitTextChunk(request, textId, text, true);
   return serializeByValueID(textId);
 }
@@ -2806,7 +2786,7 @@ function serializeDebugTypedArray(
   tag: string,
   typedArray: $ArrayBufferView,
 ): string {
-  request.pendingChunks++;
+  request.pendingDebugChunks++;
   const bufferId = request.nextChunkId++;
   emitTypedArrayChunk(request, bufferId, tag, typedArray, true);
   return serializeByValueID(bufferId);
@@ -2815,6 +2795,7 @@ function serializeDebugTypedArray(
 function serializeDebugBlob(request: Request, blob: Blob): string {
   const model: Array<string | Uint8Array> = [blob.type];
   const reader = blob.stream().getReader();
+  request.pendingDebugChunks++;
   const id = request.nextChunkId++;
   function progress(
     entry: {done: false, value: Uint8Array} | {done: true, value: void},
@@ -4082,7 +4063,7 @@ function outlineIOInfo(request: Request, ioInfo: ReactIOInfo): void {
     return;
   }
   // We can't serialize the ConsoleTask/Error objects so we need to omit them before serializing.
-  request.pendingChunks++;
+  request.pendingDebugChunks++;
   const id = request.nextChunkId++;
   const owner = ioInfo.owner;
   // Ensure the owner is already outlined.
@@ -4160,7 +4141,7 @@ function serializeIONode(
         request.abortTime
       : ioNode.end;
 
-  request.pendingChunks++;
+  request.pendingDebugChunks++;
   const id = request.nextChunkId++;
   emitIOInfoChunk(
     request,
@@ -4197,7 +4178,11 @@ function emitTypedArrayChunk(
       }
     }
   }
-  request.pendingChunks++; // Extra chunk for the header.
+  if (debug) {
+    request.pendingDebugChunks++;
+  } else {
+    request.pendingChunks++; // Extra chunk for the header.
+  }
   // TODO: Convert to little endian if that's not the server default.
   const binaryChunk = typedArrayToBinaryChunk(typedArray);
   const binaryLength = byteLengthOfBinaryChunk(binaryChunk);
@@ -4222,7 +4207,11 @@ function emitTextChunk(
       'Existence of byteLengthOfChunk should have already been checked. This is a bug in React.',
     );
   }
-  request.pendingChunks++; // Extra chunk for the header.
+  if (debug) {
+    request.pendingDebugChunks++;
+  } else {
+    request.pendingChunks++; // Extra chunk for the header.
+  }
   const textChunk = stringToChunk(text);
   const binaryLength = byteLengthOfChunk(textChunk);
   const row = id.toString(16) + ':T' + binaryLength.toString(16) + ',';
@@ -4607,7 +4596,7 @@ function renderDebugModel(
         // $FlowFixMe[method-unbinding]
         '(' + Function.prototype.toString.call(value) + ')',
       );
-      request.pendingChunks++;
+      request.pendingDebugChunks++;
      const id = request.nextChunkId++;
      const processedChunk = encodeReferenceChunk(request, id, serializedValue);
      request.completedDebugChunks.push(processedChunk);
@@ -4765,7 +4754,7 @@ function outlineDebugModel(
   }
 
   const id = request.nextChunkId++;
-  request.pendingChunks++;
+  request.pendingDebugChunks++;
   emitOutlinedDebugModelChunk(request, id, counter, model);
   return id;
 }
@@ -4814,7 +4803,7 @@ function emitTimeOriginChunk(request: Request, timeOrigin: number): void {
   // We emit the time origin once. All ReactTimeInfo timestamps later in the stream
   // are relative to this time origin. This allows for more compact number encoding
   // and lower precision loss.
-  request.pendingChunks++;
+  request.pendingDebugChunks++;
   const row = ':N' + timeOrigin + '\n';
   const processedChunk = stringToChunk(row);
   // TODO: Move to its own priority queue.
@@ -4841,7 +4830,7 @@ function forwardDebugInfo(
       // being no references to this as an owner.
       outlineComponentInfo(request, (info: any));
       // Emit a reference to the outlined one.
-      request.pendingChunks++;
+      request.pendingDebugChunks++;
       emitDebugChunk(request, id, info);
     } else if (info.awaited) {
       const ioInfo = info.awaited;
@@ -4882,11 +4871,11 @@
         // $FlowFixMe[cannot-write]
         debugAsyncInfo.stack = debugStack;
       }
-      request.pendingChunks++;
+      request.pendingDebugChunks++;
       emitDebugChunk(request, id, debugAsyncInfo);
     }
   } else {
-    request.pendingChunks++;
+    request.pendingDebugChunks++;
     emitDebugChunk(request, id, info);
   }
 }
@@ -4988,7 +4977,7 @@ function forwardDebugInfoFromAbortedTask(request: Request, task: Task): void {
       // complete in time before aborting.
       // The best we can do is try to emit the stack of where this Promise was created.
       serializeIONode(request, node, null);
-      request.pendingChunks++;
+      request.pendingDebugChunks++;
      const env = (0, request.environmentName)();
      const asyncInfo: ReactAsyncInfo = {
        awaited: ((node: any): ReactIOInfo), // This is deduped by this reference.
@@ -5017,7 +5006,7 @@ function emitTimingChunk(
   if (!enableProfilerTimer || !enableComponentPerformanceTrack) {
     return;
   }
-  request.pendingChunks++;
+  request.pendingDebugChunks++;
   const relativeTimestamp = timestamp - request.timeOrigin;
   const row =
     serializeRowHeader('D', id) + '{"time":' + relativeTimestamp + '}\n';
@@ -5229,7 +5218,7 @@ function retryTask(request: Request, task: Task): void {
   if (__DEV__) {
     const currentEnv = (0, request.environmentName)();
     if (currentEnv !== task.environmentName) {
-      request.pendingChunks++;
+      request.pendingDebugChunks++;
      // The environment changed since we last emitted any debug information for this
      // task. We emit an entry that just includes the environment name change.
      emitDebugChunk(request, task.id, {env: currentEnv});
@@ -5411,6 +5400,7 @@ function flushCompletedChunks(
   request: Request,
   destination: Destination,
 ): void {
+  const hadPendingChunks = __DEV__ ? request.pendingChunks > 0 : false;
   beginWriting(destination);
   try {
     // We emit module chunks first in the stream so that
@@ -5449,7 +5439,7 @@
       const debugChunks = request.completedDebugChunks;
       i = 0;
       for (; i < debugChunks.length; i++) {
-        request.pendingChunks--;
+        request.pendingDebugChunks--;
        const chunk = debugChunks[i];
        const keepWriting: boolean = writeChunkAndReturn(destination, chunk);
        if (!keepWriting) {
@@ -5498,19 +5488,31 @@
   }
   flushBuffered(destination);
   if (request.pendingChunks === 0) {
-    // We're done.
-    if (enableTaint) {
-      cleanupTaintQueue(request);
-    }
-    if (request.status < ABORTING) {
-      const abortReason = new Error(
-        'This render completed successfully. All cacheSignals are now aborted to allow clean up of any unused resources.',
-      );
-      request.cacheController.abort(abortReason);
-    }
+    if (__DEV__ && request.pendingDebugChunks > 0) {
+      if (hadPendingChunks) {
+        // We just flushed the last regular chunk but we still have debug info to send.
+        // We send a signal to indicate that the main channel is now closed and it's only
+        // the debug channel that's still open.
+        beginWriting(destination);
+        writeChunkAndReturn(destination, stringToChunk(':Y\n'));
+        completeWriting(destination);
+        flushBuffered(destination);
+      }
+    } else {
+      // We're done.
+      if (enableTaint) {
+        cleanupTaintQueue(request);
+      }
+      if (request.status < ABORTING) {
+        const abortReason = new Error(
+          'This render completed successfully. All cacheSignals are now aborted to allow clean up of any unused resources.',
+        );
+        request.cacheController.abort(abortReason);
+      }
+      request.status = CLOSED;
+      close(destination);
+      request.destination = null;
+    }
-    request.status = CLOSED;
-    close(destination);
-    request.destination = null;
   }
 }
@@ -5716,7 +5718,7 @@ export function resolveDebugMessage(request: Request, message: string): void {
     const retainedValue = deferredDebugObjects.retained.get(id);
     if (retainedValue !== undefined) {
       // We're no longer blocked on this. We won't emit it.
-      request.pendingChunks--;
+      request.pendingDebugChunks--;
      deferredDebugObjects.retained.delete(id);
      deferredDebugObjects.existing.delete(retainedValue);
      enqueueFlush(request);
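Note on the server half: the key invariant is that two counters now gate two different lifecycles. pendingChunks keeps the main channel open, while pendingDebugChunks (DEV-only) keeps the connection itself alive for late debug info. A simplified sketch of the flush decision, with a hypothetical destination exposing write/close and a single completedChunks queue; the real code goes through beginWriting/writeChunkAndReturn/completeWriting and several priority queues:

const __DEV__ = true;

function flushCompletedChunks(request, destination) {
  // Remember whether any regular work was still outstanding when this flush began.
  const hadPendingChunks = __DEV__ ? request.pendingChunks > 0 : false;
  while (request.completedChunks.length > 0) {
    destination.write(request.completedChunks.shift());
    request.pendingChunks--;
  }
  if (__DEV__) {
    while (request.completedDebugChunks.length > 0) {
      destination.write(request.completedDebugChunks.shift());
      request.pendingDebugChunks--;
    }
  }
  if (request.pendingChunks === 0) {
    if (__DEV__ && request.pendingDebugChunks > 0) {
      if (hadPendingChunks) {
        // The main channel just drained but debug info is still outstanding:
        // tell the client that everything from here on is debug-only.
        destination.write(':Y\n');
      }
    } else {
      // Nothing regular or debug-only is left; the stream can really close.
      destination.close();
    }
  }
}

The hadPendingChunks check is what makes ':Y' fire exactly once: on the flush that crosses from "main work remaining" to "main work done", and never again on subsequent debug-only flushes.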