@@ -125,7 +125,7 @@ const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals;
 type Update<S, A> = {|
   lane: Lane,
   action: A,
-  eagerReducer: ((S, A) => S) | null,
+  hasEagerState: boolean,
   eagerState: S | null,
   next: Update<S, A>,
 |};
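For reference, the Update record after this hunk reads as below (reconstructed from the context lines above; field order as shown). hasEagerState is a plain flag recording that eagerState was filled in at dispatch time, replacing the old scheme of storing the reducer in eagerReducer and comparing it against the current reducer during render.

type Update<S, A> = {|
  lane: Lane,
  action: A,
  hasEagerState: boolean,
  eagerState: S | null,
  next: Update<S, A>,
|};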
@@ -730,7 +730,7 @@ function mountReducer<S, I, A>(
     lastRenderedState: (initialState: any),
   };
   hook.queue = queue;
-  const dispatch: Dispatch<A> = (queue.dispatch = (dispatchAction.bind(
+  const dispatch: Dispatch<A> = (queue.dispatch = (dispatchReducerAction.bind(
     null,
     currentlyRenderingFiber,
     queue,
@@ -801,7 +801,7 @@ function updateReducer<S, I, A>(
         const clone: Update<S, A> = {
           lane: updateLane,
           action: update.action,
-          eagerReducer: update.eagerReducer,
+          hasEagerState: update.hasEagerState,
           eagerState: update.eagerState,
           next: (null: any),
         };
@@ -829,17 +829,17 @@ function updateReducer<S, I, A>(
           // this will never be skipped by the check above.
           lane: NoLane,
           action: update.action,
-          eagerReducer: update.eagerReducer,
+          hasEagerState: update.hasEagerState,
           eagerState: update.eagerState,
           next: (null: any),
         };
         newBaseQueueLast = newBaseQueueLast.next = clone;
       }

       // Process this update.
-      if (update.eagerReducer === reducer) {
-        // If this update was processed eagerly, and its reducer matches the
-        // current reducer, we can use the eagerly computed state.
+      if (update.hasEagerState) {
+        // If this update is a state update (not a reducer) and was processed eagerly,
+        // we can use the eagerly computed state
         newState = ((update.eagerState: any): S);
       } else {
         const action = update.action;
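Condensed, the update-processing branch above now reads as follows (names as in the hunk; the surrounding do/while loop and base-queue bookkeeping are omitted):

if (update.hasEagerState) {
  // dispatchSetState already ran the reducer at dispatch time (see the
  // hunks below), so the stored result is reused as-is.
  newState = ((update.eagerState: any): S);
} else {
  // useReducer updates are always recomputed with the reducer passed to
  // this render, since the reducer function may have changed since dispatch.
  const action = update.action;
  newState = reducer(newState, action);
}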
@@ -1190,7 +1190,7 @@ function useMutableSource<Source, Snapshot>(
       lastRenderedReducer: basicStateReducer,
       lastRenderedState: snapshot,
     };
-    newQueue.dispatch = setSnapshot = (dispatchAction.bind(
+    newQueue.dispatch = setSnapshot = (dispatchSetState.bind(
       null,
       currentlyRenderingFiber,
       newQueue,
@@ -1481,7 +1481,7 @@ function mountState<S>(
   hook.queue = queue;
   const dispatch: Dispatch<
     BasicStateAction<S>,
-  > = (queue.dispatch = (dispatchAction.bind(
+  > = (queue.dispatch = (dispatchSetState.bind(
     null,
     currentlyRenderingFiber,
     queue,
@@ -2150,7 +2150,7 @@ function refreshCache<T>(fiber: Fiber, seedKey: ?() => T, seedValue: T) {
   // TODO: Warn if unmounted?
 }

-function dispatchAction<S, A>(
+function dispatchReducerAction<S, A>(
   fiber: Fiber,
   queue: UpdateQueue<S, A>,
   action: A,
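The old dispatchAction served both useReducer and useState; this change renames it to dispatchReducerAction for the useReducer path and adds a separate dispatchSetState (next hunk) for queues whose reducer is basicStateReducer, i.e. useState and the useMutableSource setSnapshot queue above. basicStateReducer is defined elsewhere in this file, roughly as:

function basicStateReducer<S>(state: S, action: BasicStateAction<S>): S {
  // useState dispatches either the next state value or an updater function.
  return typeof action === 'function' ? action(state) : action;
}

Because this reducer never changes between renders, a state computed eagerly at dispatch time can always be trusted later, which is why only dispatchSetState ever sets hasEagerState to true.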
@@ -2171,7 +2171,119 @@ function dispatchAction<S, A>(
   const update: Update<S, A> = {
     lane,
     action,
-    eagerReducer: null,
+    hasEagerState: false,
+    eagerState: null,
+    next: (null: any),
+  };
+
+  const alternate = fiber.alternate;
+  if (
+    fiber === currentlyRenderingFiber ||
+    (alternate !== null && alternate === currentlyRenderingFiber)
+  ) {
+    // This is a render phase update. Stash it in a lazily-created map of
+    // queue -> linked list of updates. After this render pass, we'll restart
+    // and apply the stashed updates on top of the work-in-progress hook.
+    didScheduleRenderPhaseUpdateDuringThisPass = didScheduleRenderPhaseUpdate = true;
+    const pending = queue.pending;
+    if (pending === null) {
+      // This is the first update. Create a circular list.
+      update.next = update;
+    } else {
+      update.next = pending.next;
+      pending.next = update;
+    }
+    queue.pending = update;
+  } else {
+    if (isInterleavedUpdate(fiber, lane)) {
+      const interleaved = queue.interleaved;
+      if (interleaved === null) {
+        // This is the first update. Create a circular list.
+        update.next = update;
+        // At the end of the current render, this queue's interleaved updates will
+        // be transferred to the pending queue.
+        pushInterleavedQueue(queue);
+      } else {
+        update.next = interleaved.next;
+        interleaved.next = update;
+      }
+      queue.interleaved = update;
+    } else {
+      const pending = queue.pending;
+      if (pending === null) {
+        // This is the first update. Create a circular list.
+        update.next = update;
+      } else {
+        update.next = pending.next;
+        pending.next = update;
+      }
+      queue.pending = update;
+    }
+
+    if (__DEV__) {
+      // $FlowExpectedError - jest isn't a global, and isn't recognized outside of tests
+      if ('undefined' !== typeof jest) {
+        warnIfNotCurrentlyActingUpdatesInDev(fiber);
+      }
+    }
+    const root = scheduleUpdateOnFiber(fiber, lane, eventTime);
+
+    if (isTransitionLane(lane) && root !== null) {
+      let queueLanes = queue.lanes;
+
+      // If any entangled lanes are no longer pending on the root, then they
+      // must have finished. We can remove them from the shared queue, which
+      // represents a superset of the actually pending lanes. In some cases we
+      // may entangle more than we need to, but that's OK. In fact it's worse if
+      // we *don't* entangle when we should.
+      queueLanes = intersectLanes(queueLanes, root.pendingLanes);
+
+      // Entangle the new transition lane with the other transition lanes.
+      const newQueueLanes = mergeLanes(queueLanes, lane);
+      queue.lanes = newQueueLanes;
+      // Even if queue.lanes already include lane, we don't know for certain if
+      // the lane finished since the last time we entangled it. So we need to
+      // entangle it again, just to be sure.
+      markRootEntangled(root, newQueueLanes);
+    }
+  }
+
+  if (__DEV__) {
+    if (enableDebugTracing) {
+      if (fiber.mode & DebugTracingMode) {
+        const name = getComponentNameFromFiber(fiber) || 'Unknown';
+        logStateUpdateScheduled(name, lane, action);
+      }
+    }
+  }
+
+  if (enableSchedulingProfiler) {
+    markStateUpdateScheduled(fiber, lane);
+  }
+}
+
+function dispatchSetState<S, A>(
+  fiber: Fiber,
+  queue: UpdateQueue<S, A>,
+  action: A,
+) {
+  if (__DEV__) {
+    if (typeof arguments[3] === 'function') {
+      console.error(
+        "State updates from the useState() and useReducer() Hooks don't support the " +
+          'second callback argument. To execute a side effect after ' +
+          'rendering, declare it in the component body with useEffect().',
+      );
+    }
+  }
+
+  const eventTime = requestEventTime();
+  const lane = requestUpdateLane(fiber);
+
+  const update: Update<S, A> = {
+    lane,
+    action,
+    hasEagerState: false,
     eagerState: null,
     next: (null: any),
   };
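Both the pending and interleaved branches above append with the same circular singly-linked list: the queue field points at the last update and last.next points at the first, so the append is O(1) and the list can be walked in order by starting at pending.next. A minimal standalone sketch of that append (enqueueUpdate is a hypothetical helper name, not from this file):

function enqueueUpdate(queue, update) {
  const pending = queue.pending;
  if (pending === null) {
    // First update: a one-element cycle pointing at itself.
    update.next = update;
  } else {
    // Splice the new node in after the current last node, keeping the cycle.
    update.next = pending.next;
    pending.next = update;
  }
  // The queue always references the most recently appended update.
  queue.pending = update;
}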
@@ -2241,7 +2353,7 @@ function dispatchAction<S, A>(
           // it, on the update object. If the reducer hasn't changed by the
           // time we enter the render phase, then the eager state can be used
           // without calling the reducer again.
-          update.eagerReducer = lastRenderedReducer;
+          update.hasEagerState = true;
           update.eagerState = eagerState;
           if (is(eagerState, currentState)) {
             // Fast path. We can bail out without scheduling React to re-render.
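Pieced together with the dispatchSetState body added earlier, the eager bailout this hunk belongs to looks roughly like the sketch below. DEV-only dispatcher swapping, the try/finally, and the surrounding check that no other updates are pending are omitted; treat it as a reconstruction under those assumptions, not the exact source:

const lastRenderedReducer = queue.lastRenderedReducer; // basicStateReducer for useState
if (lastRenderedReducer !== null) {
  const currentState: S = (queue.lastRenderedState: any);
  const eagerState = lastRenderedReducer(currentState, action);
  // Record that an eager state exists rather than storing the reducer itself.
  update.hasEagerState = true;
  update.eagerState = eagerState;
  if (is(eagerState, currentState)) {
    // Fast path. We can bail out without scheduling React to re-render.
    return;
  }
}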