diff --git a/packages/react-devtools-shared/src/__tests__/__snapshots__/profilingCache-test.js.snap b/packages/react-devtools-shared/src/__tests__/__snapshots__/profilingCache-test.js.snap
index 1462a64195d6b..8d7eeb4255a94 100644
--- a/packages/react-devtools-shared/src/__tests__/__snapshots__/profilingCache-test.js.snap
+++ b/packages/react-devtools-shared/src/__tests__/__snapshots__/profilingCache-test.js.snap
@@ -73,7 +73,7 @@ Object {
     },
   },
   "duration": 15,
-  "effectDuration": null,
+  "effectDuration": 0,
   "fiberActualDurations": Map {
     1 => 15,
     2 => 15,
@@ -86,7 +86,7 @@ Object {
     3 => 3,
     4 => 2,
   },
-  "passiveEffectDuration": null,
+  "passiveEffectDuration": 0,
   "priorityLevel": "Immediate",
   "timestamp": 15,
   "updaters": Array [
diff --git a/packages/react-dom/src/server/ReactPartialRendererHooks.js b/packages/react-dom/src/server/ReactPartialRendererHooks.js
index 6e2b715382a0a..168fd78f6103e 100644
--- a/packages/react-dom/src/server/ReactPartialRendererHooks.js
+++ b/packages/react-dom/src/server/ReactPartialRendererHooks.js
@@ -216,6 +216,10 @@ export function resetHooksState(): void {
   workInProgressHook = null;
 }
 
+function getCacheSignal() {
+  throw new Error('Not implemented.');
+}
+
 function getCacheForType<T>(resourceType: () => T): T {
   throw new Error('Not implemented.');
 }
@@ -551,6 +555,7 @@ export const Dispatcher: DispatcherType = {
 };
 
 if (enableCache) {
+  Dispatcher.getCacheSignal = getCacheSignal;
   Dispatcher.getCacheForType = getCacheForType;
   Dispatcher.useCacheRefresh = useCacheRefresh;
 }
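
Note: the server partial renderer above only stubs getCacheSignal to throw; the client
hook dispatchers further down return cache.controller.signal. As a rough illustration
of what this API pair enables, here is a minimal sketch of a fetch-backed resource,
assuming the experimental React exports unstable_getCacheForType and
unstable_getCacheSignal (the test setup at the end of this diff reads
React.unstable_getCacheSignal); the createTextCache/fetchText names and the URL handling
are hypothetical. Both calls must happen during render, since they read CacheContext.

import * as React from 'react';

// The resourceType function doubles as the cache key and the lazy initializer
// for this resource's per-cache storage.
function createTextCache() {
  return new Map();
}

function fetchText(url) {
  const records = React.unstable_getCacheForType(createTextCache);
  let record = records.get(url);
  if (record === undefined) {
    // Tie the request lifetime to the cache lifetime: when releaseCache()
    // eventually aborts the cache's controller, the in-flight fetch is
    // cancelled as well.
    const signal = React.unstable_getCacheSignal();
    record = fetch(url, {signal}).then(response => response.text());
    records.set(url, record);
  }
  return record;
}
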
diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.new.js b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js
index 0d284c683ed1f..3ad60546c2b05 100644
--- a/packages/react-reconciler/src/ReactFiberCacheComponent.new.js
+++ b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js
@@ -18,8 +18,13 @@ import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols';
 import {isPrimaryRenderer} from './ReactFiberHostConfig';
 import {createCursor, push, pop} from './ReactFiberStack.new';
 import {pushProvider, popProvider} from './ReactFiberNewContext.new';
+import * as Scheduler from 'scheduler';
 
-export type Cache = Map<() => mixed, mixed>;
+export type Cache = {|
+  controller: AbortController,
+  data: Map<() => mixed, mixed>,
+  refCount: number,
+|};
 
 export type CacheComponentState = {|
   +parent: Cache,
@@ -31,6 +36,13 @@ export type SpawnedCachePool = {|
   +pool: Cache,
 |};
 
+// Intentionally not named imports because Rollup would
+// use dynamic dispatch for CommonJS interop named imports.
+const {
+  unstable_scheduleCallback: scheduleCallback,
+  unstable_NormalPriority: NormalPriority,
+} = Scheduler;
+
 export const CacheContext: ReactContext<Cache> = enableCache
   ? {
       $$typeof: REACT_CONTEXT_TYPE,
@@ -57,6 +69,58 @@ let pooledCache: Cache | null = null;
 // cache from the render that suspended.
 const prevFreshCacheOnStack: StackCursor<Cache | null> = createCursor(null);
 
+// Creates a new empty Cache instance with a ref-count of 0. The caller is responsible
+// for retaining the cache once it is in use (retainCache), and releasing the cache
+// once it is no longer needed (releaseCache).
+export function createCache(): Cache {
+  if (!enableCache) {
+    return (null: any);
+  }
+  const cache: Cache = {
+    controller: new AbortController(),
+    data: new Map(),
+    refCount: 0,
+  };
+
+  return cache;
+}
+
+export function retainCache(cache: Cache) {
+  if (!enableCache) {
+    return;
+  }
+  if (__DEV__) {
+    if (cache.controller.signal.aborted) {
+      console.warn(
+        'A cache instance was retained after it was already freed. ' +
+          'This likely indicates a bug in React.',
+      );
+    }
+  }
+  cache.refCount++;
+}
+
+// Clean up a cache instance, potentially freeing it if there are no more references.
+export function releaseCache(cache: Cache) {
+  if (!enableCache) {
+    return;
+  }
+  cache.refCount--;
+  if (__DEV__) {
+    if (cache.refCount < 0) {
+      console.warn(
+        'A cache instance was released after it was already freed. ' +
+          'This likely indicates a bug in React.',
+      );
+    }
+  }
+  if (cache.refCount === 0) {
+    scheduleCallback(NormalPriority, () => {
+      cache.controller.abort();
+    });
+  }
+}
+
 export function pushCacheProvider(workInProgress: Fiber, cache: Cache) {
   if (!enableCache) {
     return;
@@ -78,8 +142,14 @@ export function requestCacheFromPool(renderLanes: Lanes): Cache {
   if (pooledCache !== null) {
     return pooledCache;
   }
-  // Create a fresh cache.
-  pooledCache = new Map();
+  // Create a fresh cache. The pooled cache must be owned - it is freed
+  // in releaseRootPooledCache() - but the cache instance handed out
+  // is retained/released in the commit phase of the component that
+  // references it (i.e. the host root, cache boundary, or suspense component).
+  // In other words, pooledCache is conceptually an Option<Arc<Cache>> (owned),
+  // whereas the return value of this function is a &Arc<Cache> (borrowed).
+  pooledCache = createCache();
+  retainCache(pooledCache);
   return pooledCache;
 }
 
@@ -91,7 +161,13 @@ export function pushRootCachePool(root: FiberRoot) {
   // from `root.pooledCache`. If it's currently `null`, we will lazily
   // initialize it the first time it's requested. However, we only mutate
   // the root itself during the complete/unwind phase of the HostRoot.
-  pooledCache = root.pooledCache;
+  const rootCache = root.pooledCache;
+  if (rootCache != null) {
+    pooledCache = rootCache;
+    root.pooledCache = null;
+  } else {
+    pooledCache = null;
+  }
 }
 
 export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) {
@@ -157,7 +233,6 @@ export function getSuspendedCachePool(): SpawnedCachePool | null {
   if (!enableCache) {
     return null;
   }
-
   // We check the cache on the stack first, since that's the one any new Caches
   // would have accessed.
   let pool = pooledCache;
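
For reference, the retain/release scheme introduced above can be modeled in isolation.
This is a standalone sketch, not React's module: scheduleCallback(NormalPriority, ...)
is replaced with queueMicrotask, and the two retains stand in for a cache referenced by
two owners (for example the HostRoot's memoizedState.cache and the root's cache pool).

function createCache() {
  return {controller: new AbortController(), data: new Map(), refCount: 0};
}

function retainCache(cache) {
  cache.refCount++;
}

function releaseCache(cache) {
  cache.refCount--;
  if (cache.refCount === 0) {
    // Abort asynchronously, mirroring scheduleCallback(NormalPriority, ...)
    // in the implementation above.
    queueMicrotask(() => cache.controller.abort());
  }
}

const cache = createCache();
cache.controller.signal.addEventListener('abort', () => {
  console.log('cache freed; requests keyed on it can be cancelled');
});

retainCache(cache);  // refCount: 1
retainCache(cache);  // refCount: 2
releaseCache(cache); // refCount: 1, still live
releaseCache(cache); // refCount: 0, abort is scheduled
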
diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js
index dd450fff76b50..a00059ededf10 100644
--- a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js
+++ b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js
@@ -18,8 +18,13 @@ import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols';
 import {isPrimaryRenderer} from './ReactFiberHostConfig';
 import {createCursor, push, pop} from './ReactFiberStack.old';
 import {pushProvider, popProvider} from './ReactFiberNewContext.old';
+import * as Scheduler from 'scheduler';
 
-export type Cache = Map<() => mixed, mixed>;
+export type Cache = {|
+  controller: AbortController,
+  data: Map<() => mixed, mixed>,
+  refCount: number,
+|};
 
 export type CacheComponentState = {|
   +parent: Cache,
@@ -31,6 +36,13 @@ export type SpawnedCachePool = {|
   +pool: Cache,
 |};
 
+// Intentionally not named imports because Rollup would
+// use dynamic dispatch for CommonJS interop named imports.
+const {
+  unstable_scheduleCallback: scheduleCallback,
+  unstable_NormalPriority: NormalPriority,
+} = Scheduler;
+
 export const CacheContext: ReactContext<Cache> = enableCache
   ? {
       $$typeof: REACT_CONTEXT_TYPE,
@@ -57,6 +69,58 @@ let pooledCache: Cache | null = null;
 // cache from the render that suspended.
 const prevFreshCacheOnStack: StackCursor<Cache | null> = createCursor(null);
 
+// Creates a new empty Cache instance with a ref-count of 0. The caller is responsible
+// for retaining the cache once it is in use (retainCache), and releasing the cache
+// once it is no longer needed (releaseCache).
+export function createCache(): Cache {
+  if (!enableCache) {
+    return (null: any);
+  }
+  const cache: Cache = {
+    controller: new AbortController(),
+    data: new Map(),
+    refCount: 0,
+  };
+
+  return cache;
+}
+
+export function retainCache(cache: Cache) {
+  if (!enableCache) {
+    return;
+  }
+  if (__DEV__) {
+    if (cache.controller.signal.aborted) {
+      console.warn(
+        'A cache instance was retained after it was already freed. ' +
+          'This likely indicates a bug in React.',
+      );
+    }
+  }
+  cache.refCount++;
+}
+
+// Clean up a cache instance, potentially freeing it if there are no more references.
+export function releaseCache(cache: Cache) {
+  if (!enableCache) {
+    return;
+  }
+  cache.refCount--;
+  if (__DEV__) {
+    if (cache.refCount < 0) {
+      console.warn(
+        'A cache instance was released after it was already freed. ' +
+          'This likely indicates a bug in React.',
+      );
+    }
+  }
+  if (cache.refCount === 0) {
+    scheduleCallback(NormalPriority, () => {
+      cache.controller.abort();
+    });
+  }
+}
+
 export function pushCacheProvider(workInProgress: Fiber, cache: Cache) {
   if (!enableCache) {
     return;
@@ -78,8 +142,14 @@ export function requestCacheFromPool(renderLanes: Lanes): Cache {
   if (pooledCache !== null) {
     return pooledCache;
   }
-  // Create a fresh cache.
-  pooledCache = new Map();
+  // Create a fresh cache. The pooled cache must be owned - it is freed
+  // in releaseRootPooledCache() - but the cache instance handed out
+  // is retained/released in the commit phase of the component that
+  // references it (i.e. the host root, cache boundary, or suspense component).
+  // In other words, pooledCache is conceptually an Option<Arc<Cache>> (owned),
+  // whereas the return value of this function is a &Arc<Cache> (borrowed).
+  pooledCache = createCache();
+  retainCache(pooledCache);
   return pooledCache;
 }
 
@@ -91,7 +161,13 @@ export function pushRootCachePool(root: FiberRoot) {
   // from `root.pooledCache`. If it's currently `null`, we will lazily
   // initialize it the first time it's requested. However, we only mutate
   // the root itself during the complete/unwind phase of the HostRoot.
-  pooledCache = root.pooledCache;
+  const rootCache = root.pooledCache;
+  if (rootCache != null) {
+    pooledCache = rootCache;
+    root.pooledCache = null;
+  } else {
+    pooledCache = null;
+  }
 }
 
 export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) {
@@ -157,7 +233,6 @@ export function getSuspendedCachePool(): SpawnedCachePool | null {
   if (!enableCache) {
     return null;
   }
-
   // We check the cache on the stack first, since that's the one any new Caches
   // would have accessed.
   let pool = pooledCache;
diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.new.js b/packages/react-reconciler/src/ReactFiberCommitWork.new.js
index 235bd74361730..efdc515d9c0b2 100644
--- a/packages/react-reconciler/src/ReactFiberCommitWork.new.js
+++ b/packages/react-reconciler/src/ReactFiberCommitWork.new.js
@@ -24,6 +24,7 @@ import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.new';
 import type {Wakeable} from 'shared/ReactTypes';
 import type {OffscreenState} from './ReactFiberOffscreenComponent';
 import type {HookFlags} from './ReactHookEffectTags';
+import type {Cache} from './ReactFiberCacheComponent.new';
 
 import {
   enableCreateEventHandleAPI,
@@ -39,6 +40,7 @@ import {
   enableSuspenseLayoutEffectSemantics,
   enableUpdaterTracking,
   warnAboutCallbackRefReturningFunction,
+  enableCache,
 } from 'shared/ReactFeatureFlags';
 import {
   FunctionComponent,
@@ -58,6 +60,7 @@ import {
   ScopeComponent,
   OffscreenComponent,
   LegacyHiddenComponent,
+  CacheComponent,
 } from './ReactWorkTags';
 import {detachDeletedInstance} from './ReactFiberHostConfig';
 import {
@@ -153,6 +156,7 @@ import {
   markComponentLayoutEffectUnmountStarted,
   markComponentLayoutEffectUnmountStopped,
 } from './SchedulingProfiler';
+import {releaseCache, retainCache} from './ReactFiberCacheComponent.new';
 
 let didWarnAboutUndefinedSnapshotBeforeUpdate: Set<mixed> | null = null;
 if (__DEV__) {
@@ -2665,6 +2669,82 @@ function commitPassiveMountOnFiber(
       }
       break;
     }
+    case HostRoot: {
+      if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (finishedWork.alternate !== null) {
+          previousCache = finishedWork.alternate.memoizedState.cache;
+        }
+        const nextCache = finishedWork.memoizedState.cache;
+        // Retain/release the root cache.
+        // Note that on initial mount, previousCache and nextCache will be the same
+        // and this retain won't occur. To counter this, we instead retain the HostRoot's
+        // initial cache when creating the root itself (see createFiberRoot() in
+        // ReactFiberRoot.js). Subsequent updates that change the cache are reflected
+        // here, such that previous/next caches are retained correctly.
+        if (nextCache !== previousCache) {
+          retainCache(nextCache);
+          if (previousCache != null) {
+            releaseCache(previousCache);
+          }
+        }
+      }
+      break;
+    }
+    case LegacyHiddenComponent:
+    case OffscreenComponent: {
+      if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (
+          finishedWork.alternate !== null &&
+          finishedWork.alternate.memoizedState !== null &&
+          finishedWork.alternate.memoizedState.cachePool !== null
+        ) {
+          previousCache = finishedWork.alternate.memoizedState.cachePool.pool;
+        }
+        let nextCache: Cache | null = null;
+        if (
+          finishedWork.memoizedState !== null &&
+          finishedWork.memoizedState.cachePool !== null
+        ) {
+          nextCache = finishedWork.memoizedState.cachePool.pool;
+        }
+        // Retain/release the cache used for pending (suspended) nodes.
+        // Note that this is only reached in the non-suspended/visible case:
+        // when the content is suspended/hidden, the retain/release occurs
+        // via the parent Suspense component (see case above).
+        if (nextCache !== previousCache) {
+          if (nextCache != null) {
+            retainCache(nextCache);
+          }
+          if (previousCache != null) {
+            releaseCache(previousCache);
+          }
+        }
+      }
+      break;
+    }
+    case CacheComponent: {
+      if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (finishedWork.alternate !== null) {
+          previousCache = finishedWork.alternate.memoizedState.cache;
+        }
+        const nextCache = finishedWork.memoizedState.cache;
+        // Retain/release the cache. In theory the cache component
+        // could be "borrowing" a cache instance owned by some parent,
+        // in which case we could avoid retaining/releasing. But it
+        // is non-trivial to determine when that is the case, so we
+        // always retain/release.
+        if (nextCache !== previousCache) {
+          retainCache(nextCache);
+          if (previousCache != null) {
+            releaseCache(previousCache);
+          }
+        }
+      }
+      break;
+    }
   }
 }
 
@@ -2871,6 +2951,43 @@ function commitPassiveUnmountInsideDeletedTreeOnFiber(
       }
       break;
     }
+    // TODO: run passive unmount effects when unmounting a root.
+    // Because passive unmount effects are not currently run,
+    // the cache instance owned by the root will never be freed.
+    // When effects are run, the cache should be freed here:
+    // case HostRoot: {
+    //   if (enableCache) {
+    //     const cache = current.memoizedState.cache;
+    //     releaseCache(cache);
+    //   }
+    //   break;
+    // }
+    case LegacyHiddenComponent:
+    case OffscreenComponent: {
+      if (enableCache) {
+        if (
+          current.memoizedState !== null &&
+          current.memoizedState.cachePool !== null
+        ) {
+          const cache: Cache = current.memoizedState.cachePool.pool;
+          // Retain/release the cache used for pending (suspended) nodes.
+          // Note that this is only reached in the non-suspended/visible case:
+          // when the content is suspended/hidden, the retain/release occurs
+          // via the parent Suspense component (see case above).
+          if (cache != null) {
+            retainCache(cache);
+          }
+        }
+      }
+      break;
+    }
+    case CacheComponent: {
+      if (enableCache) {
+        const cache = current.memoizedState.cache;
+        releaseCache(cache);
+      }
+      break;
+    }
   }
 }
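
The three new cases above (HostRoot, LegacyHidden/Offscreen, CacheComponent) apply the
same compare/retain/release step during the passive mount phase. A condensed sketch of
that shared pattern, written against the retainCache/releaseCache exports from
ReactFiberCacheComponent; the commitCacheChange helper name is hypothetical, and the
real code inlines this check per work tag.

function commitCacheChange(previousCache, nextCache) {
  if (nextCache === previousCache) {
    // Same instance as before: the existing reference still stands, so
    // neither a retain nor a release is needed.
    return;
  }
  if (nextCache != null) {
    retainCache(nextCache); // this fiber now owns a reference to the new cache
  }
  if (previousCache != null) {
    releaseCache(previousCache); // and gives up its reference to the old one
  }
}
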
 
diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.old.js b/packages/react-reconciler/src/ReactFiberCommitWork.old.js
index c4da995064d29..7d4d5d7aeecef 100644
--- a/packages/react-reconciler/src/ReactFiberCommitWork.old.js
+++ b/packages/react-reconciler/src/ReactFiberCommitWork.old.js
@@ -24,6 +24,7 @@ import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.old';
 import type {Wakeable} from 'shared/ReactTypes';
 import type {OffscreenState} from './ReactFiberOffscreenComponent';
 import type {HookFlags} from './ReactHookEffectTags';
+import type {Cache} from './ReactFiberCacheComponent.old';
 
 import {
   enableCreateEventHandleAPI,
@@ -39,6 +40,7 @@ import {
   enableSuspenseLayoutEffectSemantics,
   enableUpdaterTracking,
   warnAboutCallbackRefReturningFunction,
+  enableCache,
 } from 'shared/ReactFeatureFlags';
 import {
   FunctionComponent,
@@ -58,6 +60,7 @@ import {
   ScopeComponent,
   OffscreenComponent,
   LegacyHiddenComponent,
+  CacheComponent,
 } from './ReactWorkTags';
 import {detachDeletedInstance} from './ReactFiberHostConfig';
 import {
@@ -153,6 +156,7 @@ import {
   markComponentLayoutEffectUnmountStarted,
   markComponentLayoutEffectUnmountStopped,
 } from './SchedulingProfiler';
+import {releaseCache, retainCache} from './ReactFiberCacheComponent.old';
 
 let didWarnAboutUndefinedSnapshotBeforeUpdate: Set<mixed> | null = null;
 if (__DEV__) {
@@ -2665,6 +2669,82 @@ function commitPassiveMountOnFiber(
       }
       break;
     }
+    case HostRoot: {
+      if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (finishedWork.alternate !== null) {
+          previousCache = finishedWork.alternate.memoizedState.cache;
+        }
+        const nextCache = finishedWork.memoizedState.cache;
+        // Retain/release the root cache.
+        // Note that on initial mount, previousCache and nextCache will be the same
+        // and this retain won't occur. To counter this, we instead retain the HostRoot's
+        // initial cache when creating the root itself (see createFiberRoot() in
+        // ReactFiberRoot.js). Subsequent updates that change the cache are reflected
+        // here, such that previous/next caches are retained correctly.
+        if (nextCache !== previousCache) {
+          retainCache(nextCache);
+          if (previousCache != null) {
+            releaseCache(previousCache);
+          }
+        }
+      }
+      break;
+    }
+    case LegacyHiddenComponent:
+    case OffscreenComponent: {
+      if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (
+          finishedWork.alternate !== null &&
+          finishedWork.alternate.memoizedState !== null &&
+          finishedWork.alternate.memoizedState.cachePool !== null
+        ) {
+          previousCache = finishedWork.alternate.memoizedState.cachePool.pool;
+        }
+        let nextCache: Cache | null = null;
+        if (
+          finishedWork.memoizedState !== null &&
+          finishedWork.memoizedState.cachePool !== null
+        ) {
+          nextCache = finishedWork.memoizedState.cachePool.pool;
+        }
+        // Retain/release the cache used for pending (suspended) nodes.
+        // Note that this is only reached in the non-suspended/visible case:
+        // when the content is suspended/hidden, the retain/release occurs
+        // via the parent Suspense component (see case above).
+        if (nextCache !== previousCache) {
+          if (nextCache != null) {
+            retainCache(nextCache);
+          }
+          if (previousCache != null) {
+            releaseCache(previousCache);
+          }
+        }
+      }
+      break;
+    }
+    case CacheComponent: {
+      if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (finishedWork.alternate !== null) {
+          previousCache = finishedWork.alternate.memoizedState.cache;
+        }
+        const nextCache = finishedWork.memoizedState.cache;
+        // Retain/release the cache. In theory the cache component
+        // could be "borrowing" a cache instance owned by some parent,
+        // in which case we could avoid retaining/releasing. But it
+        // is non-trivial to determine when that is the case, so we
+        // always retain/release.
+        if (nextCache !== previousCache) {
+          retainCache(nextCache);
+          if (previousCache != null) {
+            releaseCache(previousCache);
+          }
+        }
+      }
+      break;
+    }
   }
 }
 
@@ -2871,6 +2951,43 @@ function commitPassiveUnmountInsideDeletedTreeOnFiber(
       }
       break;
     }
+    // TODO: run passive unmount effects when unmounting a root.
+    // Because passive unmount effects are not currently run,
+    // the cache instance owned by the root will never be freed.
+    // When effects are run, the cache should be freed here:
+    // case HostRoot: {
+    //   if (enableCache) {
+    //     const cache = current.memoizedState.cache;
+    //     releaseCache(cache);
+    //   }
+    //   break;
+    // }
+    case LegacyHiddenComponent:
+    case OffscreenComponent: {
+      if (enableCache) {
+        if (
+          current.memoizedState !== null &&
+          current.memoizedState.cachePool !== null
+        ) {
+          const cache: Cache = current.memoizedState.cachePool.pool;
+          // Retain/release the cache used for pending (suspended) nodes.
+          // Note that this is only reached in the non-suspended/visible case:
+          // when the content is suspended/hidden, the retain/release occurs
+          // via the parent Suspense component (see case above).
+          if (cache != null) {
+            retainCache(cache);
+          }
+        }
+      }
+      break;
+    }
+    case CacheComponent: {
+      if (enableCache) {
+        const cache = current.memoizedState.cache;
+        releaseCache(cache);
+      }
+      break;
+    }
   }
 }
 
diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js
index dadec516c3a4b..20a7fc52db13a 100644
--- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js
+++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js
@@ -72,6 +72,7 @@ import {
   ChildDeletion,
   StaticMask,
   MutationMask,
+  Passive,
 } from './ReactFiberFlags';
 
 import {
@@ -848,7 +849,15 @@ function completeWork(
       if (enableCache) {
         popRootCachePool(fiberRoot, renderLanes);
 
+        let previousCache: Cache | null = null;
+        if (workInProgress.alternate !== null) {
+          previousCache = workInProgress.alternate.memoizedState.cache;
+        }
         const cache: Cache = workInProgress.memoizedState.cache;
+        if (cache !== previousCache) {
+          // Run passive effects to retain/release the cache.
+          workInProgress.flags |= Passive;
+        }
         popCacheProvider(workInProgress, cache);
       }
       popHostContainer(workInProgress);
@@ -1089,6 +1098,29 @@ function completeWork(
         prevDidTimeout = prevState !== null;
       }
 
+      if (enableCache && nextDidTimeout) {
+        const offscreenFiber: Fiber = (workInProgress.child: any);
+        let previousCache: Cache | null = null;
+        if (
+          offscreenFiber.alternate !== null &&
+          offscreenFiber.alternate.memoizedState !== null &&
+          offscreenFiber.alternate.memoizedState.cachePool !== null
+        ) {
+          previousCache = offscreenFiber.alternate.memoizedState.cachePool.pool;
+        }
+        let cache: Cache | null = null;
+        if (
+          offscreenFiber.memoizedState !== null &&
+          offscreenFiber.memoizedState.cachePool !== null
+        ) {
+          cache = offscreenFiber.memoizedState.cachePool.pool;
+        }
+        if (cache !== previousCache) {
+          // Run passive effects to retain/release the cache.
+          offscreenFiber.flags |= Passive;
+        }
+      }
+
       // If the suspended state of the boundary changes, we need to schedule
       // an effect to toggle the subtree's visibility. When we switch from
       // fallback -> primary, the inner Offscreen fiber schedules this effect
@@ -1465,6 +1497,25 @@ function completeWork(
       }
 
       if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (
+          workInProgress.alternate !== null &&
+          workInProgress.alternate.memoizedState !== null &&
+          workInProgress.alternate.memoizedState.cachePool !== null
+        ) {
+          previousCache = workInProgress.alternate.memoizedState.cachePool.pool;
+        }
+        let cache: Cache | null = null;
+        if (
+          workInProgress.memoizedState !== null &&
+          workInProgress.memoizedState.cachePool !== null
+        ) {
+          cache = workInProgress.memoizedState.cachePool.pool;
+        }
+        if (cache !== previousCache) {
+          // Run passive effects to retain/release the cache.
+          workInProgress.flags |= Passive;
+        }
         const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any);
         if (spawnedCachePool !== null) {
           popCachePool(workInProgress);
@@ -1475,7 +1526,15 @@ function completeWork(
     }
     case CacheComponent: {
       if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (workInProgress.alternate !== null) {
+          previousCache = workInProgress.alternate.memoizedState.cache;
+        }
         const cache: Cache = workInProgress.memoizedState.cache;
+        if (cache !== previousCache) {
+          // Run passive effects to retain/release the cache.
+          workInProgress.flags |= Passive;
+        }
         popCacheProvider(workInProgress, cache);
         bubbleProperties(workInProgress);
         return null;
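
Each enableCache block added to completeWork above has the same shape: read the
previous cache from the alternate, the next cache from the work-in-progress fiber (or
from the Suspense boundary's Offscreen child), and set the Passive flag only when the
identity changed, so the retain/release in ReactFiberCommitWork actually runs. A
condensed sketch; the helper name is hypothetical, and Passive is the flag imported
from './ReactFiberFlags' at the top of this file.

function markPassiveIfCacheChanged(fiber, previousCache, nextCache) {
  if (nextCache !== previousCache) {
    // Schedule passive effects so commitPassiveMountOnFiber can retain the
    // new cache instance and release the previous one during commit.
    fiber.flags |= Passive;
  }
}
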
diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js
index 06fbf5abff50f..305359aef206e 100644
--- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js
+++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js
@@ -72,6 +72,7 @@ import {
   ChildDeletion,
   StaticMask,
   MutationMask,
+  Passive,
 } from './ReactFiberFlags';
 
 import {
@@ -848,7 +849,15 @@ function completeWork(
       if (enableCache) {
         popRootCachePool(fiberRoot, renderLanes);
 
+        let previousCache: Cache | null = null;
+        if (workInProgress.alternate !== null) {
+          previousCache = workInProgress.alternate.memoizedState.cache;
+        }
         const cache: Cache = workInProgress.memoizedState.cache;
+        if (cache !== previousCache) {
+          // Run passive effects to retain/release the cache.
+          workInProgress.flags |= Passive;
+        }
         popCacheProvider(workInProgress, cache);
       }
       popHostContainer(workInProgress);
@@ -1089,6 +1098,29 @@ function completeWork(
         prevDidTimeout = prevState !== null;
       }
 
+      if (enableCache && nextDidTimeout) {
+        const offscreenFiber: Fiber = (workInProgress.child: any);
+        let previousCache: Cache | null = null;
+        if (
+          offscreenFiber.alternate !== null &&
+          offscreenFiber.alternate.memoizedState !== null &&
+          offscreenFiber.alternate.memoizedState.cachePool !== null
+        ) {
+          previousCache = offscreenFiber.alternate.memoizedState.cachePool.pool;
+        }
+        let cache: Cache | null = null;
+        if (
+          offscreenFiber.memoizedState !== null &&
+          offscreenFiber.memoizedState.cachePool !== null
+        ) {
+          cache = offscreenFiber.memoizedState.cachePool.pool;
+        }
+        if (cache !== previousCache) {
+          // Run passive effects to retain/release the cache.
+          offscreenFiber.flags |= Passive;
+        }
+      }
+
       // If the suspended state of the boundary changes, we need to schedule
       // an effect to toggle the subtree's visibility. When we switch from
       // fallback -> primary, the inner Offscreen fiber schedules this effect
@@ -1465,6 +1497,25 @@ function completeWork(
       }
 
       if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (
+          workInProgress.alternate !== null &&
+          workInProgress.alternate.memoizedState !== null &&
+          workInProgress.alternate.memoizedState.cachePool !== null
+        ) {
+          previousCache = workInProgress.alternate.memoizedState.cachePool.pool;
+        }
+        let cache: Cache | null = null;
+        if (
+          workInProgress.memoizedState !== null &&
+          workInProgress.memoizedState.cachePool !== null
+        ) {
+          cache = workInProgress.memoizedState.cachePool.pool;
+        }
+        if (cache !== previousCache) {
+          // Run passive effects to retain/release the cache.
+          workInProgress.flags |= Passive;
+        }
         const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any);
         if (spawnedCachePool !== null) {
           popCachePool(workInProgress);
@@ -1475,7 +1526,15 @@ function completeWork(
     }
     case CacheComponent: {
       if (enableCache) {
+        let previousCache: Cache | null = null;
+        if (workInProgress.alternate !== null) {
+          previousCache = workInProgress.alternate.memoizedState.cache;
+        }
         const cache: Cache = workInProgress.memoizedState.cache;
+        if (cache !== previousCache) {
+          // Run passive effects to retain/release the cache.
+          workInProgress.flags |= Passive;
+        }
         popCacheProvider(workInProgress, cache);
         bubbleProperties(workInProgress);
         return null;
diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js
index afaebd20a5264..d21704a1b6fea 100644
--- a/packages/react-reconciler/src/ReactFiberHooks.new.js
+++ b/packages/react-reconciler/src/ReactFiberHooks.new.js
@@ -110,7 +110,7 @@ import {
 import {getIsRendering} from './ReactCurrentFiber';
 import {logStateUpdateScheduled} from './DebugTracing';
 import {markStateUpdateScheduled} from './SchedulingProfiler';
-import {CacheContext} from './ReactFiberCacheComponent.new';
+import {createCache, CacheContext} from './ReactFiberCacheComponent.new';
 import {
   createUpdate as createLegacyQueueUpdate,
   enqueueUpdate as enqueueLegacyQueueUpdate,
@@ -2124,6 +2124,9 @@ function updateRefresh() {
 }
 
 function refreshCache<T>(fiber: Fiber, seedKey: ?() => T, seedValue: T) {
+  if (!enableCache) {
+    return;
+  }
   // TODO: Does Cache work in legacy mode? Should decide and write a test.
   // TODO: Consider warning if the refresh is at discrete priority, or if we
   // otherwise suspect that it wasn't batched properly.
@@ -2139,11 +2142,14 @@ function refreshCache<T>(fiber: Fiber, seedKey: ?() => T, seedValue: T) {
           entangleLegacyQueueTransitions(root, provider, lane);
         }
 
-        const seededCache = new Map();
+        // TODO: If a refresh never commits, the new cache created here must be
+        // released. A simple case is starting to refresh a cache boundary, but
+        // then unmounting that boundary before the refresh completes.
+        const seededCache = createCache();
         if (seedKey !== null && seedKey !== undefined && root !== null) {
           // Seed the cache with the value passed by the caller. This could be
           // from a server mutation, or it could be a streaming response.
-          seededCache.set(seedKey, seedValue);
+          seededCache.data.set(seedKey, seedValue);
         }
 
         // Schedule an update on the cache boundary to trigger a refresh.
@@ -2390,15 +2396,23 @@ function markUpdateInDevTools(fiber, lane, action) {
   }
 }
 
+function getCacheSignal(): AbortSignal {
+  if (!enableCache) {
+    throw new Error('Not implemented.');
+  }
+  const cache: Cache = readContext(CacheContext);
+  return cache.controller.signal;
+}
+
 function getCacheForType<T>(resourceType: () => T): T {
   if (!enableCache) {
     throw new Error('Not implemented.');
   }
   const cache: Cache = readContext(CacheContext);
-  let cacheForType: T | void = (cache.get(resourceType): any);
+  let cacheForType: T | void = (cache.data.get(resourceType): any);
   if (cacheForType === undefined) {
     cacheForType = resourceType();
-    cache.set(resourceType, cacheForType);
+    cache.data.set(resourceType, cacheForType);
   }
   return cacheForType;
 }
@@ -2426,6 +2440,7 @@ export const ContextOnlyDispatcher: Dispatcher = {
   unstable_isNewReconciler: enableNewReconciler,
 };
 if (enableCache) {
+  (ContextOnlyDispatcher: Dispatcher).getCacheSignal = getCacheSignal;
   (ContextOnlyDispatcher: Dispatcher).getCacheForType = getCacheForType;
   (ContextOnlyDispatcher: Dispatcher).useCacheRefresh = throwInvalidHookError;
 }
@@ -2453,6 +2468,7 @@ const HooksDispatcherOnMount: Dispatcher = {
   unstable_isNewReconciler: enableNewReconciler,
 };
 if (enableCache) {
+  (HooksDispatcherOnMount: Dispatcher).getCacheSignal = getCacheSignal;
   (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType;
   (HooksDispatcherOnMount: Dispatcher).useCacheRefresh = mountRefresh;
 }
@@ -2480,6 +2496,7 @@ const HooksDispatcherOnUpdate: Dispatcher = {
   unstable_isNewReconciler: enableNewReconciler,
 };
 if (enableCache) {
+  (HooksDispatcherOnUpdate: Dispatcher).getCacheSignal = getCacheSignal;
   (HooksDispatcherOnUpdate: Dispatcher).getCacheForType = getCacheForType;
   (HooksDispatcherOnUpdate: Dispatcher).useCacheRefresh = updateRefresh;
 }
@@ -2507,6 +2524,7 @@ const HooksDispatcherOnRerender: Dispatcher = {
   unstable_isNewReconciler: enableNewReconciler,
 };
 if (enableCache) {
+  (HooksDispatcherOnRerender: Dispatcher).getCacheSignal = getCacheSignal;
   (HooksDispatcherOnRerender: Dispatcher).getCacheForType = getCacheForType;
   (HooksDispatcherOnRerender: Dispatcher).useCacheRefresh = updateRefresh;
 }
@@ -2677,6 +2695,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (HooksDispatcherOnMountInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (HooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType;
     (HooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -2818,6 +2837,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).getCacheForType = getCacheForType;
     (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -2959,6 +2979,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (HooksDispatcherOnUpdateInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (HooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType;
     (HooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -3101,6 +3122,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType;
     (HooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -3259,6 +3281,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType;
     (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -3417,6 +3440,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType;
     (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -3576,6 +3600,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType;
     (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js
index 03df5b1444e62..cf6786e617daf 100644
--- a/packages/react-reconciler/src/ReactFiberHooks.old.js
+++ b/packages/react-reconciler/src/ReactFiberHooks.old.js
@@ -110,7 +110,7 @@ import {
 import {getIsRendering} from './ReactCurrentFiber';
 import {logStateUpdateScheduled} from './DebugTracing';
 import {markStateUpdateScheduled} from './SchedulingProfiler';
-import {CacheContext} from './ReactFiberCacheComponent.old';
+import {createCache, CacheContext} from './ReactFiberCacheComponent.old';
 import {
   createUpdate as createLegacyQueueUpdate,
   enqueueUpdate as enqueueLegacyQueueUpdate,
@@ -2124,6 +2124,9 @@ function updateRefresh() {
 }
 
 function refreshCache<T>(fiber: Fiber, seedKey: ?() => T, seedValue: T) {
+  if (!enableCache) {
+    return;
+  }
   // TODO: Does Cache work in legacy mode? Should decide and write a test.
   // TODO: Consider warning if the refresh is at discrete priority, or if we
   // otherwise suspect that it wasn't batched properly.
@@ -2139,11 +2142,14 @@ function refreshCache<T>(fiber: Fiber, seedKey: ?() => T, seedValue: T) {
           entangleLegacyQueueTransitions(root, provider, lane);
         }
 
-        const seededCache = new Map();
+        // TODO: If a refresh never commits, the new cache created here must be
+        // released. A simple case is starting to refresh a cache boundary, but
+        // then unmounting that boundary before the refresh completes.
+        const seededCache = createCache();
         if (seedKey !== null && seedKey !== undefined && root !== null) {
           // Seed the cache with the value passed by the caller. This could be
           // from a server mutation, or it could be a streaming response.
-          seededCache.set(seedKey, seedValue);
+          seededCache.data.set(seedKey, seedValue);
         }
 
         // Schedule an update on the cache boundary to trigger a refresh.
@@ -2390,15 +2396,23 @@ function markUpdateInDevTools(fiber, lane, action) {
   }
 }
 
+function getCacheSignal(): AbortSignal {
+  if (!enableCache) {
+    throw new Error('Not implemented.');
+  }
+  const cache: Cache = readContext(CacheContext);
+  return cache.controller.signal;
+}
+
 function getCacheForType<T>(resourceType: () => T): T {
   if (!enableCache) {
     throw new Error('Not implemented.');
   }
   const cache: Cache = readContext(CacheContext);
-  let cacheForType: T | void = (cache.get(resourceType): any);
+  let cacheForType: T | void = (cache.data.get(resourceType): any);
   if (cacheForType === undefined) {
     cacheForType = resourceType();
-    cache.set(resourceType, cacheForType);
+    cache.data.set(resourceType, cacheForType);
   }
   return cacheForType;
 }
@@ -2426,6 +2440,7 @@ export const ContextOnlyDispatcher: Dispatcher = {
   unstable_isNewReconciler: enableNewReconciler,
 };
 if (enableCache) {
+  (ContextOnlyDispatcher: Dispatcher).getCacheSignal = getCacheSignal;
   (ContextOnlyDispatcher: Dispatcher).getCacheForType = getCacheForType;
   (ContextOnlyDispatcher: Dispatcher).useCacheRefresh = throwInvalidHookError;
 }
@@ -2453,6 +2468,7 @@ const HooksDispatcherOnMount: Dispatcher = {
   unstable_isNewReconciler: enableNewReconciler,
 };
 if (enableCache) {
+  (HooksDispatcherOnMount: Dispatcher).getCacheSignal = getCacheSignal;
   (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType;
   (HooksDispatcherOnMount: Dispatcher).useCacheRefresh = mountRefresh;
 }
@@ -2480,6 +2496,7 @@ const HooksDispatcherOnUpdate: Dispatcher = {
   unstable_isNewReconciler: enableNewReconciler,
 };
 if (enableCache) {
+  (HooksDispatcherOnUpdate: Dispatcher).getCacheSignal = getCacheSignal;
   (HooksDispatcherOnUpdate: Dispatcher).getCacheForType = getCacheForType;
   (HooksDispatcherOnUpdate: Dispatcher).useCacheRefresh = updateRefresh;
 }
@@ -2507,6 +2524,7 @@ const HooksDispatcherOnRerender: Dispatcher = {
   unstable_isNewReconciler: enableNewReconciler,
 };
 if (enableCache) {
+  (HooksDispatcherOnRerender: Dispatcher).getCacheSignal = getCacheSignal;
   (HooksDispatcherOnRerender: Dispatcher).getCacheForType = getCacheForType;
   (HooksDispatcherOnRerender: Dispatcher).useCacheRefresh = updateRefresh;
 }
@@ -2677,6 +2695,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (HooksDispatcherOnMountInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (HooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType;
     (HooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -2818,6 +2837,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).getCacheForType = getCacheForType;
     (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -2959,6 +2979,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (HooksDispatcherOnUpdateInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (HooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType;
     (HooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -3101,6 +3122,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType;
     (HooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -3259,6 +3281,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType;
     (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -3417,6 +3440,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType;
     (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
@@ -3576,6 +3600,7 @@ if (__DEV__) {
     unstable_isNewReconciler: enableNewReconciler,
   };
   if (enableCache) {
+    (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheSignal = getCacheSignal;
     (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType;
     (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() {
       currentHookNameInDev = 'useCacheRefresh';
diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js
index ad124a432a4a2..c1f34e1052fc4 100644
--- a/packages/react-reconciler/src/ReactFiberLane.new.js
+++ b/packages/react-reconciler/src/ReactFiberLane.new.js
@@ -17,7 +17,6 @@ export type Lane = number;
 export type LaneMap<T> = Array<T>;
 
 import {
-  enableCache,
   enableSchedulingProfiler,
   enableUpdaterTracking,
   allowConcurrentByDefault,
@@ -635,15 +634,6 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) {
 
   root.entangledLanes &= remainingLanes;
 
-  if (enableCache) {
-    const pooledCacheLanes = (root.pooledCacheLanes &= remainingLanes);
-    if (pooledCacheLanes === NoLanes) {
-      // None of the remaining work relies on the cache pool. Clear it so
-      // subsequent requests get a new cache.
-      root.pooledCache = null;
-    }
-  }
-
   const entanglements = root.entanglements;
   const eventTimes = root.eventTimes;
   const expirationTimes = root.expirationTimes;
diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js
index 4a064a3846515..c81191f6a07e5 100644
--- a/packages/react-reconciler/src/ReactFiberLane.old.js
+++ b/packages/react-reconciler/src/ReactFiberLane.old.js
@@ -17,7 +17,6 @@ export type Lane = number;
 export type LaneMap<T> = Array<T>;
 
 import {
-  enableCache,
   enableSchedulingProfiler,
   enableUpdaterTracking,
   allowConcurrentByDefault,
@@ -635,15 +634,6 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) {
 
   root.entangledLanes &= remainingLanes;
 
-  if (enableCache) {
-    const pooledCacheLanes = (root.pooledCacheLanes &= remainingLanes);
-    if (pooledCacheLanes === NoLanes) {
-      // None of the remaining work relies on the cache pool. Clear it so
-      // subsequent requests get a new cache.
-      root.pooledCache = null;
-    }
-  }
-
   const entanglements = root.entanglements;
   const eventTimes = root.eventTimes;
   const expirationTimes = root.expirationTimes;
diff --git a/packages/react-reconciler/src/ReactFiberRoot.new.js b/packages/react-reconciler/src/ReactFiberRoot.new.js
index db012ad1db19d..803adee1e22dd 100644
--- a/packages/react-reconciler/src/ReactFiberRoot.new.js
+++ b/packages/react-reconciler/src/ReactFiberRoot.new.js
@@ -28,6 +28,7 @@ import {
 } from 'shared/ReactFeatureFlags';
 import {initializeUpdateQueue} from './ReactUpdateQueue.new';
 import {LegacyRoot, ConcurrentRoot} from './ReactRootTags';
+import {createCache, retainCache} from './ReactFiberCacheComponent.new';
 
 function FiberRootNode(containerInfo, tag, hydrate) {
   this.tag = tag;
@@ -117,8 +118,18 @@ export function createFiberRoot(
   uninitializedFiber.stateNode = root;
 
   if (enableCache) {
-    const initialCache = new Map();
+    const initialCache = createCache();
+    retainCache(initialCache);
+
+    // The pooledCache is a fresh cache instance that is used temporarily
+    // for newly mounted boundaries during a render. In general, the
+    // pooledCache is always cleared from the root at the end of a render:
+    // it is either released when render commits, or moved to an Offscreen
+    // component if rendering suspends. Because the lifetime of the pooled
+    // cache is distinct from the main memoizedState.cache, it must be
+    // retained separately.
     root.pooledCache = initialCache;
+    retainCache(initialCache);
     const initialState = {
       element: null,
       cache: initialCache,
diff --git a/packages/react-reconciler/src/ReactFiberRoot.old.js b/packages/react-reconciler/src/ReactFiberRoot.old.js
index 03332fd545619..504dac966ef22 100644
--- a/packages/react-reconciler/src/ReactFiberRoot.old.js
+++ b/packages/react-reconciler/src/ReactFiberRoot.old.js
@@ -28,6 +28,7 @@ import {
 } from 'shared/ReactFeatureFlags';
 import {initializeUpdateQueue} from './ReactUpdateQueue.old';
 import {LegacyRoot, ConcurrentRoot} from './ReactRootTags';
+import {createCache, retainCache} from './ReactFiberCacheComponent.old';
 
 function FiberRootNode(containerInfo, tag, hydrate) {
   this.tag = tag;
@@ -117,8 +118,18 @@ export function createFiberRoot(
   uninitializedFiber.stateNode = root;
 
   if (enableCache) {
-    const initialCache = new Map();
+    const initialCache = createCache();
+    retainCache(initialCache);
+
+    // The pooledCache is a fresh cache instance that is used temporarily
+    // for newly mounted boundaries during a render. In general, the
+    // pooledCache is always cleared from the root at the end of a render:
+    // it is either released when render commits, or moved to an Offscreen
+    // component if rendering suspends. Because the lifetime of the pooled
+    // cache is distinct from the main memoizedState.cache, it must be
+    // retained separately.
     root.pooledCache = initialCache;
+    retainCache(initialCache);
     const initialState = {
       element: null,
       cache: initialCache,
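
Note that createFiberRoot retains the initial cache twice because two distinct owners
reference it: root.memoizedState.cache (released when a later passive mount of the
HostRoot swaps in a different cache) and root.pooledCache (released by
releaseRootPooledCache in the work loop below). A sketch of the resulting counts,
reusing the standalone createCache/retainCache model from earlier; the plain `root`
object is a stand-in for the FiberRoot.

const root = {pooledCache: null};
const initialCache = createCache(); // refCount: 0
retainCache(initialCache);          // refCount: 1, owned via HostRoot memoizedState.cache
root.pooledCache = initialCache;
retainCache(initialCache);          // refCount: 2, owned via the root's cache pool
// releaseRootPooledCache() later drops the pool's reference; replacing the root
// cache (HostRoot case of commitPassiveMountOnFiber) drops the other.
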
diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js
index 89286bd573858..930fc608f4724 100644
--- a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js
+++ b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js
@@ -32,6 +32,7 @@ import {
   skipUnmountedBoundaries,
   enableUpdaterTracking,
   warnOnSubscriptionInsideStartTransition,
+  enableCache,
 } from 'shared/ReactFeatureFlags';
 import ReactSharedInternals from 'shared/ReactSharedInternals';
 import is from 'shared/objectIs';
@@ -234,6 +235,7 @@ import {
   isDevToolsPresent,
 } from './ReactFiberDevToolsHook.new';
 import {onCommitRoot as onCommitRootTestSelector} from './ReactTestSelectors';
+import {releaseCache} from './ReactFiberCacheComponent.new';
 
 const ceil = Math.ceil;
 
@@ -331,6 +333,7 @@ let rootDoesHavePassiveEffects: boolean = false;
 let rootWithPendingPassiveEffects: FiberRoot | null = null;
 let pendingPassiveEffectsLanes: Lanes = NoLanes;
 let pendingPassiveProfilerEffects: Array<Fiber> = [];
+let pendingPassiveEffectsRemainingLanes: Lanes = NoLanes;
 
 // Use these to prevent an infinite loop of nested updates
 const NESTED_UPDATE_LIMIT = 50;
@@ -1900,8 +1903,12 @@ function commitRootImpl(root, renderPriorityLevel) {
   ) {
     if (!rootDoesHavePassiveEffects) {
       rootDoesHavePassiveEffects = true;
+      pendingPassiveEffectsRemainingLanes = remainingLanes;
       scheduleCallback(NormalSchedulerPriority, () => {
         flushPassiveEffects();
+        // This render triggered passive effects: release the root cache pool
+        // *after* passive effects fire to avoid freeing a cache pool that may
+        // be referenced by a node in the tree (HostRoot, Cache boundary, etc.)
         return null;
       });
     }
@@ -2027,6 +2034,10 @@ function commitRootImpl(root, renderPriorityLevel) {
     rootDoesHavePassiveEffects = false;
     rootWithPendingPassiveEffects = root;
     pendingPassiveEffectsLanes = lanes;
+  } else {
+    // There were no passive effects, so we can immediately release the cache
+    // pool for this render.
+    releaseRootPooledCache(root, remainingLanes);
   }
 
   // Read this again, since an effect might have updated it
@@ -2127,6 +2138,21 @@ function commitRootImpl(root, renderPriorityLevel) {
   return null;
 }
 
+function releaseRootPooledCache(root: FiberRoot, remainingLanes: Lanes) {
+  if (enableCache) {
+    const pooledCacheLanes = (root.pooledCacheLanes &= remainingLanes);
+    if (pooledCacheLanes === NoLanes) {
+      // None of the remaining work relies on the cache pool. Clear it so
+      // subsequent requests get a new cache.
+      const pooledCache = root.pooledCache;
+      if (pooledCache != null) {
+        root.pooledCache = null;
+        releaseCache(pooledCache);
+      }
+    }
+  }
+}
+
 export function flushPassiveEffects(): boolean {
   // Returns whether passive effects were flushed.
   // TODO: Combine this check with the one in flushPassiveEffectsImpl. We should
@@ -2135,6 +2161,15 @@ export function flushPassiveEffects(): boolean {
   // `Scheduler.runWithPriority`, which accepts a function. But now we track the
   // priority within React itself, so we can mutate the variable directly.
   if (rootWithPendingPassiveEffects !== null) {
+    // Cache the root since rootWithPendingPassiveEffects is cleared in
+    // flushPassiveEffectsImpl
+    const root = rootWithPendingPassiveEffects;
+    // Cache and clear the remaining lanes flag; it must be reset since this
+    // method can be called from various places, not always from commitRoot
+    // where the remaining lanes are known
+    const remainingLanes = pendingPassiveEffectsRemainingLanes;
+    pendingPassiveEffectsRemainingLanes = NoLanes;
+
     const renderPriority = lanesToEventPriority(pendingPassiveEffectsLanes);
     const priority = lowerEventPriority(DefaultEventPriority, renderPriority);
     const prevTransition = ReactCurrentBatchConfig.transition;
@@ -2146,6 +2181,11 @@ export function flushPassiveEffects(): boolean {
     } finally {
       setCurrentUpdatePriority(previousPriority);
       ReactCurrentBatchConfig.transition = prevTransition;
+
+      // Once passive effects have run for the tree - giving components a
+      // chance to retain cache instances they use - release the pooled
+      // cache at the root (if there is one)
+      releaseRootPooledCache(root, remainingLanes);
     }
   }
   return false;
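
The net effect of the work-loop changes is an ordering guarantee: the root only gives
up its reference to the pooled cache after passive effects have had a chance to retain
it. A toy sketch of that ordering; the function and parameter names mirror the real
ones, but this is not the actual implementation.

function flushPassiveEffectsAndReleasePool(root, remainingLanes, fibersWithPassiveEffects) {
  try {
    for (const fiber of fibersWithPassiveEffects) {
      // HostRoot / Offscreen / CacheComponent fibers retainCache() the cache
      // instances they reference here, possibly including the pooled cache.
      commitPassiveMountOnFiber(fiber);
    }
  } finally {
    // Only now does the root drop its own reference, so the pooled cache is
    // aborted only if nothing above retained it.
    releaseRootPooledCache(root, remainingLanes);
  }
}
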
diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js
index d5cb23483d2d0..0b7f46c799016 100644
--- a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js
+++ b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js
@@ -32,6 +32,7 @@ import {
   skipUnmountedBoundaries,
   enableUpdaterTracking,
   warnOnSubscriptionInsideStartTransition,
+  enableCache,
 } from 'shared/ReactFeatureFlags';
 import ReactSharedInternals from 'shared/ReactSharedInternals';
 import is from 'shared/objectIs';
@@ -234,6 +235,7 @@ import {
   isDevToolsPresent,
 } from './ReactFiberDevToolsHook.old';
 import {onCommitRoot as onCommitRootTestSelector} from './ReactTestSelectors';
+import {releaseCache} from './ReactFiberCacheComponent.old';
 
 const ceil = Math.ceil;
 
@@ -331,6 +333,7 @@ let rootDoesHavePassiveEffects: boolean = false;
 let rootWithPendingPassiveEffects: FiberRoot | null = null;
 let pendingPassiveEffectsLanes: Lanes = NoLanes;
 let pendingPassiveProfilerEffects: Array<Fiber> = [];
+let pendingPassiveEffectsRemainingLanes: Lanes = NoLanes;
 
 // Use these to prevent an infinite loop of nested updates
 const NESTED_UPDATE_LIMIT = 50;
@@ -1900,8 +1903,12 @@ function commitRootImpl(root, renderPriorityLevel) {
   ) {
     if (!rootDoesHavePassiveEffects) {
       rootDoesHavePassiveEffects = true;
+      pendingPassiveEffectsRemainingLanes = remainingLanes;
       scheduleCallback(NormalSchedulerPriority, () => {
         flushPassiveEffects();
+        // This render triggered passive effects: release the root cache pool
+        // *after* passive effects fire to avoid freeing a cache pool that may
+        // be referenced by a node in the tree (HostRoot, Cache boundary, etc.)
         return null;
       });
     }
@@ -2027,6 +2034,10 @@ function commitRootImpl(root, renderPriorityLevel) {
     rootDoesHavePassiveEffects = false;
     rootWithPendingPassiveEffects = root;
     pendingPassiveEffectsLanes = lanes;
+  } else {
+    // There were no passive effects, so we can immediately release the cache
+    // pool for this render.
+    releaseRootPooledCache(root, remainingLanes);
   }
 
   // Read this again, since an effect might have updated it
@@ -2127,6 +2138,21 @@ function commitRootImpl(root, renderPriorityLevel) {
   return null;
 }
 
+function releaseRootPooledCache(root: FiberRoot, remainingLanes: Lanes) {
+  if (enableCache) {
+    const pooledCacheLanes = (root.pooledCacheLanes &= remainingLanes);
+    if (pooledCacheLanes === NoLanes) {
+      // None of the remaining work relies on the cache pool. Clear it so
+      // subsequent requests get a new cache
+      const pooledCache = root.pooledCache;
+      if (pooledCache != null) {
+        root.pooledCache = null;
+        releaseCache(pooledCache);
+      }
+    }
+  }
+}
+
 export function flushPassiveEffects(): boolean {
   // Returns whether passive effects were flushed.
   // TODO: Combine this check with the one in flushPassiveEFfectsImpl. We should
@@ -2135,6 +2161,15 @@ export function flushPassiveEffects(): boolean {
   // `Scheduler.runWithPriority`, which accepts a function. But now we track the
   // priority within React itself, so we can mutate the variable directly.
   if (rootWithPendingPassiveEffects !== null) {
+    // Cache the root since rootWithPendingPassiveEffects is cleared in
+    // flushPassiveEffectsImpl
+    const root = rootWithPendingPassiveEffects;
+    // Cache and clear the remaining lanes flag; it must be reset since this
+    // method can be called from various places, not always from commitRoot
+    // where the remaining lanes are known
+    const remainingLanes = pendingPassiveEffectsRemainingLanes;
+    pendingPassiveEffectsRemainingLanes = NoLanes;
+
     const renderPriority = lanesToEventPriority(pendingPassiveEffectsLanes);
     const priority = lowerEventPriority(DefaultEventPriority, renderPriority);
     const prevTransition = ReactCurrentBatchConfig.transition;
@@ -2146,6 +2181,11 @@ export function flushPassiveEffects(): boolean {
     } finally {
       setCurrentUpdatePriority(previousPriority);
       ReactCurrentBatchConfig.transition = prevTransition;
+
+      // Once passive effects have run for the tree - giving components a
+      // chance to retain cache instances they use - release the pooled
+      // cache at the root (if there is one)
+      releaseRootPooledCache(root, remainingLanes);
     }
   }
   return false;
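// Editorial sketch (not part of the patch): the ref-counting contract that
// releaseRootPooledCache relies on. This is an assumption inferred from the
// retainCache/releaseCache naming and from the "Cache cleanup" yields in the tests
// below, not the actual ReactFiberCacheComponent implementation (which may, for
// example, defer the abort to a Scheduler callback). The cache shape here is
// illustrative: an object exposing an AbortController and a reference count.
function releaseCacheSketch(cache) {
  cache.refCount--;
  if (cache.refCount === 0) {
    // Nothing references the cache anymore: abort its controller so listeners
    // registered through getCacheSignal() can run their cleanup.
    cache.controller.abort();
  }
}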
diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js
index f079b3e8f2b93..e971828233c10 100644
--- a/packages/react-reconciler/src/ReactInternalTypes.js
+++ b/packages/react-reconciler/src/ReactInternalTypes.js
@@ -273,6 +273,7 @@ type BasicStateAction<S> = (S => S) | S;
 type Dispatch<A> = A => void;
 
 export type Dispatcher = {|
+  getCacheSignal?: () => AbortSignal,
   getCacheForType?: <T>(resourceType: () => T) => T,
   readContext<T>(context: ReactContext<T>): T,
   useState<S>(initialState: (() => S) | S): [S, Dispatch<BasicStateAction<S>>],
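// Editorial sketch (not part of the patch): getCacheSignal is optional on the
// Dispatcher, so only cache-aware renderers install it. A hypothetical wiring,
// assuming the render's active cache exposes an AbortController and a Map keyed
// by the resourceType function (`activeCache` and its shape are illustrative,
// not taken from this diff):
const activeCache = {controller: new AbortController(), data: new Map()};

function getCacheSignalSketch(): AbortSignal {
  return activeCache.controller.signal;
}

function getCacheForTypeSketch<T>(resourceType: () => T): T {
  let entry = activeCache.data.get(resourceType);
  if (entry === undefined) {
    // First read for this resource type: create and memoize the per-type cache.
    entry = resourceType();
    activeCache.data.set(resourceType, entry);
  }
  return (entry: any);
}

const ExampleDispatcher = {
  getCacheSignal: getCacheSignalSketch,
  getCacheForType: getCacheForTypeSketch,
  // ...the remaining Dispatcher fields would be filled in by the real renderer
};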
diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js
index 31321eb07a520..7ef18875e087d 100644
--- a/packages/react-reconciler/src/__tests__/ReactCache-test.js
+++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js
@@ -1,6 +1,7 @@
 let React;
 let ReactNoop;
 let Cache;
+let getCacheSignal;
 let getCacheForType;
 let Scheduler;
 let act;
@@ -22,6 +23,7 @@ describe('ReactCache', () => {
     Scheduler = require('scheduler');
     act = require('jest-react').act;
     Suspense = React.Suspense;
+    getCacheSignal = React.unstable_getCacheSignal;
     getCacheForType = React.unstable_getCacheForType;
     useCacheRefresh = React.unstable_useCacheRefresh;
     startTransition = React.startTransition;
@@ -52,6 +54,7 @@ describe('ReactCache', () => {
           const newRecord = {
             status: 'resolved',
             value: text,
+            cleanupScheduled: false,
           };
           data.set(text, newRecord);
         } else if (record.status === 'pending') {
@@ -64,6 +67,7 @@ describe('ReactCache', () => {
           const newRecord = {
             status: 'rejected',
             value: error,
+            cleanupScheduled: false,
           };
           data.set(text, newRecord);
         } else if (record.status === 'pending') {
@@ -76,9 +80,21 @@ describe('ReactCache', () => {
   }
 
   function readText(text) {
+    const signal = getCacheSignal();
     const textCache = getCacheForType(createTextCache);
     const record = textCache.data.get(text);
     if (record !== undefined) {
+      if (!record.cleanupScheduled) {
+        // This record was seeded prior to the abort signal being available:
+        // schedule a cleanup function for it.
+        // TODO: Add ability to clean up entries seeded with useCacheRefresh()
+        record.cleanupScheduled = true;
+        signal.addEventListener('abort', () => {
+          Scheduler.unstable_yieldValue(
+            `Cache cleanup: ${text} [v${textCache.version}]`,
+          );
+        });
+      }
       switch (record.status) {
         case 'pending':
           throw record.value;
@@ -115,9 +131,15 @@ describe('ReactCache', () => {
       const newRecord = {
         status: 'pending',
         value: thenable,
+        cleanupScheduled: true,
       };
       textCache.data.set(text, newRecord);
 
+      signal.addEventListener('abort', () => {
+        Scheduler.unstable_yieldValue(
+          `Cache cleanup: ${text} [v${textCache.version}]`,
+        );
+      });
       throw thenable;
     }
   }
@@ -180,6 +202,13 @@ describe('ReactCache', () => {
     });
     expect(Scheduler).toHaveYielded(['A']);
     expect(root).toMatchRenderedOutput('A');
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    // no cleanup: cache is still retained at the root
+    expect(Scheduler).toHaveYielded([]);
+    expect(root).toMatchRenderedOutput('Bye');
   });
 
   // @gate experimental || www
@@ -200,12 +229,19 @@ describe('ReactCache', () => {
     });
     expect(Scheduler).toHaveYielded(['A']);
     expect(root).toMatchRenderedOutput('A');
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    // no cleanup: cache is still retained at the root
+    expect(Scheduler).toHaveYielded([]);
+    expect(root).toMatchRenderedOutput('Bye');
   });
 
   // @gate experimental || www
   test('multiple new Cache boundaries in the same update share the same, fresh cache', async () => {
-    function App({text}) {
-      return (
+    function App({showMore}) {
+      return showMore ? (
         <>
           <Cache>
             <Suspense fallback={<Text text="Loading..." />}>
@@ -218,6 +254,8 @@ describe('ReactCache', () => {
             </Suspense>
           </Cache>
         </>
+      ) : (
+        '(empty)'
       );
     }
 
@@ -225,6 +263,12 @@ describe('ReactCache', () => {
     await act(async () => {
       root.render(<App showMore={false} />);
     });
+    expect(Scheduler).toHaveYielded([]);
+    expect(root).toMatchRenderedOutput('(empty)');
+
+    await act(async () => {
+      root.render(<App showMore={true} />);
+    });
     // Even though there are two new <Cache /> trees, they should share the same
     // data cache. So there should be only a single cache miss for A.
     expect(Scheduler).toHaveYielded([
@@ -239,6 +283,15 @@ describe('ReactCache', () => {
     });
     expect(Scheduler).toHaveYielded(['A', 'A']);
     expect(root).toMatchRenderedOutput('AA');
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    // Cleanup occurs for the cache shared by the inner cache boundaries (which
+    // is not shared with the root because the boundaries were added in an update).
+    // Note that no cache is created for the root since the cache is never accessed.
+    expect(Scheduler).toHaveYielded(['Cache cleanup: A [v1]']);
+    expect(root).toMatchRenderedOutput('Bye');
   });
 
   // @gate experimental || www
@@ -261,8 +314,8 @@ describe('ReactCache', () => {
       await act(async () => {
         root.render(<App />);
       });
-      // Even though there are two new <Cache /> trees, they should share the same
-      // data cache. So there should be only a single cache miss for A.
+      // Even though there is a nested <Cache /> boundary, it should share the same
+      // data cache as the root. So there should be only a single cache miss for A.
       expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']);
       expect(root).toMatchRenderedOutput('Loading...');
 
@@ -271,6 +324,13 @@ describe('ReactCache', () => {
       });
       expect(Scheduler).toHaveYielded(['A', 'A']);
       expect(root).toMatchRenderedOutput('AA');
+
+      await act(async () => {
+        root.render('Bye');
+      });
+      // no cleanup: cache is still retained at the root
+      expect(Scheduler).toHaveYielded([]);
+      expect(root).toMatchRenderedOutput('Bye');
     },
   );
 
@@ -309,6 +369,13 @@ describe('ReactCache', () => {
       'A [v1]',
     ]);
     expect(root).toMatchRenderedOutput('A [v1]A [v1]');
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    // no cleanup: cache is still retained at the root
+    expect(Scheduler).toHaveYielded([]);
+    expect(root).toMatchRenderedOutput('Bye');
   });
 
   // @gate experimental || www
@@ -356,10 +423,21 @@ describe('ReactCache', () => {
     });
     expect(Scheduler).toHaveYielded(['A [v2]']);
     expect(root).toMatchRenderedOutput('A [v1]A [v2]');
+
+    // Replace all the children: this should retain the root Cache instance,
+    // but clean up the separate cache instance created for the fresh cache
+    // boundary.
+    await act(async () => {
+      root.render('Bye!');
+    });
+    // Cleanup occurs for the *second* cache instance: the first is still
+    // referenced by the root
+    expect(Scheduler).toHaveYielded(['Cache cleanup: A [v2]']);
+    expect(root).toMatchRenderedOutput('Bye!');
   });
 
   // @gate experimental || www
-  test('inner content uses same cache as shell if spawned by the same transition', async () => {
+  test('inner/outer cache boundaries use the same cache instance on initial render', async () => {
     const root = ReactNoop.createRoot();
 
     function App() {
@@ -431,10 +509,109 @@ describe('ReactCache', () => {
         <div>Content</div>
       </>,
     );
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    // no cleanup: cache is still retained at the root
+    expect(Scheduler).toHaveYielded([]);
+    expect(root).toMatchRenderedOutput('Bye');
+  });
+
+  // @gate experimental || www
+  test('inner/outer cache boundaries added in the same update use the same cache instance', async () => {
+    const root = ReactNoop.createRoot();
+
+    function App({showMore}) {
+      return showMore ? (
+        <Cache>
+          <Suspense fallback={<Text text="Loading shell..." />}>
+            {/* The shell reads A */}
+            <Shell>
+              {/* The inner content reads both A and B */}
+              <Suspense fallback={<Text text="Loading content..." />}>
+                <Cache>
+                  <Content />
+                </Cache>
+              </Suspense>
+            </Shell>
+          </Suspense>
+        </Cache>
+      ) : (
+        '(empty)'
+      );
+    }
+
+    function Shell({children}) {
+      readText('A');
+      return (
+        <>
+          <div>
+            <Text text="Shell" />
+          </div>
+          <div>{children}</div>
+        </>
+      );
+    }
+
+    function Content() {
+      readText('A');
+      readText('B');
+      return <Text text="Content" />;
+    }
+
+    await act(async () => {
+      root.render(<App showMore={false} />);
+    });
+    expect(Scheduler).toHaveYielded([]);
+    expect(root).toMatchRenderedOutput('(empty)');
+
+    await act(async () => {
+      root.render(<App showMore={true} />);
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading shell...']);
+    expect(root).toMatchRenderedOutput('Loading shell...');
+
+    await act(async () => {
+      resolveMostRecentTextCache('A');
+    });
+    expect(Scheduler).toHaveYielded([
+      'Shell',
+      // There's a cache miss for B, because it hasn't been read yet. But not
+      // A, because it was cached when we rendered the shell.
+      'Cache miss! [B]',
+      'Loading content...',
+    ]);
+    expect(root).toMatchRenderedOutput(
+      <>
+        <div>Shell</div>
+        <div>Loading content...</div>
+      </>,
+    );
+
+    await act(async () => {
+      resolveMostRecentTextCache('B');
+    });
+    expect(Scheduler).toHaveYielded(['Content']);
+    expect(root).toMatchRenderedOutput(
+      <>
+        <div>Shell</div>
+        <div>Content</div>
+      </>,
+    );
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    expect(Scheduler).toHaveYielded([
+      'Cache cleanup: A [v1]',
+      'Cache cleanup: B [v1]',
+    ]);
+    expect(root).toMatchRenderedOutput('Bye');
   });
 
   // @gate experimental || www
-  test('refresh a cache', async () => {
+  test('refresh a cache boundary', async () => {
     let refresh;
     function App() {
       refresh = useCacheRefresh();
@@ -474,6 +651,14 @@ describe('ReactCache', () => {
     // Note that the version has updated
     expect(Scheduler).toHaveYielded(['A [v2]']);
     expect(root).toMatchRenderedOutput('A [v2]');
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    // The original cache instance is not cleaned up since it is still referenced
+    // by the root, but the refreshed inner cache is cleaned up.
+    expect(Scheduler).toHaveYielded(['Cache cleanup: A [v2]']);
+    expect(root).toMatchRenderedOutput('Bye');
   });
 
   // @gate experimental || www
@@ -512,9 +697,64 @@ describe('ReactCache', () => {
     await act(async () => {
       resolveMostRecentTextCache('A');
     });
-    // Note that the version has updated
-    expect(Scheduler).toHaveYielded(['A [v2]']);
+    // Note that the version has updated, and the previous cache is cleared
+    expect(Scheduler).toHaveYielded(['A [v2]', 'Cache cleanup: A [v1]']);
     expect(root).toMatchRenderedOutput('A [v2]');
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    // the original root cache was already cleaned up when the refresh completed
+    expect(Scheduler).toHaveYielded([]);
+    expect(root).toMatchRenderedOutput('Bye');
+  });
+
+  // @gate experimental || www
+  test('refresh the root cache without a transition', async () => {
+    let refresh;
+    function App() {
+      refresh = useCacheRefresh();
+      return <AsyncText showVersion={true} text="A" />;
+    }
+
+    // Mount initial data
+    const root = ReactNoop.createRoot();
+    await act(async () => {
+      root.render(
+        <Suspense fallback={<Text text="Loading..." />}>
+          <App />
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']);
+    expect(root).toMatchRenderedOutput('Loading...');
+
+    await act(async () => {
+      resolveMostRecentTextCache('A');
+    });
+    expect(Scheduler).toHaveYielded(['A [v1]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    // Refresh for new data.
+    await act(async () => {
+      refresh();
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']);
+    expect(root).toMatchRenderedOutput('Loading...');
+
+    await act(async () => {
+      resolveMostRecentTextCache('A');
+    });
+    // Note that the version has updated, and the previous cache is cleared
+    expect(Scheduler).toHaveYielded(['A [v2]', 'Cache cleanup: A [v1]']);
+    expect(root).toMatchRenderedOutput('A [v2]');
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    // the original root cache was already cleaned up when the refresh completed
+    expect(Scheduler).toHaveYielded([]);
+    expect(root).toMatchRenderedOutput('Bye');
   });
 
   // @gate experimental || www
@@ -556,8 +796,16 @@ describe('ReactCache', () => {
       startTransition(() => refresh(createTextCache, cache));
     });
     // The root should re-render without a cache miss.
+    // The cache is not cleaned up yet, since it's still referenced by the root
     expect(Scheduler).toHaveYielded(['A [v2]']);
     expect(root).toMatchRenderedOutput('A [v2]');
+
+    await act(async () => {
+      root.render('Bye');
+    });
+    // the refreshed cache boundary is unmounted and cleans up
+    expect(Scheduler).toHaveYielded(['Cache cleanup: A [v2]']);
+    expect(root).toMatchRenderedOutput('Bye');
   });
 
   // @gate experimental || www
@@ -621,8 +869,22 @@ describe('ReactCache', () => {
     await act(async () => {
       resolveMostRecentTextCache('A');
     });
-    expect(Scheduler).toHaveYielded(['A [v3]', 'A [v3]']);
+    expect(Scheduler).toHaveYielded([
+      'A [v3]',
+      'A [v3]',
+      // once the refresh completes the inner showMore boundary frees its previous
+      // cache instance, since it is now using the refreshed parent instance.
+      'Cache cleanup: A [v2]',
+    ]);
     expect(root).toMatchRenderedOutput('A [v3]A [v3]');
+
+    await act(async () => {
+      root.render('Bye!');
+    });
+    // Unmounting children releases the refreshed cache instance only; the root
+    // still retains the original cache instance used for the first render
+    expect(Scheduler).toHaveYielded(['Cache cleanup: A [v3]']);
+    expect(root).toMatchRenderedOutput('Bye!');
   });
 
   // @gate experimental || www
@@ -695,6 +957,21 @@ describe('ReactCache', () => {
       });
       expect(Scheduler).toHaveYielded(['A [v2]']);
       expect(root).toMatchRenderedOutput('A [v2]A [v1]');
+
+      // Unmount children: this should clear *both* cache instances:
+      // the root doesn't have a cache instance (since it wasn't accessed
+      // during the initial render, and all subsequent cache accesses were within
+      // a fresh boundary). Therefore this causes cleanup of both the fresh cache
+      // instance in the refreshed first boundary and the cache instance in the
+      // non-refreshed sibling boundary.
+      await act(async () => {
+        root.render('Bye!');
+      });
+      expect(Scheduler).toHaveYielded([
+        'Cache cleanup: A [v2]',
+        'Cache cleanup: A [v1]',
+      ]);
+      expect(root).toMatchRenderedOutput('Bye!');
     },
   );
 
@@ -733,6 +1010,7 @@ describe('ReactCache', () => {
         'Cache miss! [B]',
         'Loading...',
       ]);
+      expect(root).toMatchRenderedOutput('Loading...');
 
       await act(async () => {
         // This will resolve the content in the first cache
@@ -750,6 +1028,7 @@ describe('ReactCache', () => {
         'A [v1]',
         'B [v1]',
       ]);
+      expect(root).toMatchRenderedOutput('Loading... A [v1] B [v1]');
 
       // Now resolve the second tree
       await act(async () => {
@@ -757,6 +1036,15 @@ describe('ReactCache', () => {
       });
       expect(Scheduler).toHaveYielded(['A [v2]']);
       expect(root).toMatchRenderedOutput('A [v2] A [v1] B [v1]');
+
+      await act(async () => {
+        root.render('Bye!');
+      });
+      // Unmounting children releases both cache boundaries, but the original
+      // cache instance (used by the second boundary) is still referenced by the root;
+      // only the second cache instance is freed.
+      expect(Scheduler).toHaveYielded(['Cache cleanup: A [v2]']);
+      expect(root).toMatchRenderedOutput('Bye!');
     },
   );
 
@@ -841,6 +1129,19 @@ describe('ReactCache', () => {
     });
     expect(Scheduler).toHaveYielded(['A [v1]', 'A [v1]', 'A [v2]']);
     expect(root).toMatchRenderedOutput('A [v1]A [v1]A [v2]');
+
+    // Unmount children: the first text cache instance is created only after the root
+    // commits, so both fresh cache instances are released by their cache boundaries,
+    // cleaning up v1 (used for the first two children, which render together) and
+    // v2 (used for the third boundary added later).
+    await act(async () => {
+      root.render('Bye!');
+    });
+    expect(Scheduler).toHaveYielded([
+      'Cache cleanup: A [v1]',
+      'Cache cleanup: A [v2]',
+    ]);
+    expect(root).toMatchRenderedOutput('Bye!');
   });
 
   // @gate experimental || www
@@ -863,7 +1164,7 @@ describe('ReactCache', () => {
           <Suspense fallback={<Text text="Loading..." />}>
             {shouldShow ? (
               <Cache>
-                <AsyncText text="A" />
+                <AsyncText showVersion={true} text="A" />
               </Cache>
             ) : null}
           </Suspense>
@@ -880,7 +1181,7 @@ describe('ReactCache', () => {
 
     const root = ReactNoop.createRoot();
     await act(async () => {
-      root.render(<App showMore={false} />);
+      root.render(<App />);
     });
     expect(Scheduler).toHaveYielded(['0']);
     expect(root).toMatchRenderedOutput('0');
@@ -908,7 +1209,331 @@ describe('ReactCache', () => {
     await act(async () => {
       resolveMostRecentTextCache('A');
     });
-    expect(Scheduler).toHaveYielded(['A']);
-    expect(root).toMatchRenderedOutput('A1');
+    expect(Scheduler).toHaveYielded(['A [v1]']);
+    expect(root).toMatchRenderedOutput('A [v1]1');
+
+    // Unmount children: the first text cache instance is created only after the
+    // initial render, when showMore() is called. That instance is cleaned up when
+    // the boundary is unmounted. Because the root cache instance is never accessed,
+    // the inner cache boundary ends up at v1.
+    await act(async () => {
+      root.render('Bye!');
+    });
+    expect(Scheduler).toHaveYielded(['Cache cleanup: A [v1]']);
+    expect(root).toMatchRenderedOutput('Bye!');
+  });
+
+  // @gate experimental || www
+  test('cache boundary uses a fresh cache when its key changes', async () => {
+    const root = ReactNoop.createRoot();
+    seedNextTextCache('A');
+    await act(async () => {
+      root.render(
+        <Suspense fallback="Loading...">
+          <Cache key="A">
+            <AsyncText showVersion={true} text="A" />
+          </Cache>
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['A [v1]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    seedNextTextCache('B');
+    await act(async () => {
+      root.render(
+        <Suspense fallback="Loading...">
+          <Cache key="B">
+            <AsyncText showVersion={true} text="B" />
+          </Cache>
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['B [v2]']);
+    expect(root).toMatchRenderedOutput('B [v2]');
+
+    // Unmount children: the fresh cache instance for B cleans up since the cache boundary
+    // is the only owner, while the original cache instance (for A) is still retained by
+    // the root.
+    await act(async () => {
+      root.render('Bye!');
+    });
+    expect(Scheduler).toHaveYielded(['Cache cleanup: B [v2]']);
+    expect(root).toMatchRenderedOutput('Bye!');
+  });
+
+  // @gate experimental || www
+  test('overlapping transitions after an initial mount use the same fresh cache', async () => {
+    const root = ReactNoop.createRoot();
+    await act(async () => {
+      root.render(
+        <Suspense fallback="Loading...">
+          <Cache key="A">
+            <AsyncText showVersion={true} text="A" />
+          </Cache>
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [A]']);
+    expect(root).toMatchRenderedOutput('Loading...');
+
+    await act(async () => {
+      resolveMostRecentTextCache('A');
+    });
+    expect(Scheduler).toHaveYielded(['A [v1]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    // After a mount, subsequent transitions use a fresh cache
+    await act(async () => {
+      startTransition(() => {
+        root.render(
+          <Suspense fallback="Loading...">
+            <Cache key="B">
+              <AsyncText showVersion={true} text="B" />
+            </Cache>
+          </Suspense>,
+        );
+      });
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [B]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    // Update to a different text and with a different key for the cache
+    // boundary: this should still use the fresh cache instance created
+    // for the earlier transition
+    await act(async () => {
+      startTransition(() => {
+        root.render(
+          <Suspense fallback="Loading...">
+            <Cache key="C">
+              <AsyncText showVersion={true} text="C" />
+            </Cache>
+          </Suspense>,
+        );
+      });
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [C]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    await act(async () => {
+      resolveMostRecentTextCache('C');
+    });
+    expect(Scheduler).toHaveYielded(['C [v2]']);
+    expect(root).toMatchRenderedOutput('C [v2]');
+
+    // Unmount children: the fresh cache used for the updates is freed, while the
+    // original cache (with A) is still retained at the root.
+    await act(async () => {
+      root.render('Bye!');
+    });
+    expect(Scheduler).toHaveYielded([
+      'Cache cleanup: B [v2]',
+      'Cache cleanup: C [v2]',
+    ]);
+    expect(root).toMatchRenderedOutput('Bye!');
+  });
+
+  // @gate experimental || www
+  test('overlapping updates after an initial mount use the same fresh cache', async () => {
+    const root = ReactNoop.createRoot();
+    await act(async () => {
+      root.render(
+        <Suspense fallback="Loading...">
+          <Cache key="A">
+            <AsyncText showVersion={true} text="A" />
+          </Cache>
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [A]']);
+    expect(root).toMatchRenderedOutput('Loading...');
+
+    await act(async () => {
+      resolveMostRecentTextCache('A');
+    });
+    expect(Scheduler).toHaveYielded(['A [v1]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    // After a mount, subsequent updates use a fresh cache
+    await act(async () => {
+      root.render(
+        <Suspense fallback="Loading...">
+          <Cache key="B">
+            <AsyncText showVersion={true} text="B" />
+          </Cache>
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [B]']);
+    expect(root).toMatchRenderedOutput('Loading...');
+
+    // A second update uses the same fresh cache: even though this is a new
+    // Cache boundary, the render uses the fresh cache from the pending update.
+    await act(async () => {
+      root.render(
+        <Suspense fallback="Loading...">
+          <Cache key="C">
+            <AsyncText showVersion={true} text="C" />
+          </Cache>
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [C]']);
+    expect(root).toMatchRenderedOutput('Loading...');
+
+    await act(async () => {
+      resolveMostRecentTextCache('C');
+    });
+    expect(Scheduler).toHaveYielded(['C [v2]']);
+    expect(root).toMatchRenderedOutput('C [v2]');
+
+    // Unmount children: the fresh cache used for the updates is freed, while the
+    // original cache (with A) is still retained at the root.
+    await act(async () => {
+      root.render('Bye!');
+    });
+    expect(Scheduler).toHaveYielded([
+      'Cache cleanup: B [v2]',
+      'Cache cleanup: C [v2]',
+    ]);
+    expect(root).toMatchRenderedOutput('Bye!');
+  });
+
+  // @gate experimental || www
+  test('cleans up cache only used in an aborted transition', async () => {
+    const root = ReactNoop.createRoot();
+    seedNextTextCache('A');
+    await act(async () => {
+      root.render(
+        <Suspense fallback="Loading...">
+          <Cache key="A">
+            <AsyncText showVersion={true} text="A" />
+          </Cache>
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['A [v1]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    // Start a transition from A -> B..., which should create a fresh cache
+    // for the new cache boundary (because of the different key)
+    await act(async () => {
+      startTransition(() => {
+        root.render(
+          <Suspense fallback="Loading...">
+            <Cache key="B">
+              <AsyncText showVersion={true} text="B" />
+            </Cache>
+          </Suspense>,
+        );
+      });
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [B]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    // ...but cancel by transitioning "back" to A (which we never really left)
+    await act(async () => {
+      startTransition(() => {
+        root.render(
+          <Suspense fallback="Loading...">
+            <Cache key="A">
+              <AsyncText showVersion={true} text="A" />
+            </Cache>
+          </Suspense>,
+        );
+      });
+    });
+    expect(Scheduler).toHaveYielded(['A [v1]', 'Cache cleanup: B [v2]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    // Unmount children: no cleanup; the fresh cache was already released above
+    await act(async () => {
+      root.render('Bye!');
+    });
+    expect(Scheduler).toHaveYielded([]);
+    expect(root).toMatchRenderedOutput('Bye!');
+  });
+
+  // @gate experimental || www
+  test.skip('if a root cache refresh never commits its fresh cache is released', async () => {
+    const root = ReactNoop.createRoot();
+    let refresh;
+    function Example({text}) {
+      refresh = useCacheRefresh();
+      return <AsyncText showVersion={true} text={text} />;
+    }
+    seedNextTextCache('A');
+    await act(async () => {
+      root.render(
+        <Suspense fallback="Loading...">
+          <Example text="A" />
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['A [v1]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    await act(async () => {
+      startTransition(() => {
+        refresh();
+      });
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [A]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    await act(async () => {
+      root.render('Bye!');
+    });
+    expect(Scheduler).toHaveYielded([
+      // TODO: the v1 cache should *not* be cleaned up, it is still retained by the root
+      // The following line is presently yielded but should not be:
+      // 'Cache cleanup: A [v1]',
+
+      // TODO: the v2 cache *should* be cleaned up, it was created for the abandoned refresh
+      // The following line is presently not yielded but should be:
+      'Cache cleanup: A [v2]',
+    ]);
+    expect(root).toMatchRenderedOutput('Bye!');
+  });
+
+  // @gate experimental || www
+  test.skip('if a cache boundary refresh never commits its fresh cache is released', async () => {
+    const root = ReactNoop.createRoot();
+    let refresh;
+    function Example({text}) {
+      refresh = useCacheRefresh();
+      return <AsyncText showVersion={true} text={text} />;
+    }
+    seedNextTextCache('A');
+    await act(async () => {
+      root.render(
+        <Suspense fallback="Loading...">
+          <Cache>
+            <Example text="A" />
+          </Cache>
+        </Suspense>,
+      );
+    });
+    expect(Scheduler).toHaveYielded(['A [v1]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    await act(async () => {
+      startTransition(() => {
+        refresh();
+      });
+    });
+    expect(Scheduler).toHaveYielded(['Cache miss! [A]']);
+    expect(root).toMatchRenderedOutput('A [v1]');
+
+    // Unmount the boundary before the refresh can complete
+    await act(async () => {
+      root.render('Bye!');
+    });
+    expect(Scheduler).toHaveYielded([
+      // TODO: the v2 cache *should* be cleaned up, it was created for the abandoned refresh
+      // The following line is presently not yielded but should be:
+      'Cache cleanup: A [v2]',
+    ]);
+    expect(root).toMatchRenderedOutput('Bye!');
   });
 });
diff --git a/packages/react-reconciler/src/__tests__/SchedulingProfiler-test.internal.js b/packages/react-reconciler/src/__tests__/SchedulingProfiler-test.internal.js
index 9b6ec83a40d39..6b1e4290d0102 100644
--- a/packages/react-reconciler/src/__tests__/SchedulingProfiler-test.internal.js
+++ b/packages/react-reconciler/src/__tests__/SchedulingProfiler-test.internal.js
@@ -409,30 +409,30 @@ describe('SchedulingProfiler', () => {
 
     if (gate(flags => flags.enableSchedulingProfiler)) {
       expect(getMarks()).toMatchInlineSnapshot(`
-              Array [
-                "--render-start-16",
-                "--component-render-start-Example",
-                "--component-render-stop",
-                "--render-stop",
-                "--commit-start-16",
-                "--react-version-17.0.3",
-                "--profiler-version-1",
-                "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-                "--layout-effects-start-16",
-                "--schedule-state-update-1-Example",
-                "--layout-effects-stop",
-                "--render-start-1",
-                "--component-render-start-Example",
-                "--component-render-stop",
-                "--render-stop",
-                "--commit-start-1",
-                "--react-version-17.0.3",
-                "--profiler-version-1",
-                "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-                "--commit-stop",
-                "--commit-stop",
-              ]
-          `);
+        Array [
+          "--render-start-16",
+          "--component-render-start-Example",
+          "--component-render-stop",
+          "--render-stop",
+          "--commit-start-16",
+          "--react-version-17.0.3",
+          "--profiler-version-1",
+          "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+          "--layout-effects-start-16",
+          "--schedule-state-update-1-Example",
+          "--layout-effects-stop",
+          "--render-start-1",
+          "--component-render-start-Example",
+          "--component-render-stop",
+          "--render-stop",
+          "--commit-start-1",
+          "--react-version-17.0.3",
+          "--profiler-version-1",
+          "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+          "--commit-stop",
+          "--commit-stop",
+        ]
+      `);
     }
   });
 
@@ -462,30 +462,30 @@ describe('SchedulingProfiler', () => {
 
     if (gate(flags => flags.enableSchedulingProfiler)) {
       expect(getMarks()).toMatchInlineSnapshot(`
-              Array [
-                "--render-start-16",
-                "--component-render-start-Example",
-                "--component-render-stop",
-                "--render-stop",
-                "--commit-start-16",
-                "--react-version-17.0.3",
-                "--profiler-version-1",
-                "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-                "--layout-effects-start-16",
-                "--schedule-forced-update-1-Example",
-                "--layout-effects-stop",
-                "--render-start-1",
-                "--component-render-start-Example",
-                "--component-render-stop",
-                "--render-stop",
-                "--commit-start-1",
-                "--react-version-17.0.3",
-                "--profiler-version-1",
-                "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-                "--commit-stop",
-                "--commit-stop",
-              ]
-          `);
+        Array [
+          "--render-start-16",
+          "--component-render-start-Example",
+          "--component-render-stop",
+          "--render-stop",
+          "--commit-start-16",
+          "--react-version-17.0.3",
+          "--profiler-version-1",
+          "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+          "--layout-effects-start-16",
+          "--schedule-forced-update-1-Example",
+          "--layout-effects-stop",
+          "--render-start-1",
+          "--component-render-start-Example",
+          "--component-render-stop",
+          "--render-stop",
+          "--commit-start-1",
+          "--react-version-17.0.3",
+          "--profiler-version-1",
+          "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+          "--commit-stop",
+          "--commit-stop",
+        ]
+      `);
     }
   });
 
@@ -701,22 +701,22 @@ describe('SchedulingProfiler', () => {
 
     if (gate(flags => flags.enableSchedulingProfiler)) {
       expect(getMarks()).toMatchInlineSnapshot(`
-              Array [
-                "--schedule-render-16",
-                "--render-start-16",
-                "--component-render-start-Example",
-                "--schedule-state-update-16-Example",
-                "--component-render-stop",
-                "--render-stop",
-                "--commit-start-16",
-                "--react-version-17.0.3",
-                "--profiler-version-1",
-                "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-                "--layout-effects-start-16",
-                "--layout-effects-stop",
-                "--commit-stop",
-              ]
-          `);
+        Array [
+          "--schedule-render-16",
+          "--render-start-16",
+          "--component-render-start-Example",
+          "--schedule-state-update-16-Example",
+          "--component-render-stop",
+          "--render-stop",
+          "--commit-start-16",
+          "--react-version-17.0.3",
+          "--profiler-version-1",
+          "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+          "--layout-effects-start-16",
+          "--layout-effects-stop",
+          "--commit-stop",
+        ]
+      `);
     }
   });
 
@@ -746,35 +746,35 @@ describe('SchedulingProfiler', () => {
 
     if (gate(flags => flags.enableSchedulingProfiler)) {
       expect(getMarks()).toMatchInlineSnapshot(`
-              Array [
-                "--schedule-render-1",
-                "--render-start-1",
-                "--component-render-start-ErrorBoundary",
-                "--component-render-stop",
-                "--component-render-start-ExampleThatThrows",
-                "--component-render-start-ExampleThatThrows",
-                "--component-render-stop",
-                "--error-ExampleThatThrows-mount-Expected error",
-                "--render-stop",
-                "--commit-start-1",
-                "--react-version-17.0.3",
-                "--profiler-version-1",
-                "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-                "--layout-effects-start-1",
-                "--schedule-state-update-1-ErrorBoundary",
-                "--layout-effects-stop",
-                "--commit-stop",
-                "--render-start-1",
-                "--component-render-start-ErrorBoundary",
-                "--component-render-stop",
-                "--render-stop",
-                "--commit-start-1",
-                "--react-version-17.0.3",
-                "--profiler-version-1",
-                "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-                "--commit-stop",
-              ]
-          `);
+        Array [
+          "--schedule-render-1",
+          "--render-start-1",
+          "--component-render-start-ErrorBoundary",
+          "--component-render-stop",
+          "--component-render-start-ExampleThatThrows",
+          "--component-render-start-ExampleThatThrows",
+          "--component-render-stop",
+          "--error-ExampleThatThrows-mount-Expected error",
+          "--render-stop",
+          "--commit-start-1",
+          "--react-version-17.0.3",
+          "--profiler-version-1",
+          "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+          "--layout-effects-start-1",
+          "--schedule-state-update-1-ErrorBoundary",
+          "--layout-effects-stop",
+          "--commit-stop",
+          "--render-start-1",
+          "--component-render-start-ErrorBoundary",
+          "--component-render-stop",
+          "--render-stop",
+          "--commit-start-1",
+          "--react-version-17.0.3",
+          "--profiler-version-1",
+          "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+          "--commit-stop",
+        ]
+      `);
     }
   });
 
diff --git a/packages/react-reconciler/src/__tests__/SchedulingProfilerLabels-test.internal.js b/packages/react-reconciler/src/__tests__/SchedulingProfilerLabels-test.internal.js
index 13ca3988b2a71..a27c0271c116a 100644
--- a/packages/react-reconciler/src/__tests__/SchedulingProfilerLabels-test.internal.js
+++ b/packages/react-reconciler/src/__tests__/SchedulingProfilerLabels-test.internal.js
@@ -89,20 +89,20 @@ describe('SchedulingProfiler labels', () => {
 
     if (gate(flags => flags.enableSchedulingProfiler)) {
       expect(clearedMarks).toMatchInlineSnapshot(`
-      Array [
-        "__v3",
-        "--schedule-render-1",
-        "--render-start-1",
-        "--render-stop",
-        "--commit-start-1",
-        "--react-version-17.0.3",
-        "--profiler-version-1",
-        "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-        "--layout-effects-start-1",
-        "--layout-effects-stop",
-        "--commit-stop",
-      ]
-    `);
+              Array [
+                "__v3",
+                "--schedule-render-1",
+                "--render-start-1",
+                "--render-stop",
+                "--commit-start-1",
+                "--react-version-17.0.3",
+                "--profiler-version-1",
+                "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+                "--layout-effects-start-1",
+                "--layout-effects-stop",
+                "--commit-stop",
+              ]
+          `);
     }
   });
 
@@ -114,11 +114,11 @@ describe('SchedulingProfiler labels', () => {
 
         root.render(<div />);
         expect(clearedMarks).toMatchInlineSnapshot(`
-        Array [
-          "__v3",
-          "--schedule-render-16",
-        ]
-      `);
+                  Array [
+                    "__v3",
+                    "--schedule-render-16",
+                  ]
+              `);
       });
     }
   });
@@ -152,21 +152,21 @@ describe('SchedulingProfiler labels', () => {
       });
 
       expect(clearedMarks).toMatchInlineSnapshot(`
-      Array [
-        "--schedule-state-update-1-App",
-        "--render-start-1",
-        "--component-render-start-App",
-        "--component-render-stop",
-        "--render-stop",
-        "--commit-start-1",
-        "--react-version-17.0.3",
-        "--profiler-version-1",
-        "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-        "--layout-effects-start-1",
-        "--layout-effects-stop",
-        "--commit-stop",
-      ]
-    `);
+        Array [
+          "--schedule-state-update-1-App",
+          "--render-start-1",
+          "--component-render-start-App",
+          "--component-render-stop",
+          "--render-stop",
+          "--commit-start-1",
+          "--react-version-17.0.3",
+          "--profiler-version-1",
+          "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+          "--layout-effects-start-1",
+          "--layout-effects-stop",
+          "--commit-stop",
+        ]
+      `);
     }
   });
 
@@ -196,21 +196,21 @@ describe('SchedulingProfiler labels', () => {
       });
 
       expect(clearedMarks).toMatchInlineSnapshot(`
-      Array [
-        "--schedule-state-update-4-App",
-        "--render-start-4",
-        "--component-render-start-App",
-        "--component-render-stop",
-        "--render-stop",
-        "--commit-start-4",
-        "--react-version-17.0.3",
-        "--profiler-version-1",
-        "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
-        "--layout-effects-start-4",
-        "--layout-effects-stop",
-        "--commit-stop",
-      ]
-    `);
+        Array [
+          "--schedule-state-update-4-App",
+          "--render-start-4",
+          "--component-render-start-App",
+          "--component-render-stop",
+          "--render-stop",
+          "--commit-start-4",
+          "--react-version-17.0.3",
+          "--profiler-version-1",
+          "--react-lane-labels-Sync,InputContinuousHydration,InputContinuous,DefaultHydration,Default,TransitionHydration,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Transition,Retry,Retry,Retry,Retry,Retry,SelectiveHydration,IdleHydration,Idle,Offscreen",
+          "--layout-effects-start-4",
+          "--layout-effects-stop",
+          "--commit-stop",
+        ]
+      `);
     }
   });
 });
diff --git a/packages/react-reconciler/src/__tests__/StrictEffectsModeDefaults-test.internal.js b/packages/react-reconciler/src/__tests__/StrictEffectsModeDefaults-test.internal.js
index e443aa0a23190..f2041b4ba1e32 100644
--- a/packages/react-reconciler/src/__tests__/StrictEffectsModeDefaults-test.internal.js
+++ b/packages/react-reconciler/src/__tests__/StrictEffectsModeDefaults-test.internal.js
@@ -113,6 +113,7 @@ describe('StrictEffectsMode defaults', () => {
           </>,
         );
 
+        expect(Scheduler).toHaveYielded([]);
         expect(Scheduler).toFlushUntilNextPaint([
           // Cleanup and re-run "one" (and "two") since there is no dependencies array.
           'useLayoutEffect unmount "one"',
diff --git a/packages/react/index.classic.fb.js b/packages/react/index.classic.fb.js
index 5335c4053a70e..568a8ada613d9 100644
--- a/packages/react/index.classic.fb.js
+++ b/packages/react/index.classic.fb.js
@@ -37,6 +37,7 @@ export {
   unstable_LegacyHidden,
   unstable_Offscreen,
   unstable_Scope,
+  unstable_getCacheSignal,
   unstable_getCacheForType,
   unstable_useCacheRefresh,
   unstable_useOpaqueIdentifier,
diff --git a/packages/react/index.experimental.js b/packages/react/index.experimental.js
index 19490fb214c97..7491bbb7e832d 100644
--- a/packages/react/index.experimental.js
+++ b/packages/react/index.experimental.js
@@ -33,6 +33,7 @@ export {
   unstable_DebugTracingMode,
   unstable_LegacyHidden,
   unstable_Offscreen,
+  unstable_getCacheSignal,
   unstable_getCacheForType,
   unstable_useCacheRefresh,
   unstable_useOpaqueIdentifier,
diff --git a/packages/react/index.js b/packages/react/index.js
index b4a3bf6e11b81..59cc05f0254e6 100644
--- a/packages/react/index.js
+++ b/packages/react/index.js
@@ -58,6 +58,7 @@ export {
   unstable_LegacyHidden,
   unstable_Offscreen,
   unstable_Scope,
+  unstable_getCacheSignal,
   unstable_getCacheForType,
   unstable_useCacheRefresh,
   unstable_useOpaqueIdentifier,
diff --git a/packages/react/index.modern.fb.js b/packages/react/index.modern.fb.js
index 2b847d2336312..cd60ee426fa65 100644
--- a/packages/react/index.modern.fb.js
+++ b/packages/react/index.modern.fb.js
@@ -36,6 +36,7 @@ export {
   unstable_LegacyHidden,
   unstable_Offscreen,
   unstable_Scope,
+  unstable_getCacheSignal,
   unstable_getCacheForType,
   unstable_useCacheRefresh,
   unstable_useOpaqueIdentifier,
diff --git a/packages/react/src/React.js b/packages/react/src/React.js
index 2246d32db662e..d29858c9b07fd 100644
--- a/packages/react/src/React.js
+++ b/packages/react/src/React.js
@@ -35,6 +35,7 @@ import {lazy} from './ReactLazy';
 import {forwardRef} from './ReactForwardRef';
 import {memo} from './ReactMemo';
 import {
+  getCacheSignal,
   getCacheForType,
   useCallback,
   useContext,
@@ -119,6 +120,7 @@ export {
   REACT_SUSPENSE_LIST_TYPE as SuspenseList,
   REACT_LEGACY_HIDDEN_TYPE as unstable_LegacyHidden,
   REACT_OFFSCREEN_TYPE as unstable_Offscreen,
+  getCacheSignal as unstable_getCacheSignal,
   getCacheForType as unstable_getCacheForType,
   useCacheRefresh as unstable_useCacheRefresh,
   REACT_CACHE_TYPE as unstable_Cache,
diff --git a/packages/react/src/ReactHooks.js b/packages/react/src/ReactHooks.js
index 0108c545fae5f..1892f926c59cf 100644
--- a/packages/react/src/ReactHooks.js
+++ b/packages/react/src/ReactHooks.js
@@ -41,6 +41,12 @@ function resolveDispatcher() {
   return ((dispatcher: any): Dispatcher);
 }
 
+export function getCacheSignal(): AbortSignal {
+  const dispatcher = resolveDispatcher();
+  // $FlowFixMe This is unstable, thus optional
+  return dispatcher.getCacheSignal();
+}
+
 export function getCacheForType<T>(resourceType: () => T): T {
   const dispatcher = resolveDispatcher();
   // $FlowFixMe This is unstable, thus optional
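// Editorial sketch (not part of the patch): how a data cache can combine the
// unstable_getCacheSignal export added by this diff with unstable_getCacheForType,
// mirroring the readText() pattern in ReactCache-test above. Both reads go through
// the current dispatcher, so this only works during render in experimental builds.
// `fetchUser` is a hypothetical request helper (not part of this diff) that accepts
// an AbortSignal.
import * as React from 'react';

function createUserCache() {
  return new Map();
}

function readUserRecord(id) {
  const signal = React.unstable_getCacheSignal();
  const userCache = React.unstable_getCacheForType(createUserCache);
  let record = userCache.get(id);
  if (record === undefined) {
    const controller = new AbortController();
    // When React releases this cache, abort the in-flight request for the entry.
    signal.addEventListener('abort', () => controller.abort());
    record = fetchUser(id, {signal: controller.signal});
    userCache.set(id, record);
  }
  return record;
}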
diff --git a/packages/react/unstable-shared-subset.experimental.js b/packages/react/unstable-shared-subset.experimental.js
index 9381778b4435d..a663ca8a5a89d 100644
--- a/packages/react/unstable-shared-subset.experimental.js
+++ b/packages/react/unstable-shared-subset.experimental.js
@@ -25,6 +25,7 @@ export {
   memo,
   startTransition,
   unstable_DebugTracingMode,
+  unstable_getCacheSignal,
   unstable_getCacheForType,
   unstable_useOpaqueIdentifier,
   useCallback,
diff --git a/scripts/jest/setupEnvironment.js b/scripts/jest/setupEnvironment.js
index 2ba88b156169d..d2d510088c45e 100644
--- a/scripts/jest/setupEnvironment.js
+++ b/scripts/jest/setupEnvironment.js
@@ -1,5 +1,7 @@
 /* eslint-disable */
 
+const AbortController = require('abort-controller');
+
 const NODE_ENV = process.env.NODE_ENV;
 if (NODE_ENV !== 'development' && NODE_ENV !== 'production') {
   throw new Error('NODE_ENV must either be set to development or production.');
@@ -21,6 +23,8 @@ global.__EXPERIMENTAL__ =
 
 global.__VARIANT__ = !!process.env.VARIANT;
 
+global.AbortController = AbortController;
+
 if (typeof window !== 'undefined') {
   global.requestIdleCallback = function(callback) {
     return setTimeout(() => {