Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[flags] Cleanup enableCache #31778

Merged
merged 3 commits
Dec 15, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 5 additions & 10 deletions packages/react-dom/src/__tests__/ReactCompositeComponent-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -537,7 +537,6 @@ describe('ReactCompositeComponent', () => {
});

it('should cleanup even if render() fatals', async () => {
const dispatcherEnabled = __DEV__ || gate(flags => flags.enableCache);
const ownerEnabled = __DEV__;

let stashedDispatcher;
Expand All @@ -551,7 +550,7 @@ describe('ReactCompositeComponent', () => {
}

const instance = <BadComponent />;
expect(ReactSharedInternals.A).toBe(dispatcherEnabled ? null : undefined);
expect(ReactSharedInternals.A).toBe(null);

const root = ReactDOMClient.createRoot(document.createElement('div'));
await expect(async () => {
Expand All @@ -560,15 +559,11 @@ describe('ReactCompositeComponent', () => {
});
}).rejects.toThrow();

expect(ReactSharedInternals.A).toBe(dispatcherEnabled ? null : undefined);
if (dispatcherEnabled) {
if (ownerEnabled) {
expect(stashedDispatcher.getOwner()).toBe(null);
} else {
expect(stashedDispatcher.getOwner).toBe(undefined);
}
expect(ReactSharedInternals.A).toBe(null);
if (ownerEnabled) {
expect(stashedDispatcher.getOwner()).toBe(null);
} else {
expect(stashedDispatcher).toBe(undefined);
expect(stashedDispatcher.getOwner).toBe(undefined);
}
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,6 @@ if (!__EXPERIMENTAL__) {
);
});

// @gate enableCache
it('supports cache', async () => {
let counter = 0;
const getCount = React.cache(() => {
Expand Down
4 changes: 0 additions & 4 deletions packages/react-reconciler/src/ReactFiberAsyncDispatcher.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,12 @@
import type {AsyncDispatcher, Fiber} from './ReactInternalTypes';
import type {Cache} from './ReactFiberCacheComponent';

import {enableCache} from 'shared/ReactFeatureFlags';
import {readContext} from './ReactFiberNewContext';
import {CacheContext} from './ReactFiberCacheComponent';

import {current as currentOwner} from './ReactCurrentFiber';

function getCacheForType<T>(resourceType: () => T): T {
if (!enableCache) {
throw new Error('Not implemented.');
}
const cache: Cache = readContext(CacheContext);
let cacheForType: T | void = (cache.data.get(resourceType): any);
if (cacheForType === undefined) {
Expand Down
118 changes: 47 additions & 71 deletions packages/react-reconciler/src/ReactFiberBeginWork.js
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,6 @@
enableProfilerCommitHooks,
enableProfilerTimer,
enableScopeAPI,
enableCache,
enableLazyContextPropagation,
enableSchedulingProfiler,
enableTransitionTracing,
Expand Down Expand Up @@ -712,12 +711,10 @@
cachePool: null,
};
workInProgress.memoizedState = nextState;
if (enableCache) {
// push the cache pool even though we're going to bail out
// because otherwise there'd be a context mismatch
if (current !== null) {
pushTransition(workInProgress, null, null);
}
// push the cache pool even though we're going to bail out
// because otherwise there'd be a context mismatch
if (current !== null) {
pushTransition(workInProgress, null, null);
}
reuseHiddenContextOnStack(workInProgress);
pushOffscreenSuspenseHandler(workInProgress);
Expand Down Expand Up @@ -751,7 +748,7 @@
cachePool: null,
};
workInProgress.memoizedState = nextState;
if (enableCache && current !== null) {
if (current !== null) {
// If the render that spawned this one accessed the cache pool, resume
// using the same cache. Unless the parent changed, since that means
// there was a refresh.
Expand All @@ -774,12 +771,10 @@
if (prevState !== null) {
// We're going from hidden -> visible.
let prevCachePool = null;
if (enableCache) {
// If the render that spawned this one accessed the cache pool, resume
// using the same cache. Unless the parent changed, since that means
// there was a refresh.
prevCachePool = prevState.cachePool;
}
// If the render that spawned this one accessed the cache pool, resume
// using the same cache. Unless the parent changed, since that means
// there was a refresh.
prevCachePool = prevState.cachePool;

let transitions = null;
if (enableTransitionTracing) {
Expand All @@ -804,13 +799,11 @@
// special to do. Need to push to the stack regardless, though, to avoid
// a push/pop misalignment.

if (enableCache) {
// If the render that spawned this one accessed the cache pool, resume
// using the same cache. Unless the parent changed, since that means
// there was a refresh.
if (current !== null) {
pushTransition(workInProgress, null, null);
}
// If the render that spawned this one accessed the cache pool, resume
// using the same cache. Unless the parent changed, since that means
// there was a refresh.
if (current !== null) {
pushTransition(workInProgress, null, null);
}

// We're about to bail out, but we need to push this to the stack anyway
Expand All @@ -833,15 +826,13 @@
const nextState: OffscreenState = {
baseLanes: nextBaseLanes,
// Save the cache pool so we can resume later.
cachePool: enableCache ? getOffscreenDeferredCache() : null,
cachePool: getOffscreenDeferredCache(),
};
workInProgress.memoizedState = nextState;
if (enableCache) {
// push the cache pool even though we're going to bail out
// because otherwise there'd be a context mismatch
if (current !== null) {
pushTransition(workInProgress, null, null);
}
// push the cache pool even though we're going to bail out
// because otherwise there'd be a context mismatch
if (current !== null) {
pushTransition(workInProgress, null, null);
}

// We're about to bail out, but we need to push this to the stack anyway
Expand Down Expand Up @@ -874,10 +865,6 @@
workInProgress: Fiber,
renderLanes: Lanes,
) {
if (!enableCache) {
return null;
}

prepareToReadContext(workInProgress, renderLanes);
const parentCache = readContext(CacheContext);

Expand Down Expand Up @@ -1478,13 +1465,11 @@
pushRootMarkerInstance(workInProgress);
}

if (enableCache) {
const nextCache: Cache = nextState.cache;
pushCacheProvider(workInProgress, nextCache);
if (nextCache !== prevState.cache) {
// The root cache refreshed.
propagateContextChange(workInProgress, CacheContext, renderLanes);
}
const nextCache: Cache = nextState.cache;
pushCacheProvider(workInProgress, nextCache);
if (nextCache !== prevState.cache) {
// The root cache refreshed.
propagateContextChange(workInProgress, CacheContext, renderLanes);
}

// This would ideally go inside processUpdateQueue, but because it suspends,
Expand Down Expand Up @@ -1988,28 +1973,26 @@
renderLanes: Lanes,
): OffscreenState {
let cachePool: SpawnedCachePool | null = null;
if (enableCache) {
const prevCachePool: SpawnedCachePool | null = prevOffscreenState.cachePool;
if (prevCachePool !== null) {
const parentCache = isPrimaryRenderer
? CacheContext._currentValue
: CacheContext._currentValue2;
if (prevCachePool.parent !== parentCache) {
// Detected a refresh in the parent. This overrides any previously
// suspended cache.
cachePool = {
parent: parentCache,
pool: parentCache,
};
} else {
// We can reuse the cache from last time. The only thing that would have
// overridden it is a parent refresh, which we checked for above.
cachePool = prevCachePool;
}
const prevCachePool: SpawnedCachePool | null = prevOffscreenState.cachePool;
if (prevCachePool !== null) {
const parentCache = isPrimaryRenderer
? CacheContext._currentValue
: CacheContext._currentValue2;
if (prevCachePool.parent !== parentCache) {
// Detected a refresh in the parent. This overrides any previously
// suspended cache.
cachePool = {
parent: parentCache,
pool: parentCache,
};
} else {
// If there's no previous cache pool, grab the current one.
cachePool = getSuspendedCache();
// We can reuse the cache from last time. The only thing that would have
// overridden it is a parent refresh, which we checked for above.
cachePool = prevCachePool;
}
} else {
// If there's no previous cache pool, grab the current one.
cachePool = getSuspendedCache();
}
return {
baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes),
Expand Down Expand Up @@ -3610,10 +3593,8 @@
pushRootMarkerInstance(workInProgress);
}

if (enableCache) {
const cache: Cache = current.memoizedState.cache;
pushCacheProvider(workInProgress, cache);
}
const cache: Cache = current.memoizedState.cache;
pushCacheProvider(workInProgress, cache);
resetHydrationState();
break;
case HostSingleton:
Expand Down Expand Up @@ -3797,10 +3778,8 @@
return updateOffscreenComponent(current, workInProgress, renderLanes);
}
case CacheComponent: {
if (enableCache) {
const cache: Cache = current.memoizedState.cache;
pushCacheProvider(workInProgress, cache);
}
const cache: Cache = current.memoizedState.cache;

Check failure on line 3781 in packages/react-reconciler/src/ReactFiberBeginWork.js

View workflow job for this annotation

GitHub Actions / Run eslint

'cache' is already declared in the upper scope on line 3596 column 13
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Need to fix these lints. Interesting that it wasn't failing for shadowing.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

TIL case statements without braces do not scope let/const to that case, fixed by adding braces to the previous case.

pushCacheProvider(workInProgress, cache);
break;
}
case TracingMarkerComponent: {
Expand Down Expand Up @@ -4087,10 +4066,7 @@
break;
}
case CacheComponent: {
if (enableCache) {
return updateCacheComponent(current, workInProgress, renderLanes);
}
break;
return updateCacheComponent(current, workInProgress, renderLanes);
}
case TracingMarkerComponent: {
if (enableTransitionTracing) {
Expand Down
52 changes: 15 additions & 37 deletions packages/react-reconciler/src/ReactFiberCacheComponent.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,15 @@
import type {ReactContext} from 'shared/ReactTypes';
import type {Fiber} from 'react-reconciler/src/ReactInternalTypes';

import {enableCache} from 'shared/ReactFeatureFlags';
import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols';

import {pushProvider, popProvider} from './ReactFiberNewContext';
import * as Scheduler from 'scheduler';

// In environments without AbortController (e.g. tests)
// replace it with a lightweight shim that only has the features we use.
const AbortControllerLocal: typeof AbortController = enableCache
? typeof AbortController !== 'undefined'
const AbortControllerLocal: typeof AbortController =
typeof AbortController !== 'undefined'
? AbortController
: // $FlowFixMe[missing-this-annot]
// $FlowFixMe[prop-missing]
Expand All @@ -36,9 +35,7 @@ const AbortControllerLocal: typeof AbortController = enableCache
signal.aborted = true;
listeners.forEach(listener => listener());
};
}
: // $FlowFixMe[incompatible-type]
null;
};

export type Cache = {
controller: AbortController,
Expand All @@ -63,20 +60,18 @@ const {
unstable_NormalPriority: NormalPriority,
} = Scheduler;

export const CacheContext: ReactContext<Cache> = enableCache
? {
$$typeof: REACT_CONTEXT_TYPE,
// We don't use Consumer/Provider for Cache components. So we'll cheat.
Consumer: (null: any),
Provider: (null: any),
// We'll initialize these at the root.
_currentValue: (null: any),
_currentValue2: (null: any),
_threadCount: 0,
}
: (null: any);
export const CacheContext: ReactContext<Cache> = {
$$typeof: REACT_CONTEXT_TYPE,
// We don't use Consumer/Provider for Cache components. So we'll cheat.
Consumer: (null: any),
Provider: (null: any),
// We'll initialize these at the root.
_currentValue: (null: any),
_currentValue2: (null: any),
_threadCount: 0,
};

if (__DEV__ && enableCache) {
if (__DEV__) {
CacheContext._currentRenderer = null;
CacheContext._currentRenderer2 = null;
}
Expand All @@ -85,22 +80,14 @@ if (__DEV__ && enableCache) {
// for retaining the cache once it is in use (retainCache), and releasing the cache
// once it is no longer needed (releaseCache).
export function createCache(): Cache {
if (!enableCache) {
return (null: any);
}
const cache: Cache = {
return {
controller: new AbortControllerLocal(),
data: new Map(),
refCount: 0,
};

return cache;
}

export function retainCache(cache: Cache) {
if (!enableCache) {
return;
}
if (__DEV__) {
if (cache.controller.signal.aborted) {
console.warn(
Expand All @@ -114,9 +101,6 @@ export function retainCache(cache: Cache) {

// Cleanup a cache instance, potentially freeing it if there are no more references
export function releaseCache(cache: Cache) {
if (!enableCache) {
return;
}
cache.refCount--;
if (__DEV__) {
if (cache.refCount < 0) {
Expand All @@ -134,15 +118,9 @@ export function releaseCache(cache: Cache) {
}

export function pushCacheProvider(workInProgress: Fiber, cache: Cache) {
if (!enableCache) {
return;
}
pushProvider(workInProgress, CacheContext, cache);
}

export function popCacheProvider(workInProgress: Fiber, cache: Cache) {
if (!enableCache) {
return;
}
popProvider(CacheContext, workInProgress);
}
Loading
Loading