diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts index 0c7267553a2..9d6f739b599 100644 --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -1705,6 +1705,127 @@ describe("type policies", function () { expect(cache.extract()).toMatchSnapshot(); }); + describe("custom finalize functions", function () { + const makeCache = (resolve: () => void) => new InMemoryCache({ + typePolicies: { + Parent: { + keyFields: false, + fields: { + deleteMe: { + read(existing, { storage }) { + expect(existing).toBe("merged value"); + expect(storage.cached).toBe(existing); + return "read value"; + }, + merge(existing, incoming, { storage }) { + expect(existing).toBeUndefined(); + expect(incoming).toBe("initial value"); + return storage.cached = "merged value"; + }, + finalize(existing, { storage }) { + expect(existing).toBe("merged value"); + expect(storage.cached).toBe(existing); + delete storage.cached; + // Finish the test (success). + resolve(); + }, + }, + }, + }, + }, + }); + + const query = gql` + query { + parent { + deleteMe @client + } + } + `; + + function testWriteAndRead(cache: InMemoryCache) { + cache.writeQuery({ + query, + data: { + parent: { + __typename: "Parent", + deleteMe: "initial value", + }, + }, + }); + + expect(cache.extract()).toEqual({ + ROOT_QUERY: { + __typename: "Query", + parent: { + __typename: "Parent", + deleteMe: "merged value", + }, + }, + }); + + expect(cache.readQuery({ query })).toEqual({ + parent: { + __typename: "Parent", + deleteMe: "read value", + }, + }); + } + + itAsync("are called when a parent object is evicted from the cache", resolve => { + const cache = makeCache(resolve); + testWriteAndRead(cache); + + const evicted = cache.evict({ + // Note that we're removing Query.parent, not directly removing + // Parent.deleteMe, but we still expect the Parent.deleteMe finalize + // function to be called. 
+ fieldName: "parent", + }); + expect(evicted).toBe(true); + }); + + itAsync("are called when cache.modify causes the parent object to lose fields", resolve => { + const cache = makeCache(resolve); + testWriteAndRead(cache); + + const modified = cache.modify({ + fields: { + parent(value: StoreObject) { + const { deleteMe, ...rest } = value; + expect(rest).toEqual({ + __typename: "Parent", + }); + return rest; + }, + }, + }); + expect(modified).toBe(true); + }); + + itAsync("are called even if cache is cleared/restored", resolve => { + const cache = makeCache(resolve); + testWriteAndRead(cache); + + const snapshot = cache.extract(); + cache.reset(); + expect(cache.extract()).toEqual({}); + cache.restore(snapshot); + expect(cache.extract()).toEqual(snapshot); + + cache.writeQuery({ + query, + overwrite: true, + data: { + parent: { + __typename: "Parent", + deleteMe: void 0, + }, + }, + }); + }); + }); + it("merge functions can deduplicate items using readField", function () { const cache = new InMemoryCache({ typePolicies: { diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts index 894069fe75a..7c8ae0e1b3d 100644 --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -13,8 +13,8 @@ import { maybeDeepFreeze, canUseWeakMap, } from '../../utilities'; -import { NormalizedCache, NormalizedCacheObject } from './types'; -import { hasOwn, fieldNameFromStoreName } from './helpers'; +import { NormalizedCache, NormalizedCacheObject, ReadMergeModifyContext } from './types'; +import { hasOwn, fieldNameFromStoreName, storeValueIsStoreObject } from './helpers'; import { Policies, StorageType } from './policies'; import { Cache } from '../core/types/Cache'; import { @@ -99,7 +99,7 @@ export abstract class EntityStore implements NormalizedCache { older: string | StoreObject, newer: StoreObject | string, ): void { - let dataId: string | undefined; + let dataId: string; const existing: StoreObject | undefined = typeof older === 
"string" @@ -116,6 +116,7 @@ export abstract class EntityStore implements NormalizedCache { if (!incoming) return; invariant( + // @ts-ignore typeof dataId === "string", "store.merge expects a string ID", ); @@ -123,13 +124,10 @@ export abstract class EntityStore implements NormalizedCache { const merged: StoreObject = new DeepMerger(storeObjectReconciler).merge(existing, incoming); - // Even if merged === existing, existing may have come from a lower - // layer, so we always need to set this.data[dataId] on this level. - this.data[dataId] = merged; - if (merged !== existing) { delete this.refs[dataId]; if (this.group.caching) { + const isLayer = this instanceof Layer; const fieldsToDirty: Record = Object.create(null); // If we added a new StoreObject where there was previously none, dirty @@ -160,8 +158,9 @@ export abstract class EntityStore implements NormalizedCache { // If merged[storeFieldName] has become undefined, and this is the // Root layer, actually delete the property from the merged object, - // which is guaranteed to have been created fresh in this method. - if (merged[storeFieldName] === void 0 && !(this instanceof Layer)) { + // which is guaranteed to have been created fresh in store.merge. + // TODO Move this to the end of the store.merge method. + if (merged[storeFieldName] === void 0 && !isLayer) { delete merged[storeFieldName]; } } @@ -178,9 +177,70 @@ export abstract class EntityStore implements NormalizedCache { } Object.keys(fieldsToDirty).forEach( - fieldName => this.group.dirty(dataId as string, fieldName)); + fieldName => this.group.dirty(dataId, fieldName)); + } + + // Make sure we have a (string | number)[] path for every object in the + // merged object tree, including non-normalized non-Reference objects that + // are embedded/nested within normalized parent objects. 
The path of such + // objects will be an array starting with the string ID of the closest + // enclosing entity object, followed by the string and number properties + // that lead from the entity to the nested object within it. + this.group.assignPaths(dataId, merged); + + // Run finalize functions for fields that are being removed. We consider + // only the fields of existing that are shared by incoming, since those + // are the only fields that could be changing. + if (existing) { + const context: ReadMergeModifyContext = { store: this }; + + const walk = (existing: StoreValue, incoming: StoreValue | undefined) => { + if (existing === incoming) return; + + if (Array.isArray(existing)) { + (existing as StoreValue[]).forEach((child, i) => { + walk( + child, + incoming && Array.isArray(incoming) + ? incoming[i] + : void 0, + ); + }); + + } else if (storeValueIsStoreObject(existing)) { + Object.keys(existing).forEach(storeFieldName => { + const eChild = existing[storeFieldName]; + const iChild = incoming && storeValueIsStoreObject(incoming) + ? incoming[storeFieldName] + : void 0; + + // Visit children before running finalizeField for eChild. + walk(eChild, iChild); + + if (iChild === void 0) { + this.policies.finalizeField( + existing.__typename, + existing, + storeFieldName, + context, + ); + } + }); + } + }; + + // To detect field removals (in order to run finalize functions), we can + // restrict our attention to the incoming fields, since those are the + // top-level fields that might have changed. + Object.keys(incoming).forEach(storeFieldName => { + walk(existing[storeFieldName], incoming[storeFieldName]); + }); } } + + // Even if merged === existing, existing may have come from a lower + // layer, so we always need to set this.data[dataId] on this level. 
+ this.data[dataId] = merged; } public modify( @@ -225,7 +285,10 @@ export abstract class EntityStore implements NormalizedCache { ...sharedDetails, fieldName, storeFieldName, - storage: this.getStorage(dataId, storeFieldName), + storage: this.group.getStorage( + makeReference(dataId), + storeFieldName, + ), }); if (newValue === INVALIDATE) { this.group.dirty(dataId, storeFieldName); @@ -352,11 +415,6 @@ export abstract class EntityStore implements NormalizedCache { return this; } - public abstract getStorage( - idOrObj: string | StoreObject, - ...storeFieldNames: (string | number)[] - ): StorageType; - // Maps root entity IDs to the number of times they have been retained, minus // the number of times they have been released. Retained entities keep other // entities they reference (even indirectly) from being garbage collected. @@ -449,7 +507,8 @@ export abstract class EntityStore implements NormalizedCache { // Used to compute cache keys specific to this.group. public makeCacheKey(...args: any[]): object; public makeCacheKey() { - return this.group.keyMaker.lookupArray(arguments); + const found = this.group.keyMaker.lookupArray(arguments); + return found.cacheKey || (found.cacheKey = Object.create(null)); } // Bound function that can be passed around to provide easy access to fields @@ -549,9 +608,71 @@ class CacheGroup { } } + private paths = new WeakMap(); + public assignPaths(dataId: string, merged: StoreObject) { + const paths = this.paths; + const path: (string | number)[] = [dataId]; + + // TODO + function assign(this: void, obj: StoreValue) { + if (Array.isArray(obj)) { + obj.forEach(handleChild); + } else if (storeValueIsStoreObject(obj) && !paths.has(obj)) { + Object.keys(obj).forEach(storeFieldName => { + const child = obj[storeFieldName]; + handleChild(child, storeFieldName); + }); + } + } + + function handleChild(child: StoreValue, key: string | number) { + if (storeValueIsStoreObject(child)) { + if (paths.has(child)) return; + paths.set(child, 
path.concat(key)); + } + try { + path.push(key); + assign(child); + } finally { + invariant(path.pop() === key); + } + } + + assign(merged); + } + + public getStorage( + parentObjOrRef: StoreObject | Reference, + ...pathSuffix: (string | number)[] + ) { + const path: any[] = []; + const push = (key: string | number) => path.push(key); + + if (isReference(parentObjOrRef)) { + path.push(parentObjOrRef.__ref); + } else { + // See assignPathsAndFinalize to understand how this map is populated. + const assignedPath = this.paths.get(parentObjOrRef); + if (assignedPath) { + assignedPath.forEach(push); + } else { + path.push(parentObjOrRef); + } + } + + // Append the provided suffix to the path array. + pathSuffix.forEach(push); + + const found = this.keyMaker.lookupArray(path); + return found.storage || (found.storage = Object.create(null)); + } + // Used by the EntityStore#makeCacheKey method to compute cache keys // specific to this CacheGroup. - public readonly keyMaker = new Trie(canUseWeakMap); + public readonly keyMaker = new Trie<{ + cacheKey?: object; + storage?: StorageType; + }>(canUseWeakMap); } function makeDepKey(dataId: string, storeFieldName: string) { @@ -593,11 +714,6 @@ export namespace EntityStore { // Never remove the root layer. 
return this; } - - public readonly storageTrie = new Trie(canUseWeakMap); - public getStorage(): StorageType { - return this.storageTrie.lookupArray(arguments); - } } } @@ -662,12 +778,6 @@ class Layer extends EntityStore { ...super.findChildRefIds(dataId), } : fromParent; } - - public getStorage(): StorageType { - let p: EntityStore = this.parent; - while ((p as Layer).parent) p = (p as Layer).parent; - return p.getStorage.apply(p, arguments); - } } // Represents a Layer permanently installed just above the Root, which allows @@ -702,10 +812,11 @@ class Stump extends Layer { function storeObjectReconciler( existingObject: StoreObject, incomingObject: StoreObject, - property: string, + storeFieldName: string, ): StoreValue { - const existingValue = existingObject[property]; - const incomingValue = incomingObject[property]; + const existingValue = existingObject[storeFieldName]; + const incomingValue = incomingObject[storeFieldName]; + // Wherever there is a key collision, prefer the incoming value, unless // it is deeply equal to the existing value. 
It's worth checking deep // equality here (even though blindly returning incoming would be diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts index f0a92829522..6cc9df73089 100644 --- a/src/cache/inmemory/policies.ts +++ b/src/cache/inmemory/policies.ts @@ -47,6 +47,7 @@ import { CanReadFunction, } from '../core/types/common'; import { WriteContext } from './writeToStore'; +import { EntityStore } from './entityStore'; export type TypePolicies = { [__typename: string]: TypePolicy; @@ -131,6 +132,7 @@ export type FieldPolicy< keyArgs?: KeySpecifier | KeyArgsFunction | false; read?: FieldReadFunction; merge?: FieldMergeFunction | boolean; + finalize?: FieldFinalizeFunction; }; export type StorageType = Record; @@ -221,6 +223,11 @@ export type FieldMergeFunction = ( options: FieldFunctionOptions, ) => SafeReadonly; +export type FieldFinalizeFunction = ( + existing: SafeReadonly | undefined, + options: FieldFunctionOptions, +) => void; + export const defaultDataIdFromObject = ( { __typename, id, _id }: Readonly, context?: KeyFieldsContext, @@ -256,6 +263,9 @@ export type PossibleTypesMap = { [supertype: string]: string[]; }; +type InternalTypePolicy = Policies["typePolicies"][string]; +type InternalFieldPolicy = InternalTypePolicy["fields"][string]; + export class Policies { private typePolicies: { [__typename: string]: { @@ -266,6 +276,7 @@ export class Policies { keyFn?: KeyArgsFunction; read?: FieldReadFunction; merge?: FieldMergeFunction; + finalize?: FieldFinalizeFunction; }; }; }; @@ -441,7 +452,7 @@ export class Policies { if (typeof incoming === "function") { existing.read = incoming; } else { - const { keyArgs, read, merge } = incoming; + const { keyArgs, read, merge, finalize } = incoming; existing.keyFn = // Pass false to disable argument-based differentiation of @@ -460,6 +471,10 @@ export class Policies { } setMerge(existing, merge); + + if (typeof finalize === "function") { + existing.finalize = finalize; + } } if (existing.read 
&& existing.merge) { @@ -511,7 +526,7 @@ export class Policies { }); } - private getTypePolicy(typename: string): Policies["typePolicies"][string] { + private getTypePolicy(typename: string): InternalTypePolicy { if (!hasOwn.call(this.typePolicies, typename)) { const policy: Policies["typePolicies"][string] = this.typePolicies[typename] = Object.create(null); @@ -560,11 +575,7 @@ typename: string | undefined, fieldName: string, createIfMissing: boolean, - ): { - keyFn?: KeyArgsFunction; - read?: FieldReadFunction; - merge?: FieldMergeFunction; - } | undefined { + ): InternalFieldPolicy | undefined { if (typename) { const fieldPolicies = this.getTypePolicy(typename).fields; return fieldPolicies[fieldName] || ( @@ -753,12 +764,8 @@ objectOrReference, options, context, - context.store.getStorage( - isReference(objectOrReference) ? objectOrReference.__ref : objectOrReference, - storeFieldName, - ), + (context.store as EntityStore).group .getStorage(objectOrReference, storeFieldName), ); // Call read(existing, readOptions) with cacheSlot holding this.cache. @@ -772,20 +779,41 @@ return existing; } + public finalizeField( + typename: string | undefined, + objectOrReference: StoreObject | Reference, + storeFieldName: string, + context: ReadMergeModifyContext, + ) { + const fieldName = fieldNameFromStoreName(storeFieldName); + const policy = this.getFieldPolicy(typename, fieldName, false); + const finalize = policy && policy.finalize; + if (finalize) { + finalize( + context.store.getFieldValue(objectOrReference, storeFieldName), + // TODO Consolidate this code with similar code in readField? 
+ makeFieldFunctionOptions( + this, + objectOrReference, + { typename, fieldName }, + context, + (context.store as EntityStore).group + .getStorage(objectOrReference, storeFieldName), + ), + ); + } + } + public getMergeFunction( parentTypename: string | undefined, fieldName: string, childTypename: string | undefined, ): FieldMergeFunction | undefined { - let policy: - | Policies["typePolicies"][string] - | Policies["typePolicies"][string]["fields"][string] - | undefined = - this.getFieldPolicy(parentTypename, fieldName, false); - let merge = policy && policy.merge; + const fieldPolicy = this.getFieldPolicy(parentTypename, fieldName, false); + let merge = fieldPolicy && fieldPolicy.merge; if (!merge && childTypename) { - policy = this.getTypePolicy(childTypename); - merge = policy && policy.merge; + const typePolicy = this.getTypePolicy(childTypename); + merge = typePolicy && typePolicy.merge; } return merge; } diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts index 13959a0c3c2..e1584ac2b79 100644 --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -7,7 +7,7 @@ import { Reference, } from '../../utilities'; import { FieldValueGetter } from './entityStore'; -import { KeyFieldsFunction, StorageType, FieldMergeFunction } from './policies'; +import { KeyFieldsFunction, FieldMergeFunction } from './policies'; import { Modifier, Modifiers, @@ -68,11 +68,6 @@ export interface NormalizedCache { getFieldValue: FieldValueGetter; toReference: ToReferenceFunction; canRead: CanReadFunction; - - getStorage( - idOrObj: string | StoreObject, - ...storeFieldNames: (string | number)[] - ): StorageType; } /** diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index 091e47884db..22dbe9e55d9 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -24,11 +24,13 @@ import { import { NormalizedCache, ReadMergeModifyContext, MergeTree } from './types'; import { 
makeProcessedFieldsMerger, fieldNameFromStoreName, storeValueIsStoreObject } from './helpers'; +import { EntityStore } from './entityStore'; import { StoreReader } from './readFromStore'; import { InMemoryCache } from './inMemoryCache'; -import { EntityStore } from './entityStore'; import { Cache } from '../../core'; +type GetStorageParams = Parameters; + export interface WriteContext extends ReadMergeModifyContext { readonly written: { [dataId: string]: SelectionSetNode[]; @@ -343,7 +345,7 @@ export class StoreWriter { existing: StoreValue, incoming: T, context: WriteContext, - getStorageArgs?: Parameters, + getStorageArgs?: GetStorageParams, ): T { if (mergeTree.map.size && !isReference(incoming)) { const e: StoreObject | Reference | undefined = ( @@ -367,7 +369,7 @@ export class StoreWriter { // sequence of storeFieldName strings/numbers identifying the nested // field name path of each field value to be merged. if (e && !getStorageArgs) { - getStorageArgs = [isReference(e) ? e.__ref : e]; + getStorageArgs = [e]; } // It's possible that applying merge functions to this subtree will @@ -423,7 +425,9 @@ export class StoreWriter { incoming, mergeTree.info, context, - getStorageArgs && context.store.getStorage(...getStorageArgs), + getStorageArgs && ( + context.store as EntityStore + ).group.getStorage(...getStorageArgs), ); }