From 640d504223594c0f74dfc290eb0ea5cf46138a7e Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Wed, 28 Apr 2021 11:39:23 -0400 Subject: [PATCH 1/8] Support InMemoryCache field policy `drop` function. As I described in this comment, though I decided to call the new function `drop` instead of `finalize`: https://github.com/apollographql/apollo-client/issues/8052#issuecomment-828547846 --- .../__tests__/__snapshots__/policies.ts.snap | 80 +++++ src/cache/inmemory/__tests__/policies.ts | 302 ++++++++++++++++++ src/cache/inmemory/entityStore.ts | 195 +++++++++-- src/cache/inmemory/policies.ts | 70 ++-- src/cache/inmemory/types.ts | 7 +- src/cache/inmemory/writeToStore.ts | 12 +- 6 files changed, 603 insertions(+), 63 deletions(-) diff --git a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap index 296372a0cd1..77a4add9107 100644 --- a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap +++ b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap @@ -1211,6 +1211,86 @@ Object { } `; +exports[`type policies field policies custom field policy drop functions are called if merge function returns undefined 1`] = ` +Object { + "ROOT_QUERY": Object { + "__typename": "Query", + "todoList": Object { + "__ref": "ToDoList:{}", + }, + }, + "Task:{\\"taskID\\":1}": Object { + "__typename": "Task", + "taskID": 1, + "text": "task #1", + }, + "Task:{\\"taskID\\":2}": Object { + "__typename": "Task", + "taskID": 2, + "text": "task #2", + }, + "ToDoList:{}": Object { + "__typename": "ToDoList", + "tasks": Array [ + Object { + "__ref": "Task:{\\"taskID\\":1}", + }, + Object { + "__ref": "Task:{\\"taskID\\":2}", + }, + ], + }, +} +`; + +exports[`type policies field policies custom field policy drop functions are called if merge function returns undefined 2`] = ` +Object { + "ROOT_QUERY": Object { + "__typename": "Query", + "todoList": Object { + "__ref": "ToDoList:{}", + }, + }, + "Task:{\\"taskID\\":1}": Object { + "__typename": "Task", + "taskID": 1, + "text": "task #1", + }, + "Task:{\\"taskID\\":2}": Object { + "__typename": "Task", + "taskID": 2, + "text": "task #2", + }, + "Task:{\\"taskID\\":3}": Object { + "__typename": "Task", + "taskID": 3, + "text": "task #3", + }, + "Task:{\\"taskID\\":4}": Object { + "__typename": "Task", + "taskID": 4, + "text": "task #4", + }, + "ToDoList:{}": Object { + "__typename": "ToDoList", + "tasks": Array [ + Object { + "__ref": "Task:{\\"taskID\\":1}", + }, + Object { + "__ref": "Task:{\\"taskID\\":2}", + }, + Object { + "__ref": "Task:{\\"taskID\\":3}", + }, + Object { + "__ref": "Task:{\\"taskID\\":4}", + }, + ], + }, +} +`; + exports[`type policies field policies read, merge, and modify functions can access options.storage 1`] = ` Object { "ROOT_QUERY": Object { diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts index 0c7267553a2..665f1ec337a 100644 --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -1705,6 +1705,308 @@ describe("type policies", function () { expect(cache.extract()).toMatchSnapshot(); }); + describe("custom field policy drop functions", function () { + const makeCache = (resolve: () => void) => new InMemoryCache({ + typePolicies: { + Parent: { + keyFields: false, + fields: { + deleteMe: { + read(existing, { storage }) { + expect(existing).toBe("merged value"); + expect(storage.cached).toBe(existing); + return "read value"; + }, + merge(existing, incoming, { storage }) { + 
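+              // The first write in testWriteAndRead below stores "initial value",
+              // so at merge time there is no existing value yet for this field.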
expect(existing).toBeUndefined(); + expect(incoming).toBe("initial value"); + return storage.cached = "merged value"; + }, + drop(existing, { storage }) { + expect(existing).toBe("merged value"); + expect(storage.cached).toBe(existing); + delete storage.cached; + // Finish the test (success). + resolve(); + }, + }, + }, + }, + }, + }); + + const query = gql` + query { + parent { + deleteMe @client + } + } + `; + + function testWriteAndRead(cache: InMemoryCache) { + cache.writeQuery({ + query, + data: { + parent: { + __typename: "Parent", + deleteMe: "initial value", + }, + }, + }); + + expect(cache.extract()).toEqual({ + ROOT_QUERY: { + __typename: "Query", + parent: { + __typename: "Parent", + deleteMe: "merged value", + }, + }, + }); + + expect(cache.readQuery({ query })).toEqual({ + parent: { + __typename: "Parent", + deleteMe: "read value", + }, + }); + } + + itAsync("are called when a parent object is evicted from the cache", resolve => { + const cache = makeCache(resolve); + testWriteAndRead(cache); + + const evicted = cache.evict({ + // Note that we're removing Query.parent, not directly removing + // Parent.deleteMe, but we still expect the Parent.deleteMe drop + // function to be called. + fieldName: "parent", + }); + expect(evicted).toBe(true); + }); + + itAsync("are called when cache.modify causes the parent object to lose fields", resolve => { + const cache = makeCache(resolve); + testWriteAndRead(cache); + + const modified = cache.modify({ + fields: { + parent(value: StoreObject) { + const { deleteMe, ...rest } = value; + expect(rest).toEqual({ + __typename: "Parent", + }); + return rest; + }, + }, + }); + expect(modified).toBe(true); + }); + + itAsync("are called even if cache is cleared/restored", resolve => { + const cache = makeCache(resolve); + testWriteAndRead(cache); + + const snapshot = cache.extract(); + cache.reset(); + expect(cache.extract()).toEqual({}); + cache.restore(snapshot); + expect(cache.extract()).toEqual(snapshot); + + cache.writeQuery({ + query, + overwrite: true, + data: { + parent: { + __typename: "Parent", + deleteMe: void 0, + }, + }, + }); + }); + + itAsync("are called if merge function returns undefined", resolve => { + const cache = new InMemoryCache({ + typePolicies: { + ToDoList: { + keyFields: [], + fields: { + tasks: { + keyArgs: false, + + merge(existing: number[] | undefined, incoming: number[], { args }) { + if (args && args.deleteOnMerge) return; + return existing ? [ + ...existing, + ...incoming, + ] : incoming; + }, + + drop(existing) { + expect(existing).toEqual([ + { __ref: 'Task:{"taskID":1}' }, + { __ref: 'Task:{"taskID":2}' }, + { __ref: 'Task:{"taskID":3}' }, + { __ref: 'Task:{"taskID":4}' }, + ]); + // Finish the test (success). + resolve(); + }, + }, + }, + }, + + Task: { + keyFields: ["taskID"], + }, + }, + }); + + const query = gql` + query { + todoList { + tasks { + taskID + text + } + } + } + `; + + const deleteQuery = gql` + query { + todoList { + tasks(deleteOnMerge: true) { + taskID + text + } + } + } + `; + + const deleteData = { + todoList: { + __typename: "ToDoList", + tasks: [], + }, + }; + + // This write will cause the merge function to return undefined, but + // since the field is already undefined, the undefined return from the + // merge function should not trigger the drop function. 
+ cache.writeQuery({ + query: deleteQuery, + data: deleteData, + }); + + cache.writeQuery({ + query, + data: { + todoList: { + __typename: "ToDoList", + tasks: [ + { __typename: "Task", taskID: 1, text: "task #1" }, + { __typename: "Task", taskID: 2, text: "task #2" }, + ], + }, + }, + }); + + expect(cache.extract()).toMatchSnapshot(); + + cache.writeQuery({ + query, + data: { + todoList: { + __typename: "ToDoList", + tasks: [ + { __typename: "Task", taskID: 3, text: "task #3" }, + { __typename: "Task", taskID: 4, text: "task #4" }, + ], + }, + }, + }); + + expect(cache.extract()).toMatchSnapshot(); + + // Since the ToDoList.tasks field has data now, this deletion should + // trigger the drop function, unlike the last time we used deleteQuery. + cache.writeQuery({ + query: deleteQuery, + data: deleteData, + }); + }); + + itAsync("are called for fields within garbage collected objects", (resolve, reject) => { + const cache = new InMemoryCache({ + typePolicies: { + Garbage: { + keyFields: ["gid"], + fields: { + isToxic: { + drop(isToxic: boolean, { readField }) { + const gid = readField("gid")!; + if (expectedToxicities.has(gid)) { + expect(expectedToxicities.get(gid)).toBe(isToxic); + if (expectedToxicities.delete(gid) && + expectedToxicities.size === 0) { + resolve(); + } + } else { + reject(`unexpectedly dropped garbage ${gid}`); + } + }, + }, + }, + }, + }, + }); + + const expectedToxicities = new Map(); + expectedToxicities.set(234, true); + expectedToxicities.set(456, false); + + const query = gql` + query { + garbages { + gid + isToxic + } + } + `; + + cache.writeQuery({ + query, + data: { + garbages: [ + { __typename: "Garbage", gid: 123, isToxic: false }, + { __typename: "Garbage", gid: 234, isToxic: true }, + { __typename: "Garbage", gid: 345, isToxic: true }, + { __typename: "Garbage", gid: 456, isToxic: false }, + ], + }, + }); + + expect(cache.gc()).toEqual([]); + + cache.writeQuery({ + query, + overwrite: true, + data: { + garbages: [ + { __typename: "Garbage", gid: 123, isToxic: false }, + { __typename: "Garbage", gid: 345, isToxic: true }, + ], + }, + }); + + expect(cache.gc().sort()).toEqual([ + 'Garbage:{"gid":234}', + 'Garbage:{"gid":456}', + ]); + }); + }); + it("merge functions can deduplicate items using readField", function () { const cache = new InMemoryCache({ typePolicies: { diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts index 894069fe75a..ace6dfd498e 100644 --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -13,8 +13,8 @@ import { maybeDeepFreeze, canUseWeakMap, } from '../../utilities'; -import { NormalizedCache, NormalizedCacheObject } from './types'; -import { hasOwn, fieldNameFromStoreName } from './helpers'; +import { NormalizedCache, NormalizedCacheObject, ReadMergeModifyContext } from './types'; +import { hasOwn, fieldNameFromStoreName, storeValueIsStoreObject } from './helpers'; import { Policies, StorageType } from './policies'; import { Cache } from '../core/types/Cache'; import { @@ -99,7 +99,7 @@ export abstract class EntityStore implements NormalizedCache { older: string | StoreObject, newer: StoreObject | string, ): void { - let dataId: string | undefined; + let dataId: string; const existing: StoreObject | undefined = typeof older === "string" @@ -116,6 +116,7 @@ export abstract class EntityStore implements NormalizedCache { if (!incoming) return; invariant( + // @ts-ignore typeof dataId === "string", "store.merge expects a string ID", ); @@ -123,13 +124,10 @@ export abstract class 
EntityStore implements NormalizedCache {
     const merged: StoreObject =
       new DeepMerger(storeObjectReconciler).merge(existing, incoming);
 
-    // Even if merged === existing, existing may have come from a lower
-    // layer, so we always need to set this.data[dataId] on this level.
-    this.data[dataId] = merged;
-
     if (merged !== existing) {
       delete this.refs[dataId];
       if (this.group.caching) {
+        const isLayer = this instanceof Layer;
         const fieldsToDirty: Record<string, 1> = Object.create(null);
 
         // If we added a new StoreObject where there was previously none, dirty
@@ -160,8 +158,9 @@ export abstract class EntityStore implements NormalizedCache {
 
             // If merged[storeFieldName] has become undefined, and this is the
             // Root layer, actually delete the property from the merged object,
-            // which is guaranteed to have been created fresh in this method.
-            if (merged[storeFieldName] === void 0 && !(this instanceof Layer)) {
+            // which is guaranteed to have been created fresh in store.merge.
+            // TODO Move this to the end of the store.merge method.
+            if (merged[storeFieldName] === void 0 && !isLayer) {
               delete merged[storeFieldName];
             }
           }
@@ -178,9 +177,81 @@ export abstract class EntityStore implements NormalizedCache {
         }
 
         Object.keys(fieldsToDirty).forEach(
-          fieldName => this.group.dirty(dataId as string, fieldName));
+          fieldName => this.group.dirty(dataId, fieldName));
+      }
+
+      // Make sure we have a (string | number)[] path for every object in the
+      // merged object tree, including non-normalized non-Reference objects that
+      // are embedded/nested within normalized parent objects. The path of such
+      // objects will be an array starting with the string ID of the closest
+      // enclosing entity object, followed by the string and number properties
+      // that lead from the entity to the nested object within it.
+      this.group.assignPaths(dataId, merged);
+
+      if (existing) {
+        // Collect objects and field names removed by this merge, so we can run
+        // drop functions configured for the fields that are about to be removed
+        // (before we finally set this.data[dataId] = merged, below).
+        const drops: [StoreObject, string][] = [];
+
+        const walk = (exVal: StoreValue, inVal: StoreValue | undefined) => {
+          if (exVal === inVal) return;
+
+          if (Array.isArray(exVal)) {
+            (exVal as StoreValue[]).forEach((exChild, i) => {
+              const inChild = inVal && Array.isArray(inVal) ? inVal[i] : void 0;
+              walk(exChild, inChild);
+            });
+
+          } else if (storeValueIsStoreObject(exVal)) {
+            Object.keys(exVal).forEach(storeFieldName => {
+              const exChild = exVal[storeFieldName];
+              const inChild = inVal && storeValueIsStoreObject(inVal)
+                ? inVal[storeFieldName]
+                : void 0;
+
+              // Visit children before running dropField for this field.
+              walk(exChild, inChild);
+
+              if (inChild === void 0) {
+                drops.push([exVal, storeFieldName]);
+              }
+            });
+          }
+        };
+
+        // To detect field removals (in order to run drop functions), we can
+        // restrict our attention to the incoming fields, since those are the
+        // top-level fields that might have changed.
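+        // (Fields of existing that do not also appear in incoming are simply
+        // carried over by the DeepMerger, so they cannot have been removed.)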
+ Object.keys(incoming).forEach(storeFieldName => { + const eChild = existing[storeFieldName]; + const iChild = incoming[storeFieldName]; + + walk(eChild, iChild); + + if (iChild === void 0) { + drops.push([existing, storeFieldName]); + } + }); + + if (drops.length) { + const context: ReadMergeModifyContext = { store: this }; + + drops.forEach(([storeObject, storeFieldName]) => { + this.policies.dropField( + storeObject.__typename, + storeObject, + storeFieldName, + context, + ); + }); + } } } + + // Even if merged === existing, existing may have come from a lower + // layer, so we always need to set this.data[dataId] on this level. + this.data[dataId] = merged; } public modify( @@ -225,7 +296,10 @@ export abstract class EntityStore implements NormalizedCache { ...sharedDetails, fieldName, storeFieldName, - storage: this.getStorage(dataId, storeFieldName), + storage: this.group.getStorage( + makeReference(dataId), + storeFieldName, + ), }); if (newValue === INVALIDATE) { this.group.dirty(dataId, storeFieldName); @@ -352,11 +426,6 @@ export abstract class EntityStore implements NormalizedCache { return this; } - public abstract getStorage( - idOrObj: string | StoreObject, - ...storeFieldNames: (string | number)[] - ): StorageType; - // Maps root entity IDs to the number of times they have been retained, minus // the number of times they have been released. Retained entities keep other // entities they reference (even indirectly) from being garbage collected. @@ -449,7 +518,8 @@ export abstract class EntityStore implements NormalizedCache { // Used to compute cache keys specific to this.group. public makeCacheKey(...args: any[]): object; public makeCacheKey() { - return this.group.keyMaker.lookupArray(arguments); + const found = this.group.keyMaker.lookupArray(arguments); + return found.cacheKey || (found.cacheKey = Object.create(null)); } // Bound function that can be passed around to provide easy access to fields @@ -549,9 +619,80 @@ class CacheGroup { } } + // This WeakMap maps every non-normalized object reference contained by the + // store to the path of that object within the enclosing entity object. This + // information is collected by the assignPaths method after every store.merge, + // so store.data should never contain any un-pathed objects. As a reminder, + // these object references are handled immutably from here on, so the objects + // should not move around in a way that invalidates these paths. This path + // information is useful in the getStorage method, below. 
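+  // For example, the embedded Parent object from the tests in this PR (it
+  // has keyFields: false, so it is nested inside ROOT_QUERY rather than
+  // normalized) would be assigned the path ["ROOT_QUERY", "parent"].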
+  private paths = new WeakMap<StoreObject, (string | number)[]>();
+
+  public assignPaths(dataId: string, merged: StoreObject) {
+    const paths = this.paths;
+    const path: (string | number)[] = [dataId];
+
+    function assign(this: void, obj: StoreValue) {
+      if (Array.isArray(obj)) {
+        obj.forEach(handleChild);
+      } else if (storeValueIsStoreObject(obj) && !paths.has(obj)) {
+        Object.keys(obj).forEach(storeFieldName => {
+          const child = obj[storeFieldName];
+          handleChild(child, storeFieldName);
+        });
+      }
+    }
+
+    function handleChild(child: StoreValue, key: string | number) {
+      if (storeValueIsStoreObject(child)) {
+        if (paths.has(child)) return;
+        paths.set(child, path.concat(key));
+      }
+      try {
+        path.push(key);
+        assign(child);
+      } finally {
+        invariant(path.pop() === key);
+      }
+    }
+
+    assign(merged);
+  }
+
+  public getStorage(
+    parentObjOrRef: StoreObject | Reference,
+    ...pathSuffix: (string | number)[]
+  ) {
+    const path: any[] = [];
+    const push = (key: string | number) => path.push(key);
+
+    if (isReference(parentObjOrRef)) {
+      path.push(parentObjOrRef.__ref);
+    } else {
+      // See assignPaths to understand how this map is populated.
+      const assignedPath = this.paths.get(parentObjOrRef);
+      if (assignedPath) {
+        assignedPath.forEach(push);
+      } else {
+        // If we can't find a path for this object, use the object reference
+        // itself as a key.
+        path.push(parentObjOrRef);
+      }
+    }
+
+    // Append the provided suffix to the path array.
+    pathSuffix.forEach(push);
+
+    const found = this.keyMaker.lookupArray(path);
+    return found.storage || (found.storage = Object.create(null));
+  }
+
   // Used by the EntityStore#makeCacheKey method to compute cache keys
   // specific to this CacheGroup.
-  public readonly keyMaker = new Trie<object>(canUseWeakMap);
+  public readonly keyMaker = new Trie<{
+    cacheKey?: object;
+    storage?: StorageType;
+  }>(canUseWeakMap);
 }
 
 function makeDepKey(dataId: string, storeFieldName: string) {
@@ -593,11 +734,6 @@ export namespace EntityStore {
       // Never remove the root layer.
       return this;
     }
-
-    public readonly storageTrie = new Trie<StorageType>(canUseWeakMap);
-    public getStorage(): StorageType {
-      return this.storageTrie.lookupArray(arguments);
-    }
   }
 }
 
@@ -662,12 +798,6 @@ class Layer extends EntityStore {
       ...super.findChildRefIds(dataId),
     } : fromParent;
   }
-
-  public getStorage(): StorageType {
-    let p: EntityStore = this.parent;
-    while ((p as Layer).parent) p = (p as Layer).parent;
-    return p.getStorage.apply(p, arguments);
-  }
 }
 
 // Represents a Layer permanently installed just above the Root, which allows
@@ -702,10 +832,11 @@ class Stump extends Layer {
 function storeObjectReconciler(
   existingObject: StoreObject,
   incomingObject: StoreObject,
-  property: string,
+  storeFieldName: string,
 ): StoreValue {
-  const existingValue = existingObject[property];
-  const incomingValue = incomingObject[property];
+  const existingValue = existingObject[storeFieldName];
+  const incomingValue = incomingObject[storeFieldName];
+
   // Wherever there is a key collision, prefer the incoming value, unless
   // it is deeply equal to the existing value. It's worth checking deep
   // equality here (even though blindly returning incoming would be

diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts
index f0a92829522..4084765bcbf 100644
--- a/src/cache/inmemory/policies.ts
+++ b/src/cache/inmemory/policies.ts
@@ -47,6 +47,7 @@ import {
   CanReadFunction,
 } from '../core/types/common';
 import { WriteContext } from './writeToStore';
+import { EntityStore } from './entityStore';
 
 export type TypePolicies = {
   [__typename: string]: TypePolicy;
@@ -131,6 +132,7 @@ export type FieldPolicy<
   keyArgs?: KeySpecifier | KeyArgsFunction | false;
   read?: FieldReadFunction;
   merge?: FieldMergeFunction | boolean;
+  drop?: FieldDropFunction;
 };
 
 export type StorageType = Record<string, any>;
@@ -221,6 +223,11 @@ export type FieldMergeFunction<TExisting = any, TIncoming = TExisting> = (
   options: FieldFunctionOptions,
 ) => SafeReadonly<TExisting>;
 
+export type FieldDropFunction<TExisting = any> = (
+  existing: SafeReadonly<TExisting> | undefined,
+  options: FieldFunctionOptions,
+) => void;
+
 export const defaultDataIdFromObject = (
   { __typename, id, _id }: Readonly<StoreObject>,
   context?: KeyFieldsContext,
@@ -256,6 +263,9 @@ export type PossibleTypesMap = {
   [supertype: string]: string[];
 };
 
+type InternalTypePolicy = Policies["typePolicies"][string];
+type InternalFieldPolicy = InternalTypePolicy["fields"][string];
+
 export class Policies {
   private typePolicies: {
     [__typename: string]: {
@@ -266,6 +276,7 @@ export class Policies {
         keyFn?: KeyArgsFunction;
         read?: FieldReadFunction;
         merge?: FieldMergeFunction;
+        drop?: FieldDropFunction;
       };
     };
   };
@@ -441,7 +452,7 @@ export class Policies {
       if (typeof incoming === "function") {
         existing.read = incoming;
       } else {
-        const { keyArgs, read, merge } = incoming;
+        const { keyArgs, read, merge, drop } = incoming;
 
         existing.keyFn =
           // Pass false to disable argument-based differentiation of
@@ -460,6 +471,10 @@ export class Policies {
         }
 
         setMerge(existing, merge);
+
+        if (typeof drop === "function") {
+          existing.drop = drop;
+        }
       }
 
       if (existing.read && existing.merge) {
@@ -511,7 +526,7 @@ export class Policies {
     });
   }
 
-  private getTypePolicy(typename: string): Policies["typePolicies"][string] {
+  private getTypePolicy(typename: string): InternalTypePolicy {
     if (!hasOwn.call(this.typePolicies, typename)) {
       const policy: Policies["typePolicies"][string] =
         this.typePolicies[typename] = Object.create(null);
@@ -560,11 +575,7 @@ export class Policies {
     typename: string | undefined,
     fieldName: string,
     createIfMissing: boolean,
-  ): {
-    keyFn?: KeyArgsFunction;
-    read?: FieldReadFunction;
-    merge?: FieldMergeFunction;
-  } | undefined {
+  ): InternalFieldPolicy | undefined {
     if (typename) {
       const fieldPolicies = this.getTypePolicy(typename).fields;
       return fieldPolicies[fieldName] || (
@@ -753,12 +764,8 @@ export class Policies {
       objectOrReference,
       options,
       context,
-      context.store.getStorage(
-        isReference(objectOrReference)
-          ? objectOrReference.__ref
-          : objectOrReference,
-        storeFieldName,
-      ),
+      (context.store as EntityStore).group
+        .getStorage(objectOrReference, storeFieldName),
     );
 
     // Call read(existing, readOptions) with cacheSlot holding this.cache.
@@ -772,20 +779,41 @@ export class Policies {
     return existing;
   }
 
+  public dropField(
+    typename: string | undefined,
+    objectOrReference: StoreObject | Reference,
+    storeFieldName: string,
+    context: ReadMergeModifyContext,
+  ) {
+    const fieldName = fieldNameFromStoreName(storeFieldName);
+    const policy = this.getFieldPolicy(typename, fieldName, false);
+    const drop = policy && policy.drop;
+    if (drop) {
+      drop(
+        context.store.getFieldValue(objectOrReference, storeFieldName),
+        // TODO Consolidate this code with similar code in readField?
+        makeFieldFunctionOptions(
+          this,
+          objectOrReference,
+          { typename, fieldName },
+          context,
+          (context.store as EntityStore).group
+            .getStorage(objectOrReference, storeFieldName),
+        ),
+      );
+    }
+  }
+
   public getMergeFunction(
     parentTypename: string | undefined,
     fieldName: string,
     childTypename: string | undefined,
   ): FieldMergeFunction | undefined {
-    let policy:
-      | Policies["typePolicies"][string]
-      | Policies["typePolicies"][string]["fields"][string]
-      | undefined =
-      this.getFieldPolicy(parentTypename, fieldName, false);
-    let merge = policy && policy.merge;
+    const fieldPolicy = this.getFieldPolicy(parentTypename, fieldName, false);
+    let merge = fieldPolicy && fieldPolicy.merge;
     if (!merge && childTypename) {
-      policy = this.getTypePolicy(childTypename);
-      merge = policy && policy.merge;
+      const typePolicy = this.getTypePolicy(childTypename);
+      merge = typePolicy && typePolicy.merge;
     }
     return merge;
   }
diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts
index 13959a0c3c2..e1584ac2b79 100644
--- a/src/cache/inmemory/types.ts
+++ b/src/cache/inmemory/types.ts
@@ -7,7 +7,7 @@ import {
   Reference,
 } from '../../utilities';
 import { FieldValueGetter } from './entityStore';
-import { KeyFieldsFunction, StorageType, FieldMergeFunction } from './policies';
+import { KeyFieldsFunction, FieldMergeFunction } from './policies';
 import {
   Modifier,
   Modifiers,
@@ -68,11 +68,6 @@ export interface NormalizedCache {
   getFieldValue: FieldValueGetter;
   toReference: ToReferenceFunction;
   canRead: CanReadFunction;
-
-  getStorage(
-    idOrObj: string | StoreObject,
-    ...storeFieldNames: (string | number)[]
-  ): StorageType;
 }
 
 /**
diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts
index 091e47884db..22dbe9e55d9 100644
--- a/src/cache/inmemory/writeToStore.ts
+++ b/src/cache/inmemory/writeToStore.ts
@@ -24,11 +24,13 @@ import {
 import { NormalizedCache, ReadMergeModifyContext, MergeTree } from './types';
 import { makeProcessedFieldsMerger, fieldNameFromStoreName, storeValueIsStoreObject } from './helpers';
+import { EntityStore } from './entityStore';
 import { StoreReader } from './readFromStore';
 import { InMemoryCache } from './inMemoryCache';
-import { EntityStore } from './entityStore';
 import { Cache } from '../../core';
 
+type GetStorageParams = Parameters<EntityStore["group"]["getStorage"]>;
+
 export interface WriteContext extends ReadMergeModifyContext {
   readonly written: {
     [dataId: string]: SelectionSetNode[];
@@ -343,7 +345,7 @@ export class StoreWriter {
     existing: StoreValue,
     incoming: T,
     context: WriteContext,
-    getStorageArgs?: Parameters<EntityStore["getStorage"]>,
+    getStorageArgs?: GetStorageParams,
   ): T {
     if (mergeTree.map.size && !isReference(incoming)) {
       const e: StoreObject | Reference | undefined = (
@@ -367,7 +369,7 @@ export class StoreWriter {
       // sequence of storeFieldName strings/numbers identifying the nested
       // field name path of each field value to be merged.
       if (e && !getStorageArgs) {
-        getStorageArgs = [isReference(e) ? e.__ref : e];
+        getStorageArgs = [e];
       }
 
       // It's possible that applying merge functions to this subtree will
@@ -423,7 +425,9 @@ export class StoreWriter {
         incoming,
         mergeTree.info,
         context,
-        getStorageArgs && context.store.getStorage(...getStorageArgs),
+        getStorageArgs && (
+          context.store as EntityStore
+        ).group.getStorage(...getStorageArgs),
       );
     }

From e4ee0be2d39224be422c93068825b7d51609e103 Mon Sep 17 00:00:00 2001
From: Ben Newman
Date: Thu, 29 Apr 2021 15:25:47 -0400
Subject: [PATCH 2/8] Address a remaining TODO.

---
 src/cache/inmemory/entityStore.ts | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts
index ace6dfd498e..d055d253f20 100644
--- a/src/cache/inmemory/entityStore.ts
+++ b/src/cache/inmemory/entityStore.ts
@@ -127,7 +127,6 @@ export abstract class EntityStore implements NormalizedCache {
     if (merged !== existing) {
       delete this.refs[dataId];
       if (this.group.caching) {
-        const isLayer = this instanceof Layer;
         const fieldsToDirty: Record<string, 1> = Object.create(null);
 
         // If we added a new StoreObject where there was previously none, dirty
@@ -155,14 +154,6 @@ export abstract class EntityStore implements NormalizedCache {
             !this.policies.hasKeyArgs(merged.__typename, fieldName)) {
             fieldsToDirty[fieldName] = 1;
           }
-
-          // If merged[storeFieldName] has become undefined, and this is the
-          // Root layer, actually delete the property from the merged object,
-          // which is guaranteed to have been created fresh in store.merge.
-          // TODO Move this to the end of the store.merge method.
-          if (merged[storeFieldName] === void 0 && !isLayer) {
-            delete merged[storeFieldName];
-          }
         }
       });
 
@@ -220,6 +211,8 @@ export abstract class EntityStore implements NormalizedCache {
         }
       };
 
+      const isLayer = this instanceof Layer;
+
       // To detect field removals (in order to run drop functions), we can
       // restrict our attention to the incoming fields, since those are the
       // top-level fields that might have changed.
       Object.keys(incoming).forEach(storeFieldName => {
         const eChild = existing[storeFieldName];
         const iChild = incoming[storeFieldName];
 
         walk(eChild, iChild);
 
         if (iChild === void 0) {
           drops.push([existing, storeFieldName]);
+
+          // If merged[storeFieldName] has become undefined, and this is the
+          // Root layer, actually delete the property from the merged object,
+          // which is guaranteed to have been created fresh in store.merge.
+          if (hasOwn.call(merged, storeFieldName) &&
+              merged[storeFieldName] === void 0 &&
+              !isLayer) {
+            delete merged[storeFieldName];
+          }
         }
       });

From 621986dee3c9c984c8beb2d25826bedb9a897ef9 Mon Sep 17 00:00:00 2001
From: Ben Newman
Date: Thu, 29 Apr 2021 16:23:13 -0400
Subject: [PATCH 3/8] Avoid recreating function object in inner loop of assignPaths.
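
Object.keys(obj).forEach accepts an optional second thisArg argument, so we
can pass the enclosing StoreObject itself as thisArg and reuse a single named
handleObjectProperty function, rather than allocating a fresh closure for
every StoreObject the assign helper visits. Roughly (an illustrative sketch
of the pattern; see the diff below for the actual code):

    // Before: a new arrow function is allocated per object visited.
    Object.keys(obj).forEach(storeFieldName => {
      handleChild(obj[storeFieldName], storeFieldName);
    });

    // After: one shared function, with obj supplied as thisArg.
    Object.keys(obj).forEach(handleObjectProperty, obj);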
---
 src/cache/inmemory/entityStore.ts | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts
index d055d253f20..d88deca4416 100644
--- a/src/cache/inmemory/entityStore.ts
+++ b/src/cache/inmemory/entityStore.ts
@@ -638,13 +638,15 @@ class CacheGroup {
     if (Array.isArray(obj)) {
       obj.forEach(handleChild);
     } else if (storeValueIsStoreObject(obj) && !paths.has(obj)) {
-      Object.keys(obj).forEach(storeFieldName => {
-        const child = obj[storeFieldName];
-        handleChild(child, storeFieldName);
-      });
+      Object.keys(obj).forEach(handleObjectProperty, obj);
     }
   }
 
+  function handleObjectProperty(this: StoreObject, storeFieldName: string) {
+    const child = this[storeFieldName];
+    handleChild(child, storeFieldName);
+  }
+
   function handleChild(child: StoreValue, key: string | number) {
     if (storeValueIsStoreObject(child)) {
       if (paths.has(child)) return;

From 742a3628ba9046cc70db8f8d74e29149bd45b7fe Mon Sep 17 00:00:00 2001
From: Ben Newman
Date: Thu, 29 Apr 2021 16:24:22 -0400
Subject: [PATCH 4/8] Use helper function more consistently in getStorage.

---
 src/cache/inmemory/entityStore.ts | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts
index d88deca4416..efa0b4b255a 100644
--- a/src/cache/inmemory/entityStore.ts
+++ b/src/cache/inmemory/entityStore.ts
@@ -668,10 +668,10 @@ class CacheGroup {
     ...pathSuffix: (string | number)[]
   ) {
     const path: any[] = [];
-    const push = (key: string | number) => path.push(key);
+    const push = (key: StoreObject | string | number) => path.push(key);
 
     if (isReference(parentObjOrRef)) {
-      path.push(parentObjOrRef.__ref);
+      push(parentObjOrRef.__ref);
     } else {
       // See assignPaths to understand how this map is populated.
       const assignedPath = this.paths.get(parentObjOrRef);
       if (assignedPath) {
         assignedPath.forEach(push);
       } else {
         // If we can't find a path for this object, use the object reference
         // itself as a key.
-        path.push(parentObjOrRef);
+        push(parentObjOrRef);
       }
     }
 

From 561eccaeed80d0e40949ae6ea7e67a663c1506b6 Mon Sep 17 00:00:00 2001
From: Ben Newman
Date: Thu, 29 Apr 2021 16:57:57 -0400
Subject: [PATCH 5/8] Compare existing data to merged data when scanning for fields to drop.

---
 src/cache/inmemory/entityStore.ts | 48 +++++++++++++++++++------------
 1 file changed, 29 insertions(+), 19 deletions(-)

diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts
index efa0b4b255a..eb12b1bf365 100644
--- a/src/cache/inmemory/entityStore.ts
+++ b/src/cache/inmemory/entityStore.ts
@@ -184,28 +184,35 @@ export abstract class EntityStore implements NormalizedCache {
       // drop functions configured for the fields that are about to be removed
       // (before we finally set this.data[dataId] = merged, below).
       const drops: [StoreObject, string][] = [];
+      const empty: StoreObject | any[] = Object.create(null);
 
-      const walk = (exVal: StoreValue, inVal: StoreValue | undefined) => {
-        if (exVal === inVal) return;
+      const scanOldDataForFieldsToDrop = (
+        oldVal: StoreValue,
+        newVal: StoreValue | undefined,
+      ) => {
+        if (oldVal === newVal) return;
 
-        if (Array.isArray(exVal)) {
-          (exVal as StoreValue[]).forEach((exChild, i) => {
-            const inChild = inVal && Array.isArray(inVal) ? inVal[i] : void 0;
-            walk(exChild, inChild);
+        if (Array.isArray(oldVal)) {
+          const newArray: any[] =
+            Array.isArray(newVal) ? newVal : empty as any[];
+
+          (oldVal as StoreValue[]).forEach((oldChild, i) => {
+            scanOldDataForFieldsToDrop(oldChild, newArray[i]);
           });
 
-        } else if (storeValueIsStoreObject(exVal)) {
-          Object.keys(exVal).forEach(storeFieldName => {
-            const exChild = exVal[storeFieldName];
-            const inChild = inVal && storeValueIsStoreObject(inVal)
-              ? inVal[storeFieldName]
-              : void 0;
+        } else if (storeValueIsStoreObject(oldVal)) {
+          const newObject: StoreObject =
+            storeValueIsStoreObject(newVal) ? newVal : empty as StoreObject;
+
+          Object.keys(oldVal).forEach(storeFieldName => {
+            const oldChild = oldVal[storeFieldName];
+            const newChild = newObject[storeFieldName];
 
             // Visit children before running dropField for this field.
-            walk(exChild, inChild);
+            scanOldDataForFieldsToDrop(oldChild, newChild);
 
-            if (inChild === void 0) {
-              drops.push([exVal, storeFieldName]);
+            if (newChild === void 0) {
+              drops.push([oldVal, storeFieldName]);
             }
           });
         }
       };
 
       const isLayer = this instanceof Layer;
 
       // To detect field removals (in order to run drop functions), we can
       // restrict our attention to the incoming fields, since those are the
       // top-level fields that might have changed.
       Object.keys(incoming).forEach(storeFieldName => {
-        const eChild = existing[storeFieldName];
-        const iChild = incoming[storeFieldName];
+        // Although we're using the keys from incoming, we want to compare
+        // existing data to merged data, since the merged data have a much
+        // better chance of being partly === to the existing data, whereas
+        // incoming tends to be all fresh objects.
+        const newFieldValue = merged[storeFieldName];
 
-        walk(eChild, iChild);
+        scanOldDataForFieldsToDrop(existing[storeFieldName], newFieldValue);
 
-        if (iChild === void 0) {
+        if (newFieldValue === void 0) {
           drops.push([existing, storeFieldName]);

From d246ad61cd699e818abb39908387c97f5df5c6b2 Mon Sep 17 00:00:00 2001
From: Ben Newman
Date: Thu, 29 Apr 2021 17:19:44 -0400
Subject: [PATCH 6/8] Mention PR #8078 in CHANGELOG.md.

---
 CHANGELOG.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9d74dff42da..ed6335b7b9f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -31,6 +31,9 @@
 - `InMemoryCache` now _guarantees_ that any two result objects returned by the cache (from `readQuery`, `readFragment`, etc.) will be referentially equal (`===`) if they are deeply equal. Previously, `===` equality was often achievable for results for the same query, on a best-effort basis. Now, equivalent result objects will be automatically shared among the result trees of completely different queries. This guarantee is important for taking full advantage of optimistic updates that correctly guess the final data, and for "pure" UI components that can skip re-rendering when their input data are unchanged.
[@benjamn](https://github.com/benjamn) in [#7439](https://github.com/apollographql/apollo-client/pull/7439) +- In addition to `read` and `merge` functions, `InMemoryCache` field policies may now configure a `drop` function, which will be called just before the field in question is removed from the cache, facilitating any final field cleanup that may be needed by `read` and `merge`.
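+
+  For example (a minimal sketch adapted from the tests in this PR; the `Parent.deleteMe` field and its policies are purely illustrative):
+
+  ```js
+  const cache = new InMemoryCache({
+    typePolicies: {
+      Parent: {
+        fields: {
+          deleteMe: {
+            merge(existing, incoming, { storage }) {
+              // Remember the merged value so read (and drop) can find it later.
+              return storage.cached = incoming;
+            },
+            drop(existing, { storage }) {
+              // Called just before Parent.deleteMe is removed from the cache,
+              // giving us a last chance to clean up what merge left in storage.
+              delete storage.cached;
+            },
+          },
+        },
+      },
+    },
+  });
+  ```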
+ [@benjamn](https://github.com/benjamn) in [#8078](https://github.com/apollographql/apollo-client/pull/8078) + - `InMemoryCache` supports a new method called `batch`, which is similar to `performTransaction` but takes named options rather than positional parameters. One of these named options is an `onDirty(watch, diff)` callback, which can be used to determine which watched queries were invalidated by the `batch` operation.
[@benjamn](https://github.com/benjamn) in [#7819](https://github.com/apollographql/apollo-client/pull/7819) From 8545da846f734d3d468152b11aa4482cac640aaa Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Thu, 29 Apr 2021 17:54:17 -0400 Subject: [PATCH 7/8] Make group.assignPaths processing lazier. --- src/cache/inmemory/entityStore.ts | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts index eb12b1bf365..9eeff979944 100644 --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -639,8 +639,16 @@ class CacheGroup { // should not move around in a way that invalidates these paths. This path // information is useful in the getStorage method, below. private paths = new WeakMap(); + private pending = new Map(); public assignPaths(dataId: string, merged: StoreObject) { + // Since path assignment can be expensive, this method merely registers the + // pending assignment and returns immediately. The private assign method + // will be called with these arguments only if needed by getStorage. + this.pending.set(dataId, merged); + } + + private assign(dataId: string, merged: StoreObject) { const paths = this.paths; const path: (string | number)[] = [dataId]; @@ -682,15 +690,23 @@ class CacheGroup { if (isReference(parentObjOrRef)) { push(parentObjOrRef.__ref); - } else { + } else while (true) { // See assignPaths to understand how this map is populated. const assignedPath = this.paths.get(parentObjOrRef); if (assignedPath) { assignedPath.forEach(push); + break; + } + if (this.pending.size) { + // If we didn't find a path for this StoreObject, but we have some + // pending path assignments to do, do those assignments and try again. + this.pending.forEach((object, dataId) => this.assign(dataId, object)); + this.pending.clear(); } else { // If we can't find a path for this object, use the object reference // itself as a key. push(parentObjOrRef); + break; } } From bdadc992ce7e1daaf169bdf10e6df7cec8b4ab47 Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Thu, 29 Apr 2021 18:20:06 -0400 Subject: [PATCH 8/8] Do less work when no drop functions have been configured. --- src/cache/inmemory/entityStore.ts | 24 +++++++++++++------- src/cache/inmemory/policies.ts | 37 ++++++++++++++++++------------- 2 files changed, 37 insertions(+), 24 deletions(-) diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts index 9eeff979944..558a41d7351 100644 --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -185,8 +185,11 @@ export abstract class EntityStore implements NormalizedCache { // (before we finally set this.data[dataId] = merged, below). const drops: [StoreObject, string][] = []; const empty: StoreObject | any[] = Object.create(null); + const isLayer = this instanceof Layer; + const haveAnyDropFunctions = this.policies.dropCount > 0; - const scanOldDataForFieldsToDrop = ( + // This function object is created only if we have any drop functions. + const scanOldDataForFieldsToDrop = haveAnyDropFunctions ? ( oldVal: StoreValue, newVal: StoreValue | undefined, ) => { @@ -197,7 +200,7 @@ export abstract class EntityStore implements NormalizedCache { Array.isArray(newVal) ? 
newVal : empty as any[];
 
           (oldVal as StoreValue[]).forEach((oldChild, i) => {
-            scanOldDataForFieldsToDrop(oldChild, newArray[i]);
+            scanOldDataForFieldsToDrop!(oldChild, newArray[i]);
           });
 
         } else if (storeValueIsStoreObject(oldVal)) {
@@ -209,16 +212,14 @@ export abstract class EntityStore implements NormalizedCache {
           const newChild = newObject[storeFieldName];
 
           // Visit children before running dropField for this field.
-          scanOldDataForFieldsToDrop(oldChild, newChild);
+          scanOldDataForFieldsToDrop!(oldChild, newChild);
 
           if (newChild === void 0) {
             drops.push([oldVal, storeFieldName]);
           }
         });
       }
-    };
-
-    const isLayer = this instanceof Layer;
+    } : void 0;
 
       // To detect field removals (in order to run drop functions), we can
       // restrict our attention to the incoming fields, since those are the
       // top-level fields that might have changed.
       Object.keys(incoming).forEach(storeFieldName => {
         // Although we're using the keys from incoming, we want to compare
         // existing data to merged data, since the merged data have a much
         // better chance of being partly === to the existing data, whereas
         // incoming tends to be all fresh objects.
         const newFieldValue = merged[storeFieldName];
 
-        scanOldDataForFieldsToDrop(existing[storeFieldName], newFieldValue);
+        // No point scanning the existing data for fields with drop functions
+        // if we happen to know the Policies object has no drop functions.
+        if (haveAnyDropFunctions) {
+          scanOldDataForFieldsToDrop!(
+            existing[storeFieldName],
+            newFieldValue,
+          );
+        }
 
         if (newFieldValue === void 0) {
           drops.push([existing, storeFieldName]);
 
           // If merged[storeFieldName] has become undefined, and this is the
           // Root layer, actually delete the property from the merged object,
           // which is guaranteed to have been created fresh in store.merge.
           if (hasOwn.call(merged, storeFieldName) &&
               merged[storeFieldName] === void 0 &&
               !isLayer) {
             delete merged[storeFieldName];
           }
         }
       });
 
-      if (drops.length) {
+      if (haveAnyDropFunctions && drops.length) {
         const context: ReadMergeModifyContext = { store: this };
 
         drops.forEach(([storeObject, storeFieldName]) => {
           this.policies.dropField(
diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts
index 4084765bcbf..3df85b6b94f 100644
--- a/src/cache/inmemory/policies.ts
+++ b/src/cache/inmemory/policies.ts
@@ -474,6 +474,7 @@ export class Policies {
 
         if (typeof drop === "function") {
           existing.drop = drop;
+          ++(this as { dropCount: number }).dropCount;
         }
       }
 
@@ -779,28 +780,32 @@ export class Policies {
     return existing;
   }
 
+  public readonly dropCount = 0;
+
   public dropField(
     typename: string | undefined,
     objectOrReference: StoreObject | Reference,
     storeFieldName: string,
     context: ReadMergeModifyContext,
   ) {
-    const fieldName = fieldNameFromStoreName(storeFieldName);
-    const policy = this.getFieldPolicy(typename, fieldName, false);
-    const drop = policy && policy.drop;
-    if (drop) {
-      drop(
-        context.store.getFieldValue(objectOrReference, storeFieldName),
-        // TODO Consolidate this code with similar code in readField?
-        makeFieldFunctionOptions(
-          this,
-          objectOrReference,
-          { typename, fieldName },
-          context,
-          (context.store as EntityStore).group
-            .getStorage(objectOrReference, storeFieldName),
-        ),
-      );
+    if (this.dropCount) {
+      const fieldName = fieldNameFromStoreName(storeFieldName);
+      const policy = this.getFieldPolicy(typename, fieldName, false);
+      const drop = policy && policy.drop;
+      if (drop) {
+        drop(
+          context.store.getFieldValue(objectOrReference, storeFieldName),
+          // TODO Consolidate this code with similar code in readField?
+          makeFieldFunctionOptions(
+            this,
+            objectOrReference,
+            { typename, fieldName },
+            context,
+            (context.store as EntityStore).group
+              .getStorage(objectOrReference, storeFieldName),
+          ),
+        );
+      }
     }
   }