From 78004fac5becf67b94bfb0fd7ffdac64df17c24c Mon Sep 17 00:00:00 2001 From: Infinite15 Date: Wed, 28 Apr 2021 11:39:23 -0400 Subject: [PATCH] Support InMemoryCache field policy `drop` function. As I described in this comment, though I decided to call the new function `drop` instead of `finalize`: https://github.com/apollographql/apollo-client/issues/8052#issuecomment-828547846 --- .../__tests__/__snapshots__/policies.ts.snap | 80 +++++ src/cache/inmemory/__tests__/policies.ts | 302 ++++++++++++++++++ src/cache/inmemory/entityStore.ts | 195 +++++++++-- src/cache/inmemory/policies.ts | 70 ++-- src/cache/inmemory/types.ts | 7 +- src/cache/inmemory/writeToStore.ts | 12 +- 6 files changed, 603 insertions(+), 63 deletions(-) diff --git a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap index 296372a0c..77a4add91 100644 --- a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap +++ b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap @@ -1211,6 +1211,86 @@ Object { } `; +exports[`type policies field policies custom field policy drop functions are called if merge function returns undefined 1`] = ` +Object { + "ROOT_QUERY": Object { + "__typename": "Query", + "todoList": Object { + "__ref": "ToDoList:{}", + }, + }, + "Task:{\\"taskID\\":1}": Object { + "__typename": "Task", + "taskID": 1, + "text": "task #1", + }, + "Task:{\\"taskID\\":2}": Object { + "__typename": "Task", + "taskID": 2, + "text": "task #2", + }, + "ToDoList:{}": Object { + "__typename": "ToDoList", + "tasks": Array [ + Object { + "__ref": "Task:{\\"taskID\\":1}", + }, + Object { + "__ref": "Task:{\\"taskID\\":2}", + }, + ], + }, +} +`; + +exports[`type policies field policies custom field policy drop functions are called if merge function returns undefined 2`] = ` +Object { + "ROOT_QUERY": Object { + "__typename": "Query", + "todoList": Object { + "__ref": "ToDoList:{}", + }, + }, + "Task:{\\"taskID\\":1}": Object { + "__typename": "Task", + "taskID": 1, + "text": "task #1", + }, + "Task:{\\"taskID\\":2}": Object { + "__typename": "Task", + "taskID": 2, + "text": "task #2", + }, + "Task:{\\"taskID\\":3}": Object { + "__typename": "Task", + "taskID": 3, + "text": "task #3", + }, + "Task:{\\"taskID\\":4}": Object { + "__typename": "Task", + "taskID": 4, + "text": "task #4", + }, + "ToDoList:{}": Object { + "__typename": "ToDoList", + "tasks": Array [ + Object { + "__ref": "Task:{\\"taskID\\":1}", + }, + Object { + "__ref": "Task:{\\"taskID\\":2}", + }, + Object { + "__ref": "Task:{\\"taskID\\":3}", + }, + Object { + "__ref": "Task:{\\"taskID\\":4}", + }, + ], + }, +} +`; + exports[`type policies field policies read, merge, and modify functions can access options.storage 1`] = ` Object { "ROOT_QUERY": Object { diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts index 0c7267553..665f1ec33 100644 --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -1705,6 +1705,308 @@ describe("type policies", function () { expect(cache.extract()).toMatchSnapshot(); }); + describe("custom field policy drop functions", function () { + const makeCache = (resolve: () => void) => new InMemoryCache({ + typePolicies: { + Parent: { + keyFields: false, + fields: { + deleteMe: { + read(existing, { storage }) { + expect(existing).toBe("merged value"); + expect(storage.cached).toBe(existing); + return "read value"; + }, + merge(existing, incoming, { storage }) { + 
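+              // First write: there is no existing data yet, and merge caches
+              // its return value in storage.cached so the read and drop
+              // functions below can verify they later see that same object.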
expect(existing).toBeUndefined(); + expect(incoming).toBe("initial value"); + return storage.cached = "merged value"; + }, + drop(existing, { storage }) { + expect(existing).toBe("merged value"); + expect(storage.cached).toBe(existing); + delete storage.cached; + // Finish the test (success). + resolve(); + }, + }, + }, + }, + }, + }); + + const query = gql` + query { + parent { + deleteMe @client + } + } + `; + + function testWriteAndRead(cache: InMemoryCache) { + cache.writeQuery({ + query, + data: { + parent: { + __typename: "Parent", + deleteMe: "initial value", + }, + }, + }); + + expect(cache.extract()).toEqual({ + ROOT_QUERY: { + __typename: "Query", + parent: { + __typename: "Parent", + deleteMe: "merged value", + }, + }, + }); + + expect(cache.readQuery({ query })).toEqual({ + parent: { + __typename: "Parent", + deleteMe: "read value", + }, + }); + } + + itAsync("are called when a parent object is evicted from the cache", resolve => { + const cache = makeCache(resolve); + testWriteAndRead(cache); + + const evicted = cache.evict({ + // Note that we're removing Query.parent, not directly removing + // Parent.deleteMe, but we still expect the Parent.deleteMe drop + // function to be called. + fieldName: "parent", + }); + expect(evicted).toBe(true); + }); + + itAsync("are called when cache.modify causes the parent object to lose fields", resolve => { + const cache = makeCache(resolve); + testWriteAndRead(cache); + + const modified = cache.modify({ + fields: { + parent(value: StoreObject) { + const { deleteMe, ...rest } = value; + expect(rest).toEqual({ + __typename: "Parent", + }); + return rest; + }, + }, + }); + expect(modified).toBe(true); + }); + + itAsync("are called even if cache is cleared/restored", resolve => { + const cache = makeCache(resolve); + testWriteAndRead(cache); + + const snapshot = cache.extract(); + cache.reset(); + expect(cache.extract()).toEqual({}); + cache.restore(snapshot); + expect(cache.extract()).toEqual(snapshot); + + cache.writeQuery({ + query, + overwrite: true, + data: { + parent: { + __typename: "Parent", + deleteMe: void 0, + }, + }, + }); + }); + + itAsync("are called if merge function returns undefined", resolve => { + const cache = new InMemoryCache({ + typePolicies: { + ToDoList: { + keyFields: [], + fields: { + tasks: { + keyArgs: false, + + merge(existing: number[] | undefined, incoming: number[], { args }) { + if (args && args.deleteOnMerge) return; + return existing ? [ + ...existing, + ...incoming, + ] : incoming; + }, + + drop(existing) { + expect(existing).toEqual([ + { __ref: 'Task:{"taskID":1}' }, + { __ref: 'Task:{"taskID":2}' }, + { __ref: 'Task:{"taskID":3}' }, + { __ref: 'Task:{"taskID":4}' }, + ]); + // Finish the test (success). + resolve(); + }, + }, + }, + }, + + Task: { + keyFields: ["taskID"], + }, + }, + }); + + const query = gql` + query { + todoList { + tasks { + taskID + text + } + } + } + `; + + const deleteQuery = gql` + query { + todoList { + tasks(deleteOnMerge: true) { + taskID + text + } + } + } + `; + + const deleteData = { + todoList: { + __typename: "ToDoList", + tasks: [], + }, + }; + + // This write will cause the merge function to return undefined, but + // since the field is already undefined, the undefined return from the + // merge function should not trigger the drop function. 
+ cache.writeQuery({ + query: deleteQuery, + data: deleteData, + }); + + cache.writeQuery({ + query, + data: { + todoList: { + __typename: "ToDoList", + tasks: [ + { __typename: "Task", taskID: 1, text: "task #1" }, + { __typename: "Task", taskID: 2, text: "task #2" }, + ], + }, + }, + }); + + expect(cache.extract()).toMatchSnapshot(); + + cache.writeQuery({ + query, + data: { + todoList: { + __typename: "ToDoList", + tasks: [ + { __typename: "Task", taskID: 3, text: "task #3" }, + { __typename: "Task", taskID: 4, text: "task #4" }, + ], + }, + }, + }); + + expect(cache.extract()).toMatchSnapshot(); + + // Since the ToDoList.tasks field has data now, this deletion should + // trigger the drop function, unlike the last time we used deleteQuery. + cache.writeQuery({ + query: deleteQuery, + data: deleteData, + }); + }); + + itAsync("are called for fields within garbage collected objects", (resolve, reject) => { + const cache = new InMemoryCache({ + typePolicies: { + Garbage: { + keyFields: ["gid"], + fields: { + isToxic: { + drop(isToxic: boolean, { readField }) { + const gid = readField("gid")!; + if (expectedToxicities.has(gid)) { + expect(expectedToxicities.get(gid)).toBe(isToxic); + if (expectedToxicities.delete(gid) && + expectedToxicities.size === 0) { + resolve(); + } + } else { + reject(`unexpectedly dropped garbage ${gid}`); + } + }, + }, + }, + }, + }, + }); + + const expectedToxicities = new Map(); + expectedToxicities.set(234, true); + expectedToxicities.set(456, false); + + const query = gql` + query { + garbages { + gid + isToxic + } + } + `; + + cache.writeQuery({ + query, + data: { + garbages: [ + { __typename: "Garbage", gid: 123, isToxic: false }, + { __typename: "Garbage", gid: 234, isToxic: true }, + { __typename: "Garbage", gid: 345, isToxic: true }, + { __typename: "Garbage", gid: 456, isToxic: false }, + ], + }, + }); + + expect(cache.gc()).toEqual([]); + + cache.writeQuery({ + query, + overwrite: true, + data: { + garbages: [ + { __typename: "Garbage", gid: 123, isToxic: false }, + { __typename: "Garbage", gid: 345, isToxic: true }, + ], + }, + }); + + expect(cache.gc().sort()).toEqual([ + 'Garbage:{"gid":234}', + 'Garbage:{"gid":456}', + ]); + }); + }); + it("merge functions can deduplicate items using readField", function () { const cache = new InMemoryCache({ typePolicies: { diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts index 894069fe7..ace6dfd49 100644 --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -13,8 +13,8 @@ import { maybeDeepFreeze, canUseWeakMap, } from '../../utilities'; -import { NormalizedCache, NormalizedCacheObject } from './types'; -import { hasOwn, fieldNameFromStoreName } from './helpers'; +import { NormalizedCache, NormalizedCacheObject, ReadMergeModifyContext } from './types'; +import { hasOwn, fieldNameFromStoreName, storeValueIsStoreObject } from './helpers'; import { Policies, StorageType } from './policies'; import { Cache } from '../core/types/Cache'; import { @@ -99,7 +99,7 @@ export abstract class EntityStore implements NormalizedCache { older: string | StoreObject, newer: StoreObject | string, ): void { - let dataId: string | undefined; + let dataId: string; const existing: StoreObject | undefined = typeof older === "string" @@ -116,6 +116,7 @@ export abstract class EntityStore implements NormalizedCache { if (!incoming) return; invariant( + // @ts-ignore typeof dataId === "string", "store.merge expects a string ID", ); @@ -123,13 +124,10 @@ export abstract class 
EntityStore implements NormalizedCache {
     const merged: StoreObject =
       new DeepMerger(storeObjectReconciler).merge(existing, incoming);
 
-    // Even if merged === existing, existing may have come from a lower
-    // layer, so we always need to set this.data[dataId] on this level.
-    this.data[dataId] = merged;
-
     if (merged !== existing) {
       delete this.refs[dataId];
       if (this.group.caching) {
+        const isLayer = this instanceof Layer;
         const fieldsToDirty: Record<string, any> = Object.create(null);
 
         // If we added a new StoreObject where there was previously none, dirty
@@ -160,8 +158,9 @@ export abstract class EntityStore implements NormalizedCache {
 
           // If merged[storeFieldName] has become undefined, and this is the
           // Root layer, actually delete the property from the merged object,
-          // which is guaranteed to have been created fresh in this method.
-          if (merged[storeFieldName] === void 0 && !(this instanceof Layer)) {
+          // which is guaranteed to have been created fresh in store.merge.
+          // TODO Move this to the end of the store.merge method.
+          if (merged[storeFieldName] === void 0 && !isLayer) {
             delete merged[storeFieldName];
           }
         }
@@ -178,9 +177,81 @@ export abstract class EntityStore implements NormalizedCache {
       }
 
       Object.keys(fieldsToDirty).forEach(
-        fieldName => this.group.dirty(dataId as string, fieldName));
+        fieldName => this.group.dirty(dataId, fieldName));
+      }
+
+      // Make sure we have a (string | number)[] path for every object in the
+      // merged object tree, including non-normalized non-Reference objects that
+      // are embedded/nested within normalized parent objects. The path of such
+      // objects will be an array starting with the string ID of the closest
+      // enclosing entity object, followed by the string and number properties
+      // that lead from the entity to the nested object within it.
+      this.group.assignPaths(dataId, merged);
+
+    if (existing) {
+      // Collect objects and field names removed by this merge, so we can run
+      // drop functions configured for the fields that are about to be removed
+      // (before we finally set this.data[dataId] = merged, below).
+      const drops: [StoreObject, string][] = [];
+
+      const walk = (exVal: StoreValue, inVal: StoreValue | undefined) => {
+        if (exVal === inVal) return;
+
+        if (Array.isArray(exVal)) {
+          (exVal as StoreValue[]).forEach((exChild, i) => {
+            const inChild = inVal && Array.isArray(inVal) ? inVal[i] : void 0;
+            walk(exChild, inChild);
+          });
+
+        } else if (storeValueIsStoreObject(exVal)) {
+          Object.keys(exVal).forEach(storeFieldName => {
+            const exChild = exVal[storeFieldName];
+            const inChild = inVal && storeValueIsStoreObject(inVal)
+              ? inVal[storeFieldName]
+              : void 0;
+
+            // Visit children before running dropField for exChild.
+            walk(exChild, inChild);
+
+            if (inChild === void 0) {
+              drops.push([exVal, storeFieldName]);
+            }
+          });
+        }
+      };
+
+      // To detect field removals (in order to run drop functions), we can
+      // restrict our attention to the incoming fields, since those are the
+      // top-level fields that might have changed.
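+      // For example, if existing is { a: 1, b: { c: 2 } } and incoming is
+      // { b: {} }, walking existing.b against incoming.b detects the removal
+      // of the nested field c (scheduling its drop), while the untouched
+      // field a never needs to be visited.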
+ Object.keys(incoming).forEach(storeFieldName => { + const eChild = existing[storeFieldName]; + const iChild = incoming[storeFieldName]; + + walk(eChild, iChild); + + if (iChild === void 0) { + drops.push([existing, storeFieldName]); + } + }); + + if (drops.length) { + const context: ReadMergeModifyContext = { store: this }; + + drops.forEach(([storeObject, storeFieldName]) => { + this.policies.dropField( + storeObject.__typename, + storeObject, + storeFieldName, + context, + ); + }); + } } } + + // Even if merged === existing, existing may have come from a lower + // layer, so we always need to set this.data[dataId] on this level. + this.data[dataId] = merged; } public modify( @@ -225,7 +296,10 @@ export abstract class EntityStore implements NormalizedCache { ...sharedDetails, fieldName, storeFieldName, - storage: this.getStorage(dataId, storeFieldName), + storage: this.group.getStorage( + makeReference(dataId), + storeFieldName, + ), }); if (newValue === INVALIDATE) { this.group.dirty(dataId, storeFieldName); @@ -352,11 +426,6 @@ export abstract class EntityStore implements NormalizedCache { return this; } - public abstract getStorage( - idOrObj: string | StoreObject, - ...storeFieldNames: (string | number)[] - ): StorageType; - // Maps root entity IDs to the number of times they have been retained, minus // the number of times they have been released. Retained entities keep other // entities they reference (even indirectly) from being garbage collected. @@ -449,7 +518,8 @@ export abstract class EntityStore implements NormalizedCache { // Used to compute cache keys specific to this.group. public makeCacheKey(...args: any[]): object; public makeCacheKey() { - return this.group.keyMaker.lookupArray(arguments); + const found = this.group.keyMaker.lookupArray(arguments); + return found.cacheKey || (found.cacheKey = Object.create(null)); } // Bound function that can be passed around to provide easy access to fields @@ -549,9 +619,80 @@ class CacheGroup { } } + // This WeakMap maps every non-normalized object reference contained by the + // store to the path of that object within the enclosing entity object. This + // information is collected by the assignPaths method after every store.merge, + // so store.data should never contain any un-pathed objects. As a reminder, + // these object references are handled immutably from here on, so the objects + // should not move around in a way that invalidates these paths. This path + // information is useful in the getStorage method, below. 
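+  // For example, the embedded Parent object in the tests above (declared with
+  // keyFields: false, so it is stored inside ROOT_QUERY rather than being
+  // normalized) gets assigned the path ['ROOT_QUERY', 'parent'].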
+  private paths = new WeakMap<object, (string | number)[]>();
+
+  public assignPaths(dataId: string, merged: StoreObject) {
+    const paths = this.paths;
+    const path: (string | number)[] = [dataId];
+
+    function assign(this: void, obj: StoreValue) {
+      if (Array.isArray(obj)) {
+        obj.forEach(handleChild);
+      } else if (storeValueIsStoreObject(obj) && !paths.has(obj)) {
+        Object.keys(obj).forEach(storeFieldName => {
+          const child = obj[storeFieldName];
+          handleChild(child, storeFieldName);
+        });
+      }
+    }
+
+    function handleChild(child: StoreValue, key: string | number) {
+      if (storeValueIsStoreObject(child)) {
+        if (paths.has(child)) return;
+        paths.set(child, path.concat(key));
+      }
+      try {
+        path.push(key);
+        assign(child);
+      } finally {
+        invariant(path.pop() === key);
+      }
+    }
+
+    assign(merged);
+  }
+
+  public getStorage(
+    parentObjOrRef: StoreObject | Reference,
+    ...pathSuffix: (string | number)[]
+  ) {
+    const path: any[] = [];
+    const push = (key: string | number) => path.push(key);
+
+    if (isReference(parentObjOrRef)) {
+      path.push(parentObjOrRef.__ref);
+    } else {
+      // See assignPaths to understand how this map is populated.
+      const assignedPath = this.paths.get(parentObjOrRef);
+      if (assignedPath) {
+        assignedPath.forEach(push);
+      } else {
+        // If we can't find a path for this object, use the object reference
+        // itself as a key.
+        path.push(parentObjOrRef);
+      }
+    }
+
+    // Append the provided suffix to the path array.
+    pathSuffix.forEach(push);
+
+    const found = this.keyMaker.lookupArray(path);
+    return found.storage || (found.storage = Object.create(null));
+  }
+
   // Used by the EntityStore#makeCacheKey method to compute cache keys
   // specific to this CacheGroup.
-  public readonly keyMaker = new Trie<object>(canUseWeakMap);
+  public readonly keyMaker = new Trie<{
+    cacheKey?: object;
+    storage?: StorageType;
+  }>(canUseWeakMap);
 }
 
 function makeDepKey(dataId: string, storeFieldName: string) {
@@ -593,11 +734,6 @@ export namespace EntityStore {
       // Never remove the root layer.
       return this;
     }
-
-    public readonly storageTrie = new Trie<StorageType>(canUseWeakMap);
-    public getStorage(): StorageType {
-      return this.storageTrie.lookupArray(arguments);
-    }
   }
 }
 
@@ -662,12 +798,6 @@ class Layer extends EntityStore {
       ...super.findChildRefIds(dataId),
     } : fromParent;
   }
-
-  public getStorage(): StorageType {
-    let p: EntityStore = this.parent;
-    while ((p as Layer).parent) p = (p as Layer).parent;
-    return p.getStorage.apply(p, arguments);
-  }
 }
 
 // Represents a Layer permanently installed just above the Root, which allows
@@ -702,10 +832,11 @@ class Stump extends Layer {
 function storeObjectReconciler(
   existingObject: StoreObject,
   incomingObject: StoreObject,
-  property: string,
+  storeFieldName: string,
 ): StoreValue {
-  const existingValue = existingObject[property];
-  const incomingValue = incomingObject[property];
+  const existingValue = existingObject[storeFieldName];
+  const incomingValue = incomingObject[storeFieldName];
+
   // Wherever there is a key collision, prefer the incoming value, unless
   // it is deeply equal to the existing value. 
It's worth checking deep
   // equality here (even though blindly returning incoming would be
diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts
index f0a928295..4084765bc 100644
--- a/src/cache/inmemory/policies.ts
+++ b/src/cache/inmemory/policies.ts
@@ -47,6 +47,7 @@ import {
   CanReadFunction,
 } from '../core/types/common';
 import { WriteContext } from './writeToStore';
+import { EntityStore } from './entityStore';
 
 export type TypePolicies = {
   [__typename: string]: TypePolicy;
@@ -131,6 +132,7 @@ export type FieldPolicy<
   keyArgs?: KeySpecifier | KeyArgsFunction | false;
   read?: FieldReadFunction<TExisting, TReadResult>;
   merge?: FieldMergeFunction<TExisting, TIncoming> | boolean;
+  drop?: FieldDropFunction<TExisting>;
 };
 
 export type StorageType = Record<string, any>;
@@ -221,6 +223,11 @@ export type FieldMergeFunction<TExisting = any, TIncoming = TExisting> = (
   options: FieldFunctionOptions,
 ) => SafeReadonly<TExisting>;
 
+export type FieldDropFunction<TExisting = any> = (
+  existing: SafeReadonly<TExisting> | undefined,
+  options: FieldFunctionOptions,
+) => void;
+
 export const defaultDataIdFromObject = (
   { __typename, id, _id }: Readonly<StoreObject>,
   context?: KeyFieldsContext,
@@ -256,6 +263,9 @@ export type PossibleTypesMap = {
   [supertype: string]: string[];
 };
 
+type InternalTypePolicy = Policies["typePolicies"][string];
+type InternalFieldPolicy = InternalTypePolicy["fields"][string];
+
 export class Policies {
   private typePolicies: {
     [__typename: string]: {
@@ -266,6 +276,7 @@
           keyFn?: KeyArgsFunction;
           read?: FieldReadFunction<any>;
           merge?: FieldMergeFunction<any>;
+          drop?: FieldDropFunction<any>;
         };
       };
     };
@@ -441,7 +452,7 @@
       if (typeof incoming === "function") {
         existing.read = incoming;
       } else {
-        const { keyArgs, read, merge } = incoming;
+        const { keyArgs, read, merge, drop } = incoming;
 
         existing.keyFn =
           // Pass false to disable argument-based differentiation of
@@ -460,6 +471,10 @@
         }
 
         setMerge(existing, merge);
+
+        if (typeof drop === "function") {
+          existing.drop = drop;
+        }
       }
 
       if (existing.read && existing.merge) {
@@ -511,7 +526,7 @@
     });
   }
 
-  private getTypePolicy(typename: string): Policies["typePolicies"][string] {
+  private getTypePolicy(typename: string): InternalTypePolicy {
     if (!hasOwn.call(this.typePolicies, typename)) {
       const policy: Policies["typePolicies"][string] =
         this.typePolicies[typename] = Object.create(null);
@@ -560,11 +575,7 @@
     typename: string | undefined,
     fieldName: string,
     createIfMissing: boolean,
-  ): {
-    keyFn?: KeyArgsFunction;
-    read?: FieldReadFunction<any>;
-    merge?: FieldMergeFunction<any>;
-  } | undefined {
+  ): InternalFieldPolicy | undefined {
     if (typename) {
       const fieldPolicies = this.getTypePolicy(typename).fields;
       return fieldPolicies[fieldName] || (
@@ -753,12 +764,8 @@
           objectOrReference,
           options,
           context,
-          context.store.getStorage(
-            isReference(objectOrReference)
-              ? objectOrReference.__ref
-              : objectOrReference,
-            storeFieldName,
-          ),
+          (context.store as EntityStore).group
+            .getStorage(objectOrReference, storeFieldName),
         );
 
     // Call read(existing, readOptions) with cacheSlot holding this.cache.
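To make the storage-resolution change above concrete: because storage is now
resolved through CacheGroup#getStorage by entity path, a field's read, merge,
and drop callbacks all receive the same storage record, even for fields of
non-normalized objects. A minimal sketch of that contract, assuming this patch
is applied (the Person type and its temperature field are invented for
illustration):

    import { InMemoryCache } from "@apollo/client";

    const cache = new InMemoryCache({
      typePolicies: {
        Person: {
          fields: {
            temperature: {
              merge(_, incoming: number, { storage }) {
                // Scratch space that persists across calls for this field.
                storage.updatedAt = Date.now();
                return incoming;
              },
              read(existing: number | undefined, { storage }) {
                // The same storage record the merge function wrote to.
                return storage.updatedAt ? existing : void 0;
              },
              drop(_existing, { storage }) {
                // The same record once more, cleaned up as the field goes away.
                delete storage.updatedAt;
              },
            },
          },
        },
      },
    });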
@@ -772,20 +779,41 @@
     return existing;
   }
 
+  public dropField(
+    typename: string | undefined,
+    objectOrReference: StoreObject | Reference,
+    storeFieldName: string,
+    context: ReadMergeModifyContext,
+  ) {
+    const fieldName = fieldNameFromStoreName(storeFieldName);
+    const policy = this.getFieldPolicy(typename, fieldName, false);
+    const drop = policy && policy.drop;
+    if (drop) {
+      drop(
+        context.store.getFieldValue(objectOrReference, storeFieldName),
+        // TODO Consolidate this code with similar code in readField?
+        makeFieldFunctionOptions(
+          this,
+          objectOrReference,
+          { typename, fieldName },
+          context,
+          (context.store as EntityStore).group
+            .getStorage(objectOrReference, storeFieldName),
+        ),
+      );
+    }
+  }
+
   public getMergeFunction(
     parentTypename: string | undefined,
     fieldName: string,
     childTypename: string | undefined,
   ): FieldMergeFunction | undefined {
-    let policy:
-      | Policies["typePolicies"][string]
-      | Policies["typePolicies"][string]["fields"][string]
-      | undefined =
-      this.getFieldPolicy(parentTypename, fieldName, false);
-    let merge = policy && policy.merge;
+    const fieldPolicy = this.getFieldPolicy(parentTypename, fieldName, false);
+    let merge = fieldPolicy && fieldPolicy.merge;
     if (!merge && childTypename) {
-      policy = this.getTypePolicy(childTypename);
-      merge = policy && policy.merge;
+      const typePolicy = this.getTypePolicy(childTypename);
+      merge = typePolicy && typePolicy.merge;
     }
     return merge;
   }
diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts
index 13959a0c3..e1584ac2b 100644
--- a/src/cache/inmemory/types.ts
+++ b/src/cache/inmemory/types.ts
@@ -7,7 +7,7 @@ import {
   Reference,
 } from '../../utilities';
 import { FieldValueGetter } from './entityStore';
-import { KeyFieldsFunction, StorageType, FieldMergeFunction } from './policies';
+import { KeyFieldsFunction, FieldMergeFunction } from './policies';
 import {
   Modifier,
   Modifiers,
@@ -68,11 +68,6 @@ export interface NormalizedCache {
   getFieldValue: FieldValueGetter;
   toReference: ToReferenceFunction;
   canRead: CanReadFunction;
-
-  getStorage(
-    idOrObj: string | StoreObject,
-    ...storeFieldNames: (string | number)[]
-  ): StorageType;
 }
 
 /**
diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts
index 091e47884..22dbe9e55 100644
--- a/src/cache/inmemory/writeToStore.ts
+++ b/src/cache/inmemory/writeToStore.ts
@@ -24,11 +24,13 @@ import {
 
 import { NormalizedCache, ReadMergeModifyContext, MergeTree } from './types';
 import { makeProcessedFieldsMerger, fieldNameFromStoreName, storeValueIsStoreObject } from './helpers';
+import { EntityStore } from './entityStore';
 import { StoreReader } from './readFromStore';
 import { InMemoryCache } from './inMemoryCache';
-import { EntityStore } from './entityStore';
 import { Cache } from '../../core';
 
+type GetStorageParams = Parameters<EntityStore["group"]["getStorage"]>;
+
 export interface WriteContext extends ReadMergeModifyContext {
   readonly written: {
     [dataId: string]: SelectionSetNode[];
@@ -343,7 +345,7 @@
     existing: StoreValue,
     incoming: T,
     context: WriteContext,
-    getStorageArgs?: Parameters<NormalizedCache["getStorage"]>,
+    getStorageArgs?: GetStorageParams,
   ): T {
     if (mergeTree.map.size && !isReference(incoming)) {
       const e: StoreObject | Reference | undefined = (
@@ -367,7 +369,7 @@
       // sequence of storeFieldName strings/numbers identifying the nested
       // field name path of each field value to be merged.
       if (e && !getStorageArgs) {
-        getStorageArgs = [isReference(e) ? 
e.__ref : e]; + getStorageArgs = [e]; } // It's possible that applying merge functions to this subtree will @@ -423,7 +425,9 @@ export class StoreWriter { incoming, mergeTree.info, context, - getStorageArgs && context.store.getStorage(...getStorageArgs), + getStorageArgs && ( + context.store as EntityStore + ).group.getStorage(...getStorageArgs), ); }
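For completeness, a minimal end-to-end sketch of the new drop function as the
tests above exercise it, assuming this patch is applied (the Query.serverTime
field and its interval timer are invented for illustration):

    import { InMemoryCache } from "@apollo/client";

    const cache = new InMemoryCache({
      typePolicies: {
        Query: {
          fields: {
            serverTime: {
              read(existing: number | undefined, { storage }) {
                if (!storage.timer) {
                  // A resource tied to the lifetime of this cached field.
                  storage.timer = setInterval(() => { /* refresh */ }, 1000);
                }
                return existing;
              },
              drop(_existing, { storage }) {
                // Runs when Query.serverTime goes away: via cache.evict,
                // garbage collection, cache.modify, or a merge function
                // returning undefined (all exercised by the tests above).
                clearInterval(storage.timer);
                delete storage.timer;
              },
            },
          },
        },
      },
    });

    // Evicting the field (id defaults to ROOT_QUERY) triggers drop:
    cache.evict({ fieldName: "serverTime" });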