From 45bfd4dd31e6ce89862dafe2d0dd832338411f82 Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Fri, 18 Sep 2020 19:28:03 -0400 Subject: [PATCH 01/10] Rename mergedFields to avoid confusion with merge functions. --- src/cache/inmemory/writeToStore.ts | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index 1d05ba8e3f2..93c181620cd 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -164,14 +164,14 @@ export class StoreWriter { } } - // This mergedFields variable will be repeatedly updated using context.merge - // to accumulate all fields that need to be written into the store. - let mergedFields: StoreObject = Object.create(null); + // This variable will be repeatedly updated using context.merge to + // accumulate all fields that need to be written into the store. + let incomingFields: StoreObject = Object.create(null); // Write any key fields that were used during identification, even if // they were not mentioned in the original query. if (keyObject) { - mergedFields = context.merge(mergedFields, keyObject); + incomingFields = context.merge(incomingFields, keyObject); } // If typename was not passed in, infer it. Note that typename is @@ -183,7 +183,7 @@ export class StoreWriter { (dataId && context.store.get(dataId, "__typename") as string); if ("string" === typeof typename) { - mergedFields.__typename = typename; + incomingFields.__typename = typename; } const workSet = new Set(selectionSet.selections); @@ -217,12 +217,12 @@ export class StoreWriter { __value: incomingValue, } as FieldValueToBeMerged; - // Communicate to the caller that mergedFields contains at + // Communicate to the caller that newFields contains at // least one FieldValueToBeMerged. out.shouldApplyMerges = true; } - mergedFields = context.merge(mergedFields, { + incomingFields = context.merge(incomingFields, { [storeFieldName]: incomingValue, }); @@ -274,18 +274,18 @@ export class StoreWriter { const entityRef = makeReference(dataId); if (out.shouldApplyMerges) { - mergedFields = policies.applyMerges(entityRef, mergedFields, context); + incomingFields = policies.applyMerges(entityRef, incomingFields, context); } if (process.env.NODE_ENV !== "production") { - Object.keys(mergedFields).forEach(storeFieldName => { + Object.keys(incomingFields).forEach(storeFieldName => { const fieldName = fieldNameFromStoreName(storeFieldName); // If a merge function was defined for this field, trust that it // did the right thing about (not) clobbering data. if (!policies.hasMergeFunction(typename, fieldName)) { warnAboutDataLoss( entityRef, - mergedFields, + incomingFields, storeFieldName, context.store, ); @@ -293,12 +293,12 @@ export class StoreWriter { }); } - context.store.merge(dataId, mergedFields); + context.store.merge(dataId, incomingFields); return entityRef; } - return mergedFields; + return incomingFields; } private processFieldValue( From 8172159e7033d195b5dd7747ecb54c8b437a0df9 Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Fri, 18 Sep 2020 20:03:44 -0400 Subject: [PATCH 02/10] Efficiently track locations of incoming data to be merged. 
Instead of recursively searching for FieldValueToBeMerged wrapper objects anywhere in the incoming data, processSelectionSet and processFieldValue can build a sparse tree specifying just the paths of fields that need to be merged, and then applyMerges can use that tree to traverse only the parts of the data where merge functions need to be called. These changes effectively revert #5880, since the idea of giving merge functions a chance to transform their child data before calling nested merge functions no longer makes as much sense. Instead, applyMerges will be recursively called on the child data before parent merge functions run, the way it used to be (before #5880). --- src/cache/inmemory/entityStore.ts | 15 ++- src/cache/inmemory/helpers.ts | 57 +--------- src/cache/inmemory/policies.ts | 102 +++--------------- src/cache/inmemory/types.ts | 14 ++- src/cache/inmemory/writeToStore.ts | 167 ++++++++++++++++++++++++----- 5 files changed, 181 insertions(+), 174 deletions(-) diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts index c2c135f08da..9bdc401c419 100644 --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -258,7 +258,7 @@ export abstract class EntityStore implements NormalizedCache { public abstract getStorage( idOrObj: string | StoreObject, - storeFieldName: string, + storeFieldName: string | number, ): StorageType; // Maps root entity IDs to the number of times they have been retained, minus @@ -353,7 +353,7 @@ export abstract class EntityStore implements NormalizedCache { // Bound function that can be passed around to provide easy access to fields // of Reference objects as well as ordinary objects. public getFieldValue = ( - objectOrReference: StoreObject | Reference, + objectOrReference: StoreObject | Reference | undefined, storeFieldName: string, ) => maybeDeepFreeze( isReference(objectOrReference) @@ -484,9 +484,14 @@ export namespace EntityStore { public readonly storageTrie = new KeyTrie(canUseWeakMap); public getStorage( idOrObj: string | StoreObject, - storeFieldName: string, + storeFieldName: string | number, ): StorageType { - return this.storageTrie.lookup(idOrObj, storeFieldName); + return this.storageTrie.lookup( + idOrObj, + // Normalize numbers to strings, so we don't accidentally end up + // with multiple storage objects for the same field. 
+ String(storeFieldName), + ); } } } @@ -555,7 +560,7 @@ class Layer extends EntityStore { public getStorage( idOrObj: string | StoreObject, - storeFieldName: string, + storeFieldName: string | number, ): StorageType { return this.parent.getStorage(idOrObj, storeFieldName); } diff --git a/src/cache/inmemory/helpers.ts b/src/cache/inmemory/helpers.ts index 47d9ee28e5c..dc12bc65838 100644 --- a/src/cache/inmemory/helpers.ts +++ b/src/cache/inmemory/helpers.ts @@ -1,4 +1,4 @@ -import { FieldNode, SelectionSetNode } from 'graphql'; +import { SelectionSetNode } from 'graphql'; import { NormalizedCache } from './types'; import { @@ -8,7 +8,6 @@ import { StoreObject, isField, DeepMerger, - ReconcilerFunction, resultKeyNameFromField, shouldInclude, } from '../../utilities'; @@ -57,17 +56,6 @@ export function selectionSetMatchesResult( return false; } -// Invoking merge functions needs to happen after processSelectionSet has -// finished, but requires information that is more readily available -// during processSelectionSet, so processSelectionSet embeds special -// objects of the following shape within its result tree, which then must -// be removed by calling Policies#applyMerges. -export interface FieldValueToBeMerged { - __field: FieldNode; - __typename: string; - __value: StoreValue; -} - export function storeValueIsStoreObject( value: StoreValue, ): value is StoreObject { @@ -77,47 +65,6 @@ export function storeValueIsStoreObject( !Array.isArray(value); } -export function isFieldValueToBeMerged( - value: any, -): value is FieldValueToBeMerged { - const field = value && value.__field; - return field && isField(field); -} - export function makeProcessedFieldsMerger() { - // A DeepMerger that merges arrays and objects structurally, but otherwise - // prefers incoming scalar values over existing values. Provides special - // treatment for FieldValueToBeMerged objects. Used to accumulate fields - // when processing a single selection set. - return new DeepMerger(reconcileProcessedFields); -} - -const reconcileProcessedFields: ReconcilerFunction<[]> = function ( - existingObject, - incomingObject, - property, -) { - const existing = existingObject[property]; - const incoming = incomingObject[property]; - - if (isFieldValueToBeMerged(existing)) { - existing.__value = this.merge( - existing.__value, - isFieldValueToBeMerged(incoming) - // TODO Check compatibility of __field and __typename properties? - ? 
incoming.__value - : incoming, - ); - return existing; - } - - if (isFieldValueToBeMerged(incoming)) { - incoming.__value = this.merge( - existing, - incoming.__value, - ); - return incoming; - } - - return this.merge(existing, incoming); + return new DeepMerger; } diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts index 8266e9971ae..0c8f359da0d 100644 --- a/src/cache/inmemory/policies.ts +++ b/src/cache/inmemory/policies.ts @@ -23,12 +23,10 @@ import { canUseWeakMap, compact, } from '../../utilities'; -import { IdGetter, ReadMergeModifyContext } from "./types"; +import { IdGetter, ReadMergeModifyContext, MergeInfo } from "./types"; import { hasOwn, fieldNameFromStoreName, - FieldValueToBeMerged, - isFieldValueToBeMerged, storeValueIsStoreObject, selectionSetMatchesResult, TypeOrFieldNameRegExp, @@ -715,21 +713,17 @@ export class Policies { return !!(policy && policy.merge); } - public applyMerges( - existing: T | Reference, - incoming: T | FieldValueToBeMerged, + public runMergeFunction( + existing: StoreValue, + incoming: StoreValue, + { field, typename }: MergeInfo, context: ReadMergeModifyContext, - storageKeys?: [string | StoreObject, string], - ): T { - if (isFieldValueToBeMerged(incoming)) { - const field = incoming.__field; - const fieldName = field.name.value; - // This policy and its merge function are guaranteed to exist - // because the incoming value is a FieldValueToBeMerged object. - const { merge } = this.getFieldPolicy( - incoming.__typename, fieldName, false)!; - - incoming = merge!(existing, incoming.__value, makeFieldFunctionOptions( + storage?: StorageType, + ) { + const fieldName = field.name.value; + const { merge } = this.getFieldPolicy(typename, fieldName, false)!; + if (merge) { + return merge(existing, incoming, makeFieldFunctionOptions( this, // Unlike options.readField for read functions, we do not fall // back to the current object if no foreignObjOrRef is provided, @@ -743,69 +737,13 @@ export class Policies { // However, readField(name, ref) is useful for merge functions // that need to deduplicate child objects and references. void 0, - { typename: incoming.__typename, + { typename, fieldName, field, variables: context.variables }, context, - storageKeys - ? context.store.getStorage(...storageKeys) - : Object.create(null), - )) as T; - } - - if (Array.isArray(incoming)) { - return incoming!.map(item => this.applyMerges( - // Items in the same position in different arrays are not - // necessarily related to each other, so there is no basis for - // merging them. Passing void here means any FieldValueToBeMerged - // objects within item will be handled as if there was no existing - // data. Also, we do not pass storageKeys because the array itself - // is never an entity with a __typename, so its indices can never - // have custom read or merge functions. - void 0, - item, - context, - )) as T; - } - - if (storeValueIsStoreObject(incoming)) { - const e = existing as StoreObject | Reference; - const i = incoming as StoreObject; - - // If the existing object is a { __ref } object, e.__ref provides a - // stable key for looking up the storage object associated with - // e.__ref and storeFieldName. Otherwise, storage is enabled only if - // existing is actually a non-null object. It's less common for a - // merge function to use options.storage, but it's conceivable that a - // pair of read and merge functions might want to cooperate in - // managing their shared options.storage object. - const firstStorageKey = isReference(e) - ? 
e.__ref - : typeof e === "object" && e; - - let newFields: StoreObject | undefined; - - Object.keys(i).forEach(storeFieldName => { - const incomingValue = i[storeFieldName]; - const appliedValue = this.applyMerges( - context.store.getFieldValue(e, storeFieldName), - incomingValue, - context, - // Avoid enabling options.storage when firstStorageKey is falsy, - // which implies no options.storage object has ever been created - // for a read/merge function for this field. - firstStorageKey ? [firstStorageKey, storeFieldName] : void 0, - ); - if (appliedValue !== incomingValue) { - newFields = newFields || Object.create(null); - newFields![storeFieldName] = appliedValue; - } - }); - - if (newFields) { - return { ...i, ...newFields } as typeof incoming; - } + storage || Object.create(null), + )); } return incoming; @@ -872,21 +810,15 @@ function makeFieldFunctionOptions( const iType = getFieldValue(incoming, "__typename"); const typesDiffer = eType && iType && eType !== iType; - const applied = policies.applyMerges( - typesDiffer ? void 0 : existing, - incoming, - context, - ); - if ( typesDiffer || !storeValueIsStoreObject(existing) || - !storeValueIsStoreObject(applied) + !storeValueIsStoreObject(incoming) ) { - return applied; + return incoming; } - return { ...existing, ...applied }; + return { ...existing, ...incoming }; } return incoming; diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts index bed9b7adb60..3004ea8cc3a 100644 --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -1,4 +1,4 @@ -import { DocumentNode } from 'graphql'; +import { DocumentNode, FieldNode } from 'graphql'; import { Transaction } from '../core/cache'; import { @@ -65,7 +65,7 @@ export interface NormalizedCache { getStorage( idOrObj: string | StoreObject, - storeFieldName: string, + storeFieldName: string | number, ): StorageType; } @@ -101,6 +101,16 @@ export type ApolloReducerConfig = { addTypename?: boolean; }; +export interface MergeInfo { + field: FieldNode; + typename: string | undefined; +}; + +export interface MergeTree { + info?: MergeInfo; + map: Map; +}; + export interface ReadMergeModifyContext { store: NormalizedCache; variables?: Record; diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index 93c181620cd..bba6c0969fb 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -22,8 +22,8 @@ import { cloneDeep, } from '../../utilities'; -import { NormalizedCache, ReadMergeModifyContext } from './types'; -import { makeProcessedFieldsMerger, FieldValueToBeMerged, fieldNameFromStoreName } from './helpers'; +import { NormalizedCache, ReadMergeModifyContext, MergeTree } from './types'; +import { makeProcessedFieldsMerger, fieldNameFromStoreName, storeValueIsStoreObject } from './helpers'; import { StoreReader } from './readFromStore'; import { InMemoryCache } from './inMemoryCache'; @@ -41,9 +41,7 @@ interface ProcessSelectionSetOptions { result: Record; selectionSet: SelectionSetNode; context: WriteContext; - out?: { - shouldApplyMerges: boolean; - }; + mergeTree: MergeTree; } export interface WriteToStoreOptions { @@ -93,6 +91,7 @@ export class StoreWriter { result: result || Object.create(null), dataId, selectionSet: operationDefinition.selectionSet, + mergeTree: { map: new Map }, context: { store, written: Object.create(null), @@ -123,9 +122,7 @@ export class StoreWriter { context, // This object allows processSelectionSet to report useful information // to its callers without explicitly returning 
that information. - out = { - shouldApplyMerges: false, - }, + mergeTree, }: ProcessSelectionSetOptions): StoreObject | Reference { const { policies } = this.cache; @@ -203,23 +200,20 @@ export class StoreWriter { variables: context.variables, }); + const childTree = getChildMergeTree(mergeTree, storeFieldName); + let incomingValue = - this.processFieldValue(value, selection, context, out); + this.processFieldValue(value, selection, context, childTree); if (policies.hasMergeFunction(typename, selection.name.value)) { - // If a custom merge function is defined for this field, store - // a special FieldValueToBeMerged object, so that we can run - // the merge function later, after all processSelectionSet - // work is finished. - incomingValue = { - __field: selection, - __typename: typename, - __value: incomingValue, - } as FieldValueToBeMerged; - - // Communicate to the caller that newFields contains at - // least one FieldValueToBeMerged. - out.shouldApplyMerges = true; + childTree.info = { + // TODO Check compatibility against any existing + // childTree.field? + field: selection, + typename, + }; + } else { + maybeRecycleChildMergeTree(mergeTree, storeFieldName); } incomingFields = context.merge(incomingFields, { @@ -273,8 +267,8 @@ export class StoreWriter { if ("string" === typeof dataId) { const entityRef = makeReference(dataId); - if (out.shouldApplyMerges) { - incomingFields = policies.applyMerges(entityRef, incomingFields, context); + if (mergeTree.map.size) { + incomingFields = this.applyMerges(mergeTree, entityRef, incomingFields, context); } if (process.env.NODE_ENV !== "production") { @@ -305,7 +299,7 @@ export class StoreWriter { value: any, field: FieldNode, context: WriteContext, - out: ProcessSelectionSetOptions["out"], + mergeTree: MergeTree, ): StoreValue { if (!field.selectionSet || value === null) { // In development, we need to clone scalar values so that they can be @@ -315,16 +309,135 @@ export class StoreWriter { } if (Array.isArray(value)) { - return value.map(item => this.processFieldValue(item, field, context, out)); + return value.map((item, i) => { + const value = this.processFieldValue( + item, field, context, getChildMergeTree(mergeTree, i)); + maybeRecycleChildMergeTree(mergeTree, i); + return value; + }); } return this.processSelectionSet({ result: value, selectionSet: field.selectionSet, context, - out, + mergeTree, }); } + + private applyMerges( + mergeTree: MergeTree, + existing: StoreValue, + incoming: T, + context: ReadMergeModifyContext, + storageKeys?: [string | StoreObject, string | number], + ): T { + if (mergeTree.map.size && !isReference(incoming)) { + const e: StoreObject | Reference | undefined = ( + // Items in the same position in different arrays are not + // necessarily related to each other, so when incoming is an array + // we process its elements as if there was no existing data. + !Array.isArray(incoming) && + // Likewise, existing must be either a Reference or a StoreObject + // in order for its fields to be safe to merge with the fields of + // the incoming object. + (isReference(existing) || storeValueIsStoreObject(existing)) + ) ? existing : void 0; + + // This narrowing is implied by mergeTree.map.size > 0 and + // !isReference(incoming), though TypeScript understandably cannot + // hope to infer this type. 
+ const i = incoming as StoreObject | StoreValue[]; + + // The options.storage objects provided to read and merge functions + // are derived from the identity of the parent object and the + // storeFieldName string of each field to be merged. Since the + // parent identity remains the same for each iteration of the loop + // below, we can precompute it here. + const firstStorageKey: string | StoreObject | undefined = + isReference(e) ? e.__ref : e; + + // It's possible that applying merge functions to this subtree will + // not change the incoming data, so this variable tracks the fields + // that did change, so we can create a new incoming object when (and + // only when) at least one incoming field has changed. We use a Map + // to preserve the type of numeric keys. + let changedFields: Map | undefined; + + const getValue = ( + from: typeof e | typeof i, + name: string | number, + ): StoreValue => { + return Array.isArray(from) + ? (typeof name === "number" ? from[name] : void 0) + : context.store.getFieldValue(from, String(name)) + }; + + mergeTree.map.forEach((childTree, storeFieldName) => { + const eVal = getValue(e, storeFieldName); + const iVal = getValue(i, storeFieldName); + const aVal = this.applyMerges( + childTree, + eVal, + iVal, + context, + firstStorageKey ? [ + firstStorageKey, + storeFieldName, + ] : void 0, + ); + if (aVal !== iVal) { + changedFields = changedFields || new Map; + changedFields.set(storeFieldName, aVal); + } + }); + + if (changedFields) { + // Shallow clone i so we can add changed fields to it. + incoming = (Array.isArray(i) ? i.slice(0) : { ...i }) as T; + changedFields.forEach((value, name) => { + (incoming as any)[name] = value; + }); + } + } + + if (mergeTree.info) { + return this.cache.policies.runMergeFunction( + existing, + incoming, + mergeTree.info, + context, + storageKeys && context.store.getStorage(...storageKeys), + ); + } + + return incoming; + } +} + +const emptyMergeTreePool: MergeTree[] = []; + +function getChildMergeTree( + { map }: MergeTree, + name: string | number, +): MergeTree { + if (!map.has(name)) { + map.set(name, emptyMergeTreePool.pop() || { map: new Map }); + } + return map.get(name)!; +} + +function maybeRecycleChildMergeTree( + { map }: MergeTree, + name: string | number, +) { + const childTree = map.get(name); + if (childTree && + !childTree.info && + !childTree.map.size) { + emptyMergeTreePool.push(childTree); + map.delete(name); + } } const warnings = new Set(); From cc2b0dc10d6eeca2e51952abca4534cba548fe94 Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Tue, 22 Sep 2020 15:55:47 -0400 Subject: [PATCH 03/10] Decompose makeMergeObjectsFunction helper function. --- src/cache/inmemory/policies.ts | 61 +++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 27 deletions(-) diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts index 0c8f359da0d..137bf9fa8d2 100644 --- a/src/cache/inmemory/policies.ts +++ b/src/cache/inmemory/policies.ts @@ -41,6 +41,7 @@ import { ReadFieldOptions, CanReadFunction, } from '../core/types/common'; +import { FieldValueGetter } from './entityStore'; export type TypePolicies = { [__typename: string]: TypePolicy; @@ -171,12 +172,14 @@ export interface FieldFunctionOptions< // Instead of just merging objects with { ...existing, ...incoming }, this // helper function can be used to merge objects in a way that respects any // custom merge functions defined for their fields. 
- mergeObjects( - existing: T, - incoming: T, - ): T | undefined; + mergeObjects: MergeObjectsFunction; } +type MergeObjectsFunction = ( + existing: T, + incoming: T, +) => T; + export type FieldReadFunction = ( // When reading a field, one often needs to know about any existing // value stored for that field. If the field is read before any value @@ -795,34 +798,38 @@ function makeFieldFunctionOptions( return policies.readField(options, context); }, - mergeObjects(existing, incoming) { - if (Array.isArray(existing) || Array.isArray(incoming)) { - throw new InvariantError("Cannot automatically merge arrays"); - } + mergeObjects: makeMergeObjectsFunction(getFieldValue), + }; +} - // These dynamic checks are necessary because the parameters of a - // custom merge function can easily have the any type, so the type - // system cannot always enforce the StoreObject | Reference - // parameter types of options.mergeObjects. - if (existing && typeof existing === "object" && - incoming && typeof incoming === "object") { - const eType = getFieldValue(existing, "__typename"); - const iType = getFieldValue(incoming, "__typename"); - const typesDiffer = eType && iType && eType !== iType; - - if ( - typesDiffer || - !storeValueIsStoreObject(existing) || - !storeValueIsStoreObject(incoming) - ) { - return incoming; - } +function makeMergeObjectsFunction( + getFieldValue: FieldValueGetter, +): MergeObjectsFunction { + return function mergeObjects(existing, incoming) { + if (Array.isArray(existing) || Array.isArray(incoming)) { + throw new InvariantError("Cannot automatically merge arrays"); + } - return { ...existing, ...incoming }; + // These dynamic checks are necessary because the parameters of a + // custom merge function can easily have the any type, so the type + // system cannot always enforce the StoreObject | Reference parameter + // types of options.mergeObjects. + if (existing && typeof existing === "object" && + incoming && typeof incoming === "object") { + const eType = getFieldValue(existing, "__typename"); + const iType = getFieldValue(incoming, "__typename"); + const typesDiffer = eType && iType && eType !== iType; + + if (typesDiffer || + !storeValueIsStoreObject(existing) || + !storeValueIsStoreObject(incoming)) { + return incoming; } - return incoming; + return { ...existing, ...incoming }; } + + return incoming; }; } From 6b6350f3cf5a9f941f8572ff5981e4ddb086dcad Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Tue, 22 Sep 2020 16:12:06 -0400 Subject: [PATCH 04/10] Optimize merge:true and merge:false implementations. We know what these functions do, so we can detect them by reference and then just do what they would have done, without creating a complete options object or actually calling mergeTrueFn or mergeFalseFn. 
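
A minimal sketch of the shortcut, with the real FieldFunctionOptions machinery omitted (mergeTrueFn and mergeFalseFn are the internal helpers this commit compares against; the simplified Merge signature and harness are hypothetical):

```ts
type Merge = (existing: unknown, incoming: unknown) => unknown;

// merge:true and merge:false are normalized to these two canonical
// function objects when policies are registered, so reference equality
// is enough to recognize them later, without ever calling them.
const mergeTrueFn: Merge = (existing, incoming) =>
  ({ ...(existing as object), ...(incoming as object) });
const mergeFalseFn: Merge = (_existing, incoming) => incoming;

function runMerge(existing: unknown, incoming: unknown, merge: Merge) {
  if (merge === mergeTrueFn) {
    // Do what mergeTrueFn would have done (the real implementation
    // delegates to options.mergeObjects), skipping options construction.
    return { ...(existing as object), ...(incoming as object) };
  }
  if (merge === mergeFalseFn) {
    // merge:false always prefers the incoming value.
    return incoming;
  }
  // Arbitrary user-supplied merge functions still get the full options.
  return merge(existing, incoming);
}
```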
--- src/cache/inmemory/policies.ts | 65 +++++++++++++++++------------- src/cache/inmemory/types.ts | 3 +- src/cache/inmemory/writeToStore.ts | 12 ++++-- 3 files changed, 48 insertions(+), 32 deletions(-) diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts index 137bf9fa8d2..69e409b72dd 100644 --- a/src/cache/inmemory/policies.ts +++ b/src/cache/inmemory/policies.ts @@ -708,48 +708,57 @@ export class Policies { return existing; } - public hasMergeFunction( + public getMergeFunction( typename: string | undefined, fieldName: string, ) { const policy = this.getFieldPolicy(typename, fieldName, false); - return !!(policy && policy.merge); + return policy && policy.merge; } public runMergeFunction( existing: StoreValue, incoming: StoreValue, - { field, typename }: MergeInfo, + { field, typename, merge }: MergeInfo, context: ReadMergeModifyContext, storage?: StorageType, ) { - const fieldName = field.name.value; - const { merge } = this.getFieldPolicy(typename, fieldName, false)!; - if (merge) { - return merge(existing, incoming, makeFieldFunctionOptions( - this, - // Unlike options.readField for read functions, we do not fall - // back to the current object if no foreignObjOrRef is provided, - // because it's not clear what the current object should be for - // merge functions: the (possibly undefined) existing object, or - // the incoming object? If you think your merge function needs - // to read sibling fields in order to produce a new value for - // the current field, you might want to rethink your strategy, - // because that's a recipe for making merge behavior sensitive - // to the order in which fields are written into the cache. - // However, readField(name, ref) is useful for merge functions - // that need to deduplicate child objects and references. - void 0, - { typename, - fieldName, - field, - variables: context.variables }, - context, - storage || Object.create(null), - )); + if (merge === mergeTrueFn) { + // Instead of going to the trouble of creating a full + // FieldFunctionOptions object and calling mergeTrueFn, we can + // simply call mergeObjects, as mergeTrueFn would. + return makeMergeObjectsFunction( + context.store.getFieldValue + )(existing as StoreObject, + incoming as StoreObject); } - return incoming; + if (merge === mergeFalseFn) { + // Likewise for mergeFalseFn, whose implementation is even simpler. + return incoming; + } + + return merge(existing, incoming, makeFieldFunctionOptions( + this, + // Unlike options.readField for read functions, we do not fall + // back to the current object if no foreignObjOrRef is provided, + // because it's not clear what the current object should be for + // merge functions: the (possibly undefined) existing object, or + // the incoming object? If you think your merge function needs + // to read sibling fields in order to produce a new value for + // the current field, you might want to rethink your strategy, + // because that's a recipe for making merge behavior sensitive + // to the order in which fields are written into the cache. + // However, readField(name, ref) is useful for merge functions + // that need to deduplicate child objects and references. 
+ void 0, + { typename, + fieldName: field.name.value, + field, + variables: context.variables }, + context, + storage || Object.create(null), + )); } } diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts index 3004ea8cc3a..82289c7c934 100644 --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -7,7 +7,7 @@ import { Reference, } from '../../utilities'; import { FieldValueGetter } from './entityStore'; -import { KeyFieldsFunction, StorageType } from './policies'; +import { KeyFieldsFunction, StorageType, FieldMergeFunction } from './policies'; import { Modifier, Modifiers, @@ -104,6 +104,7 @@ export type ApolloReducerConfig = { export interface MergeInfo { field: FieldNode; typename: string | undefined; + merge: FieldMergeFunction; }; export interface MergeTree { diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index bba6c0969fb..0a66313ef7a 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -205,12 +205,14 @@ export class StoreWriter { let incomingValue = this.processFieldValue(value, selection, context, childTree); - if (policies.hasMergeFunction(typename, selection.name.value)) { + const merge = policies.getMergeFunction(typename, selection.name.value); + if (merge) { childTree.info = { // TODO Check compatibility against any existing // childTree.field? field: selection, typename, + merge, }; } else { maybeRecycleChildMergeTree(mergeTree, storeFieldName); @@ -272,11 +274,15 @@ export class StoreWriter { } if (process.env.NODE_ENV !== "production") { + const hasMergeFunction = (storeFieldName: string) => { + const childTree = mergeTree.map.get(storeFieldName); + return Boolean(childTree && childTree.info && childTree.info.merge); + }; + Object.keys(incomingFields).forEach(storeFieldName => { - const fieldName = fieldNameFromStoreName(storeFieldName); // If a merge function was defined for this field, trust that it // did the right thing about (not) clobbering data. - if (!policies.hasMergeFunction(typename, fieldName)) { + if (!hasMergeFunction(storeFieldName)) { warnAboutDataLoss( entityRef, incomingFields, From a2486c17daf939548290f7e219321215ad37722a Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Tue, 22 Sep 2020 16:47:19 -0400 Subject: [PATCH 05/10] Allow configuring merge functions/true/false for type policies. https://github.com/apollographql/apollo-client/issues/6949#issuecomment-695055680 --- src/cache/inmemory/policies.ts | 65 +++++++++++++++++++++++------- src/cache/inmemory/writeToStore.ts | 11 ++++- 2 files changed, 60 insertions(+), 16 deletions(-) diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts index 69e409b72dd..8f2160ba9f5 100644 --- a/src/cache/inmemory/policies.ts +++ b/src/cache/inmemory/policies.ts @@ -65,11 +65,22 @@ export type KeyFieldsFunction = ( context: KeyFieldsContext, ) => KeySpecifier | ReturnType; +// TODO Should TypePolicy be a generic type, with a TObject or TEntity +// type parameter? export type TypePolicy = { // Allows defining the primary key fields for this type, either using an // array of field names or a function that returns an arbitrary string. keyFields?: KeySpecifier | KeyFieldsFunction | false; + // Allows defining a merge function (or merge:true/false shorthand) to + // be used for merging objects of this type wherever they appear, unless + // the parent field also defines a merge function/boolean (that is, + // parent field merge functions take precedence over type policy merge + // functions). 
In many cases, defining merge:true for a given type + // policy can save you from specifying merge:true for all the field + // policies where that type might be encountered. + merge?: FieldMergeFunction | boolean; + // In the rare event that your schema happens to use a different // __typename for the root Query, Mutation, and/or Schema types, you can // express your deviant preferences by enabling one of these options. @@ -240,6 +251,7 @@ export class Policies { private typePolicies: { [__typename: string]: { keyFn?: KeyFieldsFunction; + merge?: FieldMergeFunction; fields: { [fieldName: string]: { keyFn?: KeyArgsFunction; @@ -359,6 +371,25 @@ export class Policies { const existing = this.getTypePolicy(typename); const { keyFields, fields } = incoming; + function setMerge( + existing: { merge?: FieldMergeFunction | boolean; }, + merge?: FieldMergeFunction | boolean, + ) { + existing.merge = + typeof merge === "function" ? merge : + // Pass merge:true as a shorthand for a merge implementation + // that returns options.mergeObjects(existing, incoming). + merge === true ? mergeTrueFn : + // Pass merge:false to make incoming always replace existing + // without any warnings about data clobbering. + merge === false ? mergeFalseFn : + existing.merge; + } + + // Type policies can define merge functions, as an alternative to + // using field policies to merge child objects. + setMerge(existing, incoming.merge); + if (incoming.queryType) this.setRootTypename("Query", typename); if (incoming.mutationType) this.setRootTypename("Mutation", typename); if (incoming.subscriptionType) this.setRootTypename("Subscription", typename); @@ -396,17 +427,11 @@ export class Policies { // Leave existing.keyFn unchanged if above cases fail. existing.keyFn; - if (typeof read === "function") existing.read = read; - - existing.merge = - typeof merge === "function" ? merge : - // Pass merge:true as a shorthand for a merge implementation - // that returns options.mergeObjects(existing, incoming). - merge === true ? mergeTrueFn : - // Pass merge:false to make incoming always replace existing - // without any warnings about data clobbering. - merge === false ? 
mergeFalseFn : - existing.merge; + if (typeof read === "function") { + existing.read = read; + } + + setMerge(existing, merge); } if (existing.read && existing.merge) { @@ -709,11 +734,21 @@ export class Policies { } public getMergeFunction( - typename: string | undefined, + parentTypename: string | undefined, fieldName: string, - ) { - const policy = this.getFieldPolicy(typename, fieldName, false); - return policy && policy.merge; + childTypename: string | undefined, + ): FieldMergeFunction | undefined { + let policy: + | Policies["typePolicies"][string] + | Policies["typePolicies"][string]["fields"][string] + | undefined = + this.getFieldPolicy(parentTypename, fieldName, false); + let merge = policy && policy.merge; + if (!merge && childTypename) { + policy = this.getTypePolicy(childTypename); + merge = policy && policy.merge; + } + return merge; } public runMergeFunction( diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index 0a66313ef7a..be241071512 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -205,7 +205,16 @@ export class StoreWriter { let incomingValue = this.processFieldValue(value, selection, context, childTree); - const merge = policies.getMergeFunction(typename, selection.name.value); + const childTypename = selection.selectionSet + && context.store.getFieldValue(incomingValue as StoreObject, "__typename") + || void 0; + + const merge = policies.getMergeFunction( + typename, + selection.name.value, + childTypename, + ); + if (merge) { childTree.info = { // TODO Check compatibility against any existing From 75e4da1a2c22bc9bf6a910e41691a1f85895d8c7 Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Wed, 23 Sep 2020 15:03:42 -0400 Subject: [PATCH 06/10] Derive options.storage from parent ID + field name sequence. The optimism update allows passing an arguments object to KeyTrie#lookupArray, so we don't have to convert arguments to an array before performing KeyTrie lookups. 
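
For example, a standalone sketch of the new storage lookup (the KeyTrie usage mirrors the entityStore.ts change below; the Book:123 path and the timesMerged field are invented for illustration):

```ts
import { KeyTrie } from "optimism";

// One storage object per (parent identity, ...field path) sequence.
const storageTrie = new KeyTrie<Record<string, any>>(true);

function getStorage(
  idOrObj: string | object,
  ...storeFieldNames: (string | number)[]
): Record<string, any>;
function getStorage(): Record<string, any> {
  // KeyTrie#lookupArray accepts any array-like value, including the
  // arguments object, so no intermediate array needs to be allocated.
  return storageTrie.lookupArray(arguments);
}

// Storage shared by read/merge functions for Book:123's author.name path:
const storage = getStorage("Book:123", "author", "name");
storage.timesMerged = (storage.timesMerged || 0) + 1;
```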
--- package-lock.json | 6 +++--- package.json | 2 +- src/cache/inmemory/entityStore.ts | 23 +++++++---------------- src/cache/inmemory/types.ts | 2 +- src/cache/inmemory/writeToStore.ts | 28 ++++++++++++++++------------ 5 files changed, 28 insertions(+), 33 deletions(-) diff --git a/package-lock.json b/package-lock.json index a4b96f2851b..a7c53ee3660 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8317,9 +8317,9 @@ } }, "optimism": { - "version": "0.12.1", - "resolved": "https://registry.npmjs.org/optimism/-/optimism-0.12.1.tgz", - "integrity": "sha512-t8I7HM1dw0SECitBYAqFOVHoBAHEQBTeKjIL9y9ImHzAVkdyPK4ifTgM4VJRDtTUY4r/u5Eqxs4XcGPHaoPkeQ==", + "version": "0.12.2", + "resolved": "https://registry.npmjs.org/optimism/-/optimism-0.12.2.tgz", + "integrity": "sha512-k7hFhlmfLl6HNThIuuvYMQodC1c+q6Uc6V9cLVsMWyW514QuaxVJH/khPu2vLRIoDTpFdJ5sojlARhg1rzyGbg==", "requires": { "@wry/context": "^0.5.2" } diff --git a/package.json b/package.json index b004ef97b1f..daa65197023 100644 --- a/package.json +++ b/package.json @@ -77,7 +77,7 @@ "fast-json-stable-stringify": "^2.0.0", "graphql-tag": "^2.11.0", "hoist-non-react-statics": "^3.3.2", - "optimism": "^0.12.1", + "optimism": "^0.12.2", "prop-types": "^15.7.2", "symbol-observable": "^2.0.0", "terser": "^5.2.0", diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts index 9bdc401c419..b98a226092a 100644 --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -258,7 +258,7 @@ export abstract class EntityStore implements NormalizedCache { public abstract getStorage( idOrObj: string | StoreObject, - storeFieldName: string | number, + ...storeFieldNames: (string | number)[] ): StorageType; // Maps root entity IDs to the number of times they have been retained, minus @@ -482,16 +482,8 @@ export namespace EntityStore { } public readonly storageTrie = new KeyTrie(canUseWeakMap); - public getStorage( - idOrObj: string | StoreObject, - storeFieldName: string | number, - ): StorageType { - return this.storageTrie.lookup( - idOrObj, - // Normalize numbers to strings, so we don't accidentally end up - // with multiple storage objects for the same field. 
- String(storeFieldName), - ); + public getStorage(): StorageType { + return this.storageTrie.lookupArray(arguments); } } } @@ -558,11 +550,10 @@ class Layer extends EntityStore { } : fromParent; } - public getStorage( - idOrObj: string | StoreObject, - storeFieldName: string | number, - ): StorageType { - return this.parent.getStorage(idOrObj, storeFieldName); + public getStorage(): StorageType { + let p: EntityStore = this.parent; + while ((p as Layer).parent) p = (p as Layer).parent; + return p.getStorage.apply(p, arguments); } } diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts index 82289c7c934..e79f4ddc324 100644 --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -65,7 +65,7 @@ export interface NormalizedCache { getStorage( idOrObj: string | StoreObject, - storeFieldName: string | number, + ...storeFieldNames: (string | number)[] ): StorageType; } diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index be241071512..595a0073ab6 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -26,6 +26,7 @@ import { NormalizedCache, ReadMergeModifyContext, MergeTree } from './types'; import { makeProcessedFieldsMerger, fieldNameFromStoreName, storeValueIsStoreObject } from './helpers'; import { StoreReader } from './readFromStore'; import { InMemoryCache } from './inMemoryCache'; +import { EntityStore } from './entityStore'; export interface WriteContext extends ReadMergeModifyContext { readonly written: { @@ -345,7 +346,7 @@ export class StoreWriter { existing: StoreValue, incoming: T, context: ReadMergeModifyContext, - storageKeys?: [string | StoreObject, string | number], + getStorageArgs?: Parameters, ): T { if (mergeTree.map.size && !isReference(incoming)) { const e: StoreObject | Reference | undefined = ( @@ -365,12 +366,12 @@ export class StoreWriter { const i = incoming as StoreObject | StoreValue[]; // The options.storage objects provided to read and merge functions - // are derived from the identity of the parent object and the - // storeFieldName string of each field to be merged. Since the - // parent identity remains the same for each iteration of the loop - // below, we can precompute it here. - const firstStorageKey: string | StoreObject | undefined = - isReference(e) ? e.__ref : e; + // are derived from the identity of the parent object plus a + // sequence of storeFieldName strings/numbers identifying the nested + // field name path of each field value to be merged. + if (e && !getStorageArgs) { + getStorageArgs = [isReference(e) ? e.__ref : e]; + } // It's possible that applying merge functions to this subtree will // not change the incoming data, so this variable tracks the fields @@ -389,6 +390,9 @@ export class StoreWriter { }; mergeTree.map.forEach((childTree, storeFieldName) => { + if (getStorageArgs) { + getStorageArgs.push(storeFieldName); + } const eVal = getValue(e, storeFieldName); const iVal = getValue(i, storeFieldName); const aVal = this.applyMerges( @@ -396,15 +400,15 @@ export class StoreWriter { eVal, iVal, context, - firstStorageKey ? 
[ - firstStorageKey, - storeFieldName, - ] : void 0, + getStorageArgs, ); if (aVal !== iVal) { changedFields = changedFields || new Map; changedFields.set(storeFieldName, aVal); } + if (getStorageArgs) { + invariant(getStorageArgs.pop() === storeFieldName); + } }); if (changedFields) { @@ -422,7 +426,7 @@ export class StoreWriter { incoming, mergeTree.info, context, - storageKeys && context.store.getStorage(...storageKeys), + getStorageArgs && context.store.getStorage(...getStorageArgs), ); } From 41c6b74d2832d7e5343c52b76e7b37b3169969d7 Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Thu, 24 Sep 2020 14:23:00 -0400 Subject: [PATCH 07/10] Bump bundlesize limit from 24.9kB to 25kB. --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index daa65197023..dcae931c762 100644 --- a/package.json +++ b/package.json @@ -53,7 +53,7 @@ { "name": "apollo-client", "path": "./dist/apollo-client.cjs.min.js", - "maxSize": "24.9 kB" + "maxSize": "25 kB" } ], "peerDependencies": { From acfc70fa8e48a8c21e2779d6b827c077a357d8f4 Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Thu, 24 Sep 2020 14:59:55 -0400 Subject: [PATCH 08/10] Tests of merge:function/boolean in type/field policies. --- src/cache/inmemory/__tests__/policies.ts | 253 ++++++++++++++++++----- 1 file changed, 202 insertions(+), 51 deletions(-) diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts index 3e0d62dd670..4d00064d3ec 100644 --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -3703,32 +3703,7 @@ describe("type policies", function () { Author: { keyFields: false, fields: { - books: { - merge(existing: any[], incoming: any[], { - isReference, - }) { - const merged = existing ? existing.slice(0) : []; - const seen = new Set(); - if (existing) { - existing.forEach(book => { - if (isReference(book)) { - seen.add(book.__ref); - } - }); - } - incoming.forEach(book => { - if (isReference(book)) { - if (!seen.has(book.__ref)) { - merged.push(book); - seen.add(book.__ref); - } - } else { - merged.push(book); - } - }); - return merged; - }, - }, + books: booksMergePolicy(), }, }, }, @@ -3737,6 +3712,35 @@ describe("type policies", function () { testForceMerges(cache); }); + function booksMergePolicy(): FieldPolicy { + return { + merge(existing, incoming, { + isReference, + }) { + const merged = existing ? existing.slice(0) : []; + const seen = new Set(); + if (existing) { + existing.forEach(book => { + if (isReference(book)) { + seen.add(book.__ref); + } + }); + } + incoming.forEach(book => { + if (isReference(book)) { + if (!seen.has(book.__ref)) { + merged.push(book); + seen.add(book.__ref); + } + } else { + merged.push(book); + } + }); + return merged; + }, + }; + } + function testForceMerges(cache: InMemoryCache) { const queryWithAuthorName = gql` query { @@ -3808,6 +3812,7 @@ describe("type policies", function () { books: [{ __typename: "Book", isbn: "1250758009", + title: "The Topeka School", }], }, }, @@ -3917,7 +3922,7 @@ describe("type policies", function () { } // Same as previous test, except with merge:true for Book.author. 
- it("can force merging with merge: true", function () { + it("can force merging with merge:true field policy", function () { const cache = new InMemoryCache({ typePolicies: { Book: { @@ -3932,38 +3937,184 @@ describe("type policies", function () { Author: { keyFields: false, fields: { - books: { - merge(existing: any[], incoming: any[], { - isReference, - }) { - const merged = existing ? existing.slice(0) : []; - const seen = new Set(); - if (existing) { - existing.forEach(book => { - if (isReference(book)) { - seen.add(book.__ref); - } - }); - } - incoming.forEach(book => { - if (isReference(book)) { - if (!seen.has(book.__ref)) { - merged.push(book); - seen.add(book.__ref); - } - } else { - merged.push(book); - } - }); - return merged; + books: booksMergePolicy(), + }, + }, + }, + }); + + testForceMerges(cache); + }); + + // Same as previous test, except configuring merge:true for the Author + // type instead of for the Book.author field. + it("can force merging with merge:true type policy", function () { + const cache = new InMemoryCache({ + typePolicies: { + Book: { + keyFields: ["isbn"], + }, + + Author: { + keyFields: false, + merge: true, + fields: { + books: booksMergePolicy(), + }, + }, + }, + }); + + testForceMerges(cache); + }); + + it("can force merging with inherited merge:true field policy", function () { + const cache = new InMemoryCache({ + typePolicies: { + Authored: { + fields: { + author: { + merge: true, + }, + }, + }, + + Book: { + keyFields: ["isbn"], + }, + + Author: { + keyFields: false, + fields: { + books: booksMergePolicy(), + }, + }, + }, + + possibleTypes: { + Authored: ["Book", "Destruction"], + }, + }); + + testForceMerges(cache); + }); + + it("can force merging with inherited merge:true type policy", function () { + const cache = new InMemoryCache({ + typePolicies: { + Book: { + keyFields: ["isbn"], + }, + + Author: { + fields: { + books: booksMergePolicy(), + }, + }, + + Person: { + keyFields: false, + merge: true, + }, + }, + + possibleTypes: { + Person: ["Author"], + }, + }); + + testForceMerges(cache); + }); + + function checkAuthor(data: TData, canBeUndefined = false) { + if (data || !canBeUndefined) { + expect(data).toBeTruthy(); + expect(typeof data).toBe("object"); + expect((data as any).__typename).toBe("Author"); + } + return data; + } + + it("can force merging with inherited type policy merge function", function () { + let personMergeCount = 0; + + const cache = new InMemoryCache({ + typePolicies: { + Book: { + keyFields: ["isbn"], + }, + + Author: { + fields: { + books: booksMergePolicy(), + }, + }, + + Person: { + keyFields: false, + + merge(existing, incoming) { + checkAuthor(existing, true); + checkAuthor(incoming); + ++personMergeCount; + return { ...existing, ...incoming }; + }, + }, + }, + + possibleTypes: { + Person: ["Author"], + }, + }); + + testForceMerges(cache); + + expect(personMergeCount).toBe(3); + }); + + it("can force merging with inherited field merge function", function () { + let authorMergeCount = 0; + + const cache = new InMemoryCache({ + typePolicies: { + Book: { + keyFields: ["isbn"], + }, + + Authored: { + fields: { + author: { + merge(existing, incoming) { + checkAuthor(existing, true); + checkAuthor(incoming); + ++authorMergeCount; + return { ...existing, ...incoming }; }, }, }, }, + + + Author: { + fields: { + books: booksMergePolicy(), + }, + }, + + Person: { + keyFields: false, + }, + }, + + possibleTypes: { + Authored: ["Destiny", "Book"], + Person: ["Author"], }, }); testForceMerges(cache); + + 
expect(authorMergeCount).toBe(3); }); }); From 5e74c703b5538723e41902f126d26a759d2496d2 Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Thu, 24 Sep 2020 16:35:06 -0400 Subject: [PATCH 09/10] Update data loss warning to recommend type policy merge function. This warning was introduced in #6372. --- docs/source/caching/cache-field-behavior.md | 27 +++++++++++++++++++++ src/cache/inmemory/writeToStore.ts | 2 +- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/docs/source/caching/cache-field-behavior.md b/docs/source/caching/cache-field-behavior.md index 966822f23db..a381ef814a3 100644 --- a/docs/source/caching/cache-field-behavior.md +++ b/docs/source/caching/cache-field-behavior.md @@ -269,6 +269,33 @@ const cache = new InMemoryCache({ In summary, the `Book.author` policy above allows the cache to safely merge the `author` objects of any two `Book` objects that have the same identity. +#### Configuring `merge` functions for types rather than fields + +Beginning with Apollo Client 3.3, you can avoid having to configure `merge` functions for lots of different fields that might hold an `Author` object, and instead put the `merge` configuration in the `Author` type policy: + +```ts{13} +const cache = new InMemoryCache({ + typePolicies: { + Book: { + fields: { + // No longer necessary! + // author: { + // merge: true, + // }, + }, + }, + + Author: { + merge: true, + }, + }, +}); +``` + +These configurations have the same behavior, but putting the `merge: true` in the `Author` type policy is shorter and easier to maintain, especially when `Author` objects could appear in lots of different fields besides `Book.author`. + +Remember that mergeable objects will only be merged with existing objects occupying the same field of the same parent object, and only when the `__typename` of the objects is the same. If you really need to work around these rules, you can write a custom `merge` function to do whatever you want, but `merge: true` follows these rules. + ### Merging arrays of non-normalized objects Once you're comfortable with the ideas and recommendations from the previous section, consider what happens when a `Book` can have multiple authors: diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index 595a0073ab6..bb0d8c43864 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -525,7 +525,7 @@ function warnAboutDataLoss( To address this problem (which is not a bug in Apollo Client), ${ childTypenames.length ? "either ensure all objects of type " + - childTypenames.join(" and ") + " have IDs, or " + childTypenames.join(" and ") + " have an ID or a custom merge function, or " : "" }define a custom merge function for the ${ typeDotName From 3d3af865188d3a52b4e19d0de821ea11487d58eb Mon Sep 17 00:00:00 2001 From: Ben Newman Date: Thu, 24 Sep 2020 16:51:43 -0400 Subject: [PATCH 10/10] Mention PR #7070 in CHANGELOG.md. --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 26f04d6f330..b58d6e4f71e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,9 @@ - Support inheritance of type and field policies, according to `possibleTypes`.
   [@benjamn](https://github.com/benjamn) in [#7065](https://github.com/apollographql/apollo-client/pull/7065)
 
+- Allow configuring custom `merge` functions, including the `merge: true` and `merge: false` shorthands, in type policies as well as field policies. <br/>
+  [@benjamn](https://github.com/benjamn) in [#7070](https://github.com/apollographql/apollo-client/pull/7070)
+
 - Shallow-merge `options.variables` when combining existing or default options with newly-provided options, so new variables do not completely overwrite existing variables. <br/>
   [@amannn](https://github.com/amannn) in [#6927](https://github.com/apollographql/apollo-client/pull/6927)
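
To illustrate the variables behavior described in that last entry (the option values here are hypothetical):

```ts
const defaultOptions = {
  fetchPolicy: "cache-first",
  variables: { limit: 10, offset: 0 },
};
const newOptions = {
  variables: { offset: 20 },
};

// Shallow-merging options.variables preserves previously provided
// variables instead of replacing the whole variables object:
const combined = {
  ...defaultOptions,
  ...newOptions,
  variables: {
    ...defaultOptions.variables,
    ...newOptions.variables,
  },
};
// => { fetchPolicy: "cache-first", variables: { limit: 10, offset: 20 } }
```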