Skip to content

Commit f5c97db

Browse files
author
Liam MILOR
committed
Allow the removal of multiple optimistic updates at once
1 parent 3f6d023 commit f5c97db

File tree

5 files changed

+88
-41
lines changed

5 files changed

+88
-41
lines changed

.changeset/new-snails-love.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@apollo/client": patch
3+
---
4+
5+
Add `removeOptimistics` to remove multiple optimistic updates at once

src/cache/core/cache.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -143,6 +143,8 @@ export abstract class ApolloCache<TSerialized> implements DataProxy {
143143

144144
// Optimistic API
145145

146+
public abstract removeOptimistics(ids: string[]): void;
147+
146148
public abstract removeOptimistic(id: string): void;
147149

148150
// Transactional API

src/cache/inmemory/entityStore.ts

Lines changed: 72 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,8 @@ export abstract class EntityStore implements NormalizedCache {
5353

5454
public abstract removeLayer(layerId: string): EntityStore;
5555

56+
public abstract removeLayers(layersIds: string[]): EntityStore;
57+
5658
// Although the EntityStore class is abstract, it contains concrete
5759
// implementations of the various NormalizedCache interface methods that
5860
// are inherited by the Root and Layer subclasses.
@@ -721,6 +723,10 @@ export namespace EntityStore {
721723
return this;
722724
}
723725

726+
public removeLayers(): Root {
727+
return this;
728+
}
729+
724730
public readonly storageTrie = new Trie<StorageType>(canUseWeakMap);
725731
public getStorage(): StorageType {
726732
return this.storageTrie.lookupArray(arguments);
@@ -745,52 +751,73 @@ class Layer extends EntityStore {
745751
return new Layer(layerId, this, replay, this.group);
746752
}
747753

754+
private dirtyFieds(newParent: EntityStore): void {
755+
if (this.group.caching) {
756+
// Dirty every ID we're removing. Technically we might be able to avoid
757+
// dirtying fields that have values in higher layers, but we don't have
758+
// easy access to higher layers here, and we're about to recreate those
759+
// layers anyway (see parent.addLayer below).
760+
Object.keys(this.data).forEach((dataId) => {
761+
const ownStoreObject = this.data[dataId];
762+
const parentStoreObject = newParent["lookup"](dataId);
763+
if (!parentStoreObject) {
764+
// The StoreObject identified by dataId was defined in this layer
765+
// but will be undefined in the parent layer, so we can delete the
766+
// whole entity using this.delete(dataId). Since we're about to
767+
// throw this layer away, the only goal of this deletion is to dirty
768+
// the removed fields.
769+
this.delete(dataId);
770+
} else if (!ownStoreObject) {
771+
// This layer had an entry for dataId but it was undefined, which
772+
// means the entity was deleted in this layer, and it's about to
773+
// become undeleted when we remove this layer, so we need to dirty
774+
// all fields that are about to be reexposed.
775+
this.group.dirty(dataId, "__exists");
776+
Object.keys(parentStoreObject).forEach((storeFieldName) => {
777+
this.group.dirty(dataId, storeFieldName);
778+
});
779+
} else if (ownStoreObject !== parentStoreObject) {
780+
// If ownStoreObject is not exactly the same as parentStoreObject,
781+
// dirty any fields whose values will change as a result of this
782+
// removal.
783+
Object.keys(ownStoreObject).forEach((storeFieldName) => {
784+
if (
785+
!equal(
786+
ownStoreObject[storeFieldName],
787+
parentStoreObject[storeFieldName]
788+
)
789+
) {
790+
this.group.dirty(dataId, storeFieldName);
791+
}
792+
});
793+
}
794+
});
795+
}
796+
}
797+
748798
public removeLayer(layerId: string): EntityStore {
749799
// Remove all instances of the given id, not just the first one.
750800
const parent = this.parent.removeLayer(layerId);
751801

752802
if (layerId === this.id) {
753-
if (this.group.caching) {
754-
// Dirty every ID we're removing. Technically we might be able to avoid
755-
// dirtying fields that have values in higher layers, but we don't have
756-
// easy access to higher layers here, and we're about to recreate those
757-
// layers anyway (see parent.addLayer below).
758-
Object.keys(this.data).forEach((dataId) => {
759-
const ownStoreObject = this.data[dataId];
760-
const parentStoreObject = parent["lookup"](dataId);
761-
if (!parentStoreObject) {
762-
// The StoreObject identified by dataId was defined in this layer
763-
// but will be undefined in the parent layer, so we can delete the
764-
// whole entity using this.delete(dataId). Since we're about to
765-
// throw this layer away, the only goal of this deletion is to dirty
766-
// the removed fields.
767-
this.delete(dataId);
768-
} else if (!ownStoreObject) {
769-
// This layer had an entry for dataId but it was undefined, which
770-
// means the entity was deleted in this layer, and it's about to
771-
// become undeleted when we remove this layer, so we need to dirty
772-
// all fields that are about to be reexposed.
773-
this.group.dirty(dataId, "__exists");
774-
Object.keys(parentStoreObject).forEach((storeFieldName) => {
775-
this.group.dirty(dataId, storeFieldName);
776-
});
777-
} else if (ownStoreObject !== parentStoreObject) {
778-
// If ownStoreObject is not exactly the same as parentStoreObject,
779-
// dirty any fields whose values will change as a result of this
780-
// removal.
781-
Object.keys(ownStoreObject).forEach((storeFieldName) => {
782-
if (
783-
!equal(
784-
ownStoreObject[storeFieldName],
785-
parentStoreObject[storeFieldName]
786-
)
787-
) {
788-
this.group.dirty(dataId, storeFieldName);
789-
}
790-
});
791-
}
792-
});
793-
}
803+
this.dirtyFieds(parent);
804+
805+
return parent;
806+
}
807+
808+
// No changes are necessary if the parent chain remains identical.
809+
if (parent === this.parent) return this;
810+
811+
// Recreate this layer on top of the new parent.
812+
return parent.addLayer(this.id, this.replay);
813+
}
814+
815+
public removeLayers(layerIds: string[]): EntityStore {
816+
// Remove all instances of each of the given ids, not just the first ones.
817+
const parent = this.parent.removeLayers(layerIds);
818+
819+
if (layerIds.includes(this.id)) {
820+
this.dirtyFieds(parent);
794821

795822
return parent;
796823
}
@@ -849,6 +876,10 @@ class Stump extends Layer {
849876
return this;
850877
}
851878

879+
public removeLayers() {
880+
return this;
881+
}
882+
852883
public merge(older: string | StoreObject, newer: string | StoreObject) {
853884
// We never want to write any data into the Stump, so we forward any merge
854885
// calls to the Root instead. Another option here would be to throw an

src/cache/inmemory/inMemoryCache.ts

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -411,6 +411,14 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> {
411411
}
412412
}
413413

414+
public removeOptimistics(idsToRemove: string[]) {
415+
const newOptimisticData = this.optimisticData.removeLayers(idsToRemove);
416+
if (newOptimisticData !== this.optimisticData) {
417+
this.optimisticData = newOptimisticData;
418+
this.broadcastWatches();
419+
}
420+
}
421+
414422
private txCount = 0;
415423

416424
public batch<TUpdateResult>(

src/core/QueryManager.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -291,6 +291,7 @@ export class QueryManager<TStore> {
291291
{
292292
...context,
293293
optimisticResponse: isOptimistic ? optimisticResponse : void 0,
294+
mutationId,
294295
},
295296
variables,
296297
false

0 commit comments

Comments
 (0)