@@ -53,6 +53,8 @@ export abstract class EntityStore implements NormalizedCache {
5353
5454 public abstract removeLayer ( layerId : string ) : EntityStore ;
5555
56+ public abstract removeLayers ( layersIds : string [ ] ) : EntityStore ;
57+
5658 // Although the EntityStore class is abstract, it contains concrete
5759 // implementations of the various NormalizedCache interface methods that
5860 // are inherited by the Root and Layer subclasses.
@@ -721,6 +723,10 @@ export namespace EntityStore {
721723 return this ;
722724 }
723725
    // Root sits at the bottom of the layer chain, so there are no layers
    // to remove; mirrors removeLayer above by returning the chain unchanged.
    public removeLayers(): Root {
      return this;
    }
729+
724730 public readonly storageTrie = new Trie < StorageType > ( canUseWeakMap ) ;
725731 public getStorage ( ) : StorageType {
726732 return this . storageTrie . lookupArray ( arguments ) ;
@@ -745,52 +751,73 @@ class Layer extends EntityStore {
745751 return new Layer ( layerId , this , replay , this . group ) ;
746752 }
747753
754+ private dirtyFieds ( newParent : EntityStore ) : void {
755+ if ( this . group . caching ) {
756+ // Dirty every ID we're removing. Technically we might be able to avoid
757+ // dirtying fields that have values in higher layers, but we don't have
758+ // easy access to higher layers here, and we're about to recreate those
759+ // layers anyway (see parent.addLayer below).
760+ Object . keys ( this . data ) . forEach ( ( dataId ) => {
761+ const ownStoreObject = this . data [ dataId ] ;
762+ const parentStoreObject = newParent [ "lookup" ] ( dataId ) ;
763+ if ( ! parentStoreObject ) {
764+ // The StoreObject identified by dataId was defined in this layer
765+ // but will be undefined in the parent layer, so we can delete the
766+ // whole entity using this.delete(dataId). Since we're about to
767+ // throw this layer away, the only goal of this deletion is to dirty
768+ // the removed fields.
769+ this . delete ( dataId ) ;
770+ } else if ( ! ownStoreObject ) {
771+ // This layer had an entry for dataId but it was undefined, which
772+ // means the entity was deleted in this layer, and it's about to
773+ // become undeleted when we remove this layer, so we need to dirty
774+ // all fields that are about to be reexposed.
775+ this . group . dirty ( dataId , "__exists" ) ;
776+ Object . keys ( parentStoreObject ) . forEach ( ( storeFieldName ) => {
777+ this . group . dirty ( dataId , storeFieldName ) ;
778+ } ) ;
779+ } else if ( ownStoreObject !== parentStoreObject ) {
780+ // If ownStoreObject is not exactly the same as parentStoreObject,
781+ // dirty any fields whose values will change as a result of this
782+ // removal.
783+ Object . keys ( ownStoreObject ) . forEach ( ( storeFieldName ) => {
784+ if (
785+ ! equal (
786+ ownStoreObject [ storeFieldName ] ,
787+ parentStoreObject [ storeFieldName ]
788+ )
789+ ) {
790+ this . group . dirty ( dataId , storeFieldName ) ;
791+ }
792+ } ) ;
793+ }
794+ } ) ;
795+ }
796+ }
797+
748798 public removeLayer ( layerId : string ) : EntityStore {
749799 // Remove all instances of the given id, not just the first one.
750800 const parent = this . parent . removeLayer ( layerId ) ;
751801
752802 if ( layerId === this . id ) {
753- if ( this . group . caching ) {
754- // Dirty every ID we're removing. Technically we might be able to avoid
755- // dirtying fields that have values in higher layers, but we don't have
756- // easy access to higher layers here, and we're about to recreate those
757- // layers anyway (see parent.addLayer below).
758- Object . keys ( this . data ) . forEach ( ( dataId ) => {
759- const ownStoreObject = this . data [ dataId ] ;
760- const parentStoreObject = parent [ "lookup" ] ( dataId ) ;
761- if ( ! parentStoreObject ) {
762- // The StoreObject identified by dataId was defined in this layer
763- // but will be undefined in the parent layer, so we can delete the
764- // whole entity using this.delete(dataId). Since we're about to
765- // throw this layer away, the only goal of this deletion is to dirty
766- // the removed fields.
767- this . delete ( dataId ) ;
768- } else if ( ! ownStoreObject ) {
769- // This layer had an entry for dataId but it was undefined, which
770- // means the entity was deleted in this layer, and it's about to
771- // become undeleted when we remove this layer, so we need to dirty
772- // all fields that are about to be reexposed.
773- this . group . dirty ( dataId , "__exists" ) ;
774- Object . keys ( parentStoreObject ) . forEach ( ( storeFieldName ) => {
775- this . group . dirty ( dataId , storeFieldName ) ;
776- } ) ;
777- } else if ( ownStoreObject !== parentStoreObject ) {
778- // If ownStoreObject is not exactly the same as parentStoreObject,
779- // dirty any fields whose values will change as a result of this
780- // removal.
781- Object . keys ( ownStoreObject ) . forEach ( ( storeFieldName ) => {
782- if (
783- ! equal (
784- ownStoreObject [ storeFieldName ] ,
785- parentStoreObject [ storeFieldName ]
786- )
787- ) {
788- this . group . dirty ( dataId , storeFieldName ) ;
789- }
790- } ) ;
791- }
792- } ) ;
793- }
803+ this . dirtyFieds ( parent ) ;
804+
805+ return parent ;
806+ }
807+
808+ // No changes are necessary if the parent chain remains identical.
809+ if ( parent === this . parent ) return this ;
810+
811+ // Recreate this layer on top of the new parent.
812+ return parent . addLayer ( this . id , this . replay ) ;
813+ }
814+
  // Removes all instances of every id in layerIds, not just the first
  // match, in a single traversal of the parent chain.
  public removeLayers(layerIds: string[]): EntityStore {
    const parent = this.parent.removeLayers(layerIds);

    if (layerIds.includes(this.id)) {
      // This layer is one of those being removed: dirty the fields it was
      // overriding, then expose the updated parent in its place.
      this.dirtyFieds(parent);

      return parent;
    }
@@ -849,6 +876,10 @@ class Stump extends Layer {
849876 return this ;
850877 }
851878
  // Mirrors removeLayer above: the Stump itself is never removed, so layer
  // removal stops here and the chain is returned unchanged.
  public removeLayers() {
    return this;
  }
882+
852883 public merge ( older : string | StoreObject , newer : string | StoreObject ) {
853884 // We never want to write any data into the Stump, so we forward any merge
854885 // calls to the Root instead. Another option here would be to throw an
0 commit comments