Allow the removal of multiple optimistics at once
Liam MILOR committed Jul 17, 2024
1 parent 3f6d023 commit f5c97db
Showing 5 changed files with 88 additions and 41 deletions.
5 changes: 5 additions & 0 deletions .changeset/new-snails-love.md
@@ -0,0 +1,5 @@
---
"@apollo/client": patch
---

Add `removeOptimistics` to remove multiple optimistic updates at once
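
For illustration, a minimal sketch of how the new method might be called once this patch is applied. The cache setup and the layer ids (`"optimistic-1"`, `"optimistic-2"`) are hypothetical; they stand in for whatever ids were used when the optimistic layers were created, i.e. the same ids that `removeOptimistic` already accepts one at a time.

```ts
import { InMemoryCache } from "@apollo/client";

const cache = new InMemoryCache();

// Before this change, each layer had to be removed with its own call, and
// every call that actually removed a layer triggered its own broadcast:
//   cache.removeOptimistic("optimistic-1");
//   cache.removeOptimistic("optimistic-2");

// With this change, both layers are removed in one call, and cache watchers
// are notified at most once:
cache.removeOptimistics(["optimistic-1", "optimistic-2"]);
```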
2 changes: 2 additions & 0 deletions src/cache/core/cache.ts
@@ -143,6 +143,8 @@ export abstract class ApolloCache<TSerialized> implements DataProxy {

// Optimistic API

public abstract removeOptimistics(ids: string[]): void;

public abstract removeOptimistic(id: string): void;

// Transactional API
113 changes: 72 additions & 41 deletions src/cache/inmemory/entityStore.ts
@@ -53,6 +53,8 @@ export abstract class EntityStore implements NormalizedCache {

public abstract removeLayer(layerId: string): EntityStore;

public abstract removeLayers(layerIds: string[]): EntityStore;

// Although the EntityStore class is abstract, it contains concrete
// implementations of the various NormalizedCache interface methods that
// are inherited by the Root and Layer subclasses.
@@ -721,6 +723,10 @@ export namespace EntityStore {
return this;
}

public removeLayers(): Root {
return this;
}

public readonly storageTrie = new Trie<StorageType>(canUseWeakMap);
public getStorage(): StorageType {
return this.storageTrie.lookupArray(arguments);
@@ -745,52 +751,73 @@ class Layer extends EntityStore {
return new Layer(layerId, this, replay, this.group);
}

private dirtyFields(newParent: EntityStore): void {
if (this.group.caching) {
// Dirty every ID we're removing. Technically we might be able to avoid
// dirtying fields that have values in higher layers, but we don't have
// easy access to higher layers here, and we're about to recreate those
// layers anyway (see parent.addLayer below).
Object.keys(this.data).forEach((dataId) => {
const ownStoreObject = this.data[dataId];
const parentStoreObject = newParent["lookup"](dataId);
if (!parentStoreObject) {
// The StoreObject identified by dataId was defined in this layer
// but will be undefined in the parent layer, so we can delete the
// whole entity using this.delete(dataId). Since we're about to
// throw this layer away, the only goal of this deletion is to dirty
// the removed fields.
this.delete(dataId);
} else if (!ownStoreObject) {
// This layer had an entry for dataId but it was undefined, which
// means the entity was deleted in this layer, and it's about to
// become undeleted when we remove this layer, so we need to dirty
// all fields that are about to be reexposed.
this.group.dirty(dataId, "__exists");
Object.keys(parentStoreObject).forEach((storeFieldName) => {
this.group.dirty(dataId, storeFieldName);
});
} else if (ownStoreObject !== parentStoreObject) {
// If ownStoreObject is not exactly the same as parentStoreObject,
// dirty any fields whose values will change as a result of this
// removal.
Object.keys(ownStoreObject).forEach((storeFieldName) => {
if (
!equal(
ownStoreObject[storeFieldName],
parentStoreObject[storeFieldName]
)
) {
this.group.dirty(dataId, storeFieldName);
}
});
}
});
}
}

public removeLayer(layerId: string): EntityStore {
// Remove all instances of the given id, not just the first one.
const parent = this.parent.removeLayer(layerId);

if (layerId === this.id) {
if (this.group.caching) {
// Dirty every ID we're removing. Technically we might be able to avoid
// dirtying fields that have values in higher layers, but we don't have
// easy access to higher layers here, and we're about to recreate those
// layers anyway (see parent.addLayer below).
Object.keys(this.data).forEach((dataId) => {
const ownStoreObject = this.data[dataId];
const parentStoreObject = parent["lookup"](dataId);
if (!parentStoreObject) {
// The StoreObject identified by dataId was defined in this layer
// but will be undefined in the parent layer, so we can delete the
// whole entity using this.delete(dataId). Since we're about to
// throw this layer away, the only goal of this deletion is to dirty
// the removed fields.
this.delete(dataId);
} else if (!ownStoreObject) {
// This layer had an entry for dataId but it was undefined, which
// means the entity was deleted in this layer, and it's about to
// become undeleted when we remove this layer, so we need to dirty
// all fields that are about to be reexposed.
this.group.dirty(dataId, "__exists");
Object.keys(parentStoreObject).forEach((storeFieldName) => {
this.group.dirty(dataId, storeFieldName);
});
} else if (ownStoreObject !== parentStoreObject) {
// If ownStoreObject is not exactly the same as parentStoreObject,
// dirty any fields whose values will change as a result of this
// removal.
Object.keys(ownStoreObject).forEach((storeFieldName) => {
if (
!equal(
ownStoreObject[storeFieldName],
parentStoreObject[storeFieldName]
)
) {
this.group.dirty(dataId, storeFieldName);
}
});
}
});
}
this.dirtyFields(parent);

return parent;
}

// No changes are necessary if the parent chain remains identical.
if (parent === this.parent) return this;

// Recreate this layer on top of the new parent.
return parent.addLayer(this.id, this.replay);
}

public removeLayers(layerIds: string[]): EntityStore {
// Remove all instances of the given ids, not just the first occurrences.
const parent = this.parent.removeLayers(layerIds);

if (layerIds.includes(this.id)) {
this.dirtyFields(parent);

return parent;
}
@@ -849,6 +876,10 @@ class Stump extends Layer {
return this;
}

public removeLayers() {
return this;
}

public merge(older: string | StoreObject, newer: string | StoreObject) {
// We never want to write any data into the Stump, so we forward any merge
// calls to the Root instead. Another option here would be to throw an
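
To show what the new store method buys, here is a rough, type-only sketch of the two approaches. `optimisticData` and `idsToRemove` are hypothetical, and the tail of `removeLayers` is elided in the diff above, so treat the equivalence as the intended behavior rather than a guarantee.

```ts
import { EntityStore } from "./entityStore";

// Hypothetical inputs: an existing layer chain and the layer ids to drop.
declare const optimisticData: EntityStore;
declare const idsToRemove: string[];

// One id at a time: each removeLayer call walks the parent chain and rebuilds
// the layers that sat above the removed one.
let store: EntityStore = optimisticData;
for (const layerId of idsToRemove) {
  store = store.removeLayer(layerId);
}

// With removeLayers, the chain is walked once: every layer whose id is listed
// is dropped (after dirtying its fields via dirtyFields), which is intended
// to produce the same resulting store as the loop above.
const collapsed = optimisticData.removeLayers(idsToRemove);
```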
8 changes: 8 additions & 0 deletions src/cache/inmemory/inMemoryCache.ts
@@ -411,6 +411,14 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> {
}
}

public removeOptimistics(idsToRemove: string[]) {
const newOptimisticData = this.optimisticData.removeLayers(idsToRemove);
if (newOptimisticData !== this.optimisticData) {
this.optimisticData = newOptimisticData;
this.broadcastWatches();
}
}

private txCount = 0;

public batch<TUpdateResult>(
1 change: 1 addition & 0 deletions src/core/QueryManager.ts
@@ -291,6 +291,7 @@ export class QueryManager<TStore> {
{
...context,
optimisticResponse: isOptimistic ? optimisticResponse : void 0,
mutationId,
},
variables,
false
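
The `mutationId` now passed through the mutation context is what a caller would need in order to batch optimistic layer ids for `removeOptimistics`, since the optimistic layer created for a mutation is keyed by that id. As a purely hypothetical illustration (no such link ships with this commit), a custom link could collect those ids as mutations pass through:

```ts
import { ApolloLink } from "@apollo/client";

// Collect the mutation ids of optimistic mutations as they flow through the
// link chain. The array and the link itself are illustrative only.
const pendingOptimisticIds: string[] = [];

const collectOptimisticIdsLink = new ApolloLink((operation, forward) => {
  const { optimisticResponse, mutationId } = operation.getContext();
  if (optimisticResponse && typeof mutationId === "string") {
    pendingOptimisticIds.push(mutationId);
  }
  return forward(operation);
});

// Later, all of the corresponding optimistic layers can be cleared in one call:
//   cache.removeOptimistics(pendingOptimisticIds.splice(0));
```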
