Skip to content

Commit 06c403b

Browse files
authored
[BREAKING] Resolve cached values after batch dispatch (#222)
1 parent b5d7bf5 commit 06c403b

File tree

4 files changed

+176
-44
lines changed

4 files changed

+176
-44
lines changed

README.md

+32-10
Original file line numberDiff line numberDiff line change
@@ -140,16 +140,6 @@ DataLoader provides a memoization cache for all loads which occur in a single
140140
request to your application. After `.load()` is called once with a given key,
141141
the resulting value is cached to eliminate redundant loads.
142142

143-
In addition to relieving pressure on your data storage, caching results per-request
144-
also creates fewer objects which may relieve memory pressure on your application:
145-
146-
```js
147-
const userLoader = new DataLoader(...)
148-
const promise1A = userLoader.load(1)
149-
const promise1B = userLoader.load(1)
150-
assert(promise1A === promise1B)
151-
```
152-
153143
#### Caching Per-Request
154144

155145
DataLoader caching *does not* replace Redis, Memcache, or any other shared
@@ -183,6 +173,38 @@ app.get('/', function(req, res) {
183173
app.listen()
184174
```
185175

176+
#### Caching and Batching
177+
178+
Subsequent calls to `.load()` with the same key will result in that key not
179+
appearing in the keys provided to your batch function. *However*, the resulting
180+
Promise will still wait on the current batch to complete. This way both cached
181+
and uncached requests will resolve at the same time, allowing DataLoader
182+
optimizations for subsequent dependent loads.
183+
184+
In the example below, User `1` happens to be cached. However, because User `1`
185+
and `2` are loaded in the same tick, they will resolve at the same time. This
186+
means both `user.bestFriendID` loads will also happen in the same tick which
187+
results in two total requests (the same as if User `1` had not been cached).
188+
189+
```js
190+
userLoader.prime(1, { bestFriendID: 3 })
191+
192+
async function getBestFriend(userID) {
193+
const user = await userLoader.load(userID)
194+
return await userLoader.load(user.bestFriendID)
195+
}
196+
197+
// In one part of your application
198+
getBestFriend(1)
199+
200+
// Elsewhere
201+
getBestFriend(2)
202+
```
203+
204+
Without this optimization, if the cached User `1` resolved immediately, this
205+
could result in three total requests since each `user.bestFriendID` load would
206+
happen at different times.
207+
186208
#### Clearing Cache
187209

188210
In certain uncommon cases, clearing the request cache may be necessary.

src/__tests__/dataloader.test.js

+83-31
Original file line numberDiff line numberDiff line change
@@ -105,14 +105,95 @@ describe('Primary API', () => {
105105
expect(loadCalls).toEqual([ [ 1, 2 ], [ 3 ] ]);
106106
});
107107

108+
it('batches cached requests', async () => {
109+
const loadCalls = [];
110+
let resolveBatch = () => {};
111+
const identityLoader = new DataLoader<number, number>(keys => {
112+
loadCalls.push(keys);
113+
return new Promise(resolve => {
114+
resolveBatch = () => resolve(keys);
115+
});
116+
});
117+
118+
identityLoader.prime(1, 1);
119+
120+
const promise1 = identityLoader.load(1);
121+
const promise2 = identityLoader.load(2);
122+
123+
// Track when each resolves.
124+
let promise1Resolved = false;
125+
let promise2Resolved = false;
126+
promise1.then(() => { promise1Resolved = true; });
127+
promise2.then(() => { promise2Resolved = true; });
128+
129+
// Move to next macro-task (tick)
130+
await new Promise(setImmediate);
131+
132+
expect(promise1Resolved).toBe(false);
133+
expect(promise2Resolved).toBe(false);
134+
135+
resolveBatch();
136+
// Move to next macro-task (tick)
137+
await new Promise(setImmediate);
138+
139+
expect(promise1Resolved).toBe(true);
140+
expect(promise2Resolved).toBe(true);
141+
142+
const [ value1, value2 ] = await Promise.all([ promise1, promise2 ]);
143+
expect(value1).toBe(1);
144+
expect(value2).toBe(2);
145+
146+
expect(loadCalls).toEqual([ [ 2 ] ]);
147+
});
148+
149+
it('max batch size respects cached results', async () => {
150+
const loadCalls = [];
151+
let resolveBatch = () => {};
152+
const identityLoader = new DataLoader<number, number>(keys => {
153+
loadCalls.push(keys);
154+
return new Promise(resolve => {
155+
resolveBatch = () => resolve(keys);
156+
});
157+
}, { maxBatchSize: 1 });
158+
159+
identityLoader.prime(1, 1);
160+
161+
const promise1 = identityLoader.load(1);
162+
const promise2 = identityLoader.load(2);
163+
164+
// Track when each resolves.
165+
let promise1Resolved = false;
166+
let promise2Resolved = false;
167+
promise1.then(() => { promise1Resolved = true; });
168+
promise2.then(() => { promise2Resolved = true; });
169+
170+
// Move to next macro-task (tick)
171+
await new Promise(setImmediate);
172+
173+
// Promise 1 resolves first since max batch size is 1
174+
expect(promise1Resolved).toBe(true);
175+
expect(promise2Resolved).toBe(false);
176+
177+
resolveBatch();
178+
// Move to next macro-task (tick)
179+
await new Promise(setImmediate);
180+
181+
expect(promise1Resolved).toBe(true);
182+
expect(promise2Resolved).toBe(true);
183+
184+
const [ value1, value2 ] = await Promise.all([ promise1, promise2 ]);
185+
expect(value1).toBe(1);
186+
expect(value2).toBe(2);
187+
188+
expect(loadCalls).toEqual([ [ 2 ] ]);
189+
});
190+
108191
it('coalesces identical requests', async () => {
109192
const [ identityLoader, loadCalls ] = idLoader<number>();
110193

111194
const promise1a = identityLoader.load(1);
112195
const promise1b = identityLoader.load(1);
113196

114-
expect(promise1a).toBe(promise1b);
115-
116197
const [ value1a, value1b ] = await Promise.all([ promise1a, promise1b ]);
117198
expect(value1a).toBe(1);
118199
expect(value1b).toBe(1);
@@ -388,35 +469,6 @@ describe('Represents Errors', () => {
388469
expect(loadCalls).toEqual([]);
389470
});
390471

391-
// TODO: #224
392-
/*
393-
it('Not catching a primed error is an unhandled rejection', async () => {
394-
let hadUnhandledRejection = false;
395-
function onUnhandledRejection() {
396-
hadUnhandledRejection = true;
397-
}
398-
process.on('unhandledRejection', onUnhandledRejection);
399-
try {
400-
const [ identityLoader ] = idLoader<number>();
401-
402-
identityLoader.prime(1, new Error('Error: 1'));
403-
404-
// Wait a bit.
405-
await new Promise(setImmediate);
406-
407-
// Ignore result.
408-
identityLoader.load(1);
409-
410-
// Wait a bit.
411-
await new Promise(setImmediate);
412-
413-
expect(hadUnhandledRejection).toBe(true);
414-
} finally {
415-
process.removeListener('unhandledRejection', onUnhandledRejection);
416-
}
417-
});
418-
*/
419-
420472
it('Can clear values from cache after errors', async () => {
421473
const loadCalls = [];
422474
const errorLoader = new DataLoader(keys => {

src/__tests__/unhandled.test.js

+31
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
/**
2+
* Copyright (c) 2019-present, GraphQL Foundation
3+
*
4+
* This source code is licensed under the MIT license found in the
5+
* LICENSE file in the root directory of this source tree.
6+
*
7+
* @flow
8+
*/
9+
10+
const DataLoader = require('..');
11+
12+
describe('Unhandled rejections', () => {
13+
it('Not catching a primed error is an unhandled rejection', async () => {
14+
let hadUnhandledRejection = false;
15+
// Override Jest's unhandled detection
16+
global.jasmine.process.removeAllListeners('unhandledRejection');
17+
global.jasmine.process.on('unhandledRejection', () => {
18+
hadUnhandledRejection = true;
19+
});
20+
21+
const identityLoader = new DataLoader<number, number>(async keys => keys);
22+
23+
identityLoader.prime(1, new Error('Error: 1'));
24+
25+
// Ignore result.
26+
identityLoader.load(1);
27+
28+
await new Promise(resolve => setTimeout(resolve, 10));
29+
expect(hadUnhandledRejection).toBe(true);
30+
});
31+
});

src/index.js

+30-3
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,10 @@ class DataLoader<K, V, C = K> {
8484
if (cache) {
8585
var cachedPromise = cache.get(cacheKey);
8686
if (cachedPromise) {
87-
return cachedPromise;
87+
var cacheHits = batch.cacheHits || (batch.cacheHits = []);
88+
return new Promise(resolve => {
89+
cacheHits.push(() => resolve(cachedPromise));
90+
});
8891
}
8992
}
9093

@@ -241,7 +244,8 @@ type Batch<K, V> = {
241244
callbacks: Array<{
242245
resolve: (value: V) => void;
243246
reject: (error: Error) => void;
244-
}>
247+
}>,
248+
cacheHits?: Array<() => void>
245249
}
246250

247251
// Private: Either returns the current batch, or creates and schedules a
@@ -258,7 +262,10 @@ function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
258262
if (
259263
existingBatch !== null &&
260264
!existingBatch.hasDispatched &&
261-
(maxBatchSize === 0 || existingBatch.keys.length < maxBatchSize)
265+
(maxBatchSize === 0 ||
266+
(existingBatch.keys.length < maxBatchSize &&
267+
(!existingBatch.cacheHits ||
268+
existingBatch.cacheHits.length < maxBatchSize)))
262269
) {
263270
return existingBatch;
264271
}
@@ -282,6 +289,12 @@ function dispatchBatch<K, V>(
282289
// Mark this batch as having been dispatched.
283290
batch.hasDispatched = true;
284291

292+
// If there's nothing to load, resolve any cache hits and return early.
293+
if (batch.keys.length === 0) {
294+
resolveCacheHits(batch);
295+
return;
296+
}
297+
285298
// Call the provided batchLoadFn for this loader with the batch's keys and
286299
// with the loader as the `this` context.
287300
var batchPromise = loader._batchLoadFn(batch.keys);
@@ -317,6 +330,9 @@ function dispatchBatch<K, V>(
317330
);
318331
}
319332

333+
// Resolve all cache hits in the same micro-task as freshly loaded values.
334+
resolveCacheHits(batch);
335+
320336
// Step through values, resolving or rejecting each Promise in the batch.
321337
for (var i = 0; i < batch.callbacks.length; i++) {
322338
var value = values[i];
@@ -336,12 +352,23 @@ function failedDispatch<K, V>(
336352
batch: Batch<K, V>,
337353
error: Error
338354
) {
355+
// Cache hits are resolved, even though the batch failed.
356+
resolveCacheHits(batch);
339357
for (var i = 0; i < batch.keys.length; i++) {
340358
loader.clear(batch.keys[i]);
341359
batch.callbacks[i].reject(error);
342360
}
343361
}
344362

363+
// Private: Resolves the Promises for any cache hits in this batch.
364+
function resolveCacheHits(batch: Batch<any, any>) {
365+
if (batch.cacheHits) {
366+
for (var i = 0; i < batch.cacheHits.length; i++) {
367+
batch.cacheHits[i]();
368+
}
369+
}
370+
}
371+
345372
// Private: produce a cache key for a given key (and options)
346373
function getCacheKey<K, V, C>(
347374
options: ?Options<K, V, C>,

0 commit comments

Comments
 (0)