Skip to content

Commit 6592183

Browse files
authored
Support custom schedulers (#228)
1 parent 3b2192c commit 6592183

File tree

5 files changed

+136
-1
lines changed

5 files changed

+136
-1
lines changed

README.md

+54
Original file line numberDiff line numberDiff line change
@@ -133,6 +133,58 @@ with the original keys `[ 2, 9, 6, 1 ]`:
133133
]
134134
```
135135

136+
#### Batch Scheduling
137+
138+
By default DataLoader will coalesce all individual loads which occur within a
139+
single frame of execution before calling your batch function with all requested
140+
keys. This ensures no additional latency while capturing many related requests
141+
into a single batch. In fact, this is the same behavior used in Facebook's
142+
original PHP implementation in 2010. See `enqueuePostPromiseJob` in the
143+
[source code][] for more details about how this works.
144+
145+
However, sometimes this behavior is not desirable or optimal. Perhaps you expect
146+
requests to be spread out over a few subsequent ticks because of an existing use
147+
of `setTimeout`, or you just want manual control over dispatching regardless of
148+
the run loop. DataLoader allows providing a custom batch scheduler to provide
149+
these or any other behaviors.
150+
151+
A custom scheduler is provided as `batchScheduleFn` in options. It must be a
152+
function which is passed a callback and is expected to call that callback in the
153+
immediate future to execute the batch request.
154+
155+
As an example, here is a batch scheduler which collects all requests over a
156+
100ms window of time (and as a consequence, adds 100ms of latency):
157+
158+
```js
159+
const myLoader = new DataLoader(myBatchFn, {
160+
batchScheduleFn: callback => setTimeout(callback, 100)
161+
})
162+
```
163+
164+
As another example, here is a manually dispatched batch scheduler:
165+
166+
```js
167+
function createScheduler() {
168+
let callbacks = []
169+
return {
170+
schedule(callback) {
171+
callbacks.push(callback)
172+
},
173+
dispatch() {
174+
callbacks.forEach(callback => callback())
175+
callbacks = []
176+
}
177+
}
178+
}
179+
180+
const { schedule, dispatch } = createScheduler()
181+
const myLoader = new DataLoader(myBatchFn, { batchScheduleFn: schedule })
182+
183+
myLoader.load(1)
184+
myLoader.load(2)
185+
dispatch()
186+
```
187+
136188

137189
## Caching
138190

@@ -345,6 +397,7 @@ Create a new `DataLoader` given a batch loading function and options.
345397
| ---------- | ---- | ------- | ----------- |
346398
| *batch* | Boolean | `true` | Set to `false` to disable batching, invoking `batchLoadFn` with a single load key. This is equivalent to setting `maxBatchSize` to `1`.
347399
| *maxBatchSize* | Number | `Infinity` | Limits the number of items that get passed in to the `batchLoadFn`. May be set to `1` to disable batching.
400+
| *batchScheduleFn* | Function | See [Batch scheduling](#batch-scheduling) | A function to schedule the later execution of a batch. The function is expected to call the provided callback in the immediate future.
348401
| *cache* | Boolean | `true` | Set to `false` to disable memoization caching, creating a new Promise and new key in the `batchLoadFn` for every load of the same key. This is equivalent to setting `cacheMap` to `null`.
349402
| *cacheKeyFn* | Function | `key => key` | Produces cache key for a given load key. Useful when objects are keys and two objects should be considered equivalent.
350403
| *cacheMap* | Object | `new Map()` | Instance of [Map][] (or an object with a similar API) to be used as cache. May be set to `null` to disable caching.
@@ -603,3 +656,4 @@ DataLoader and how it works.
603656
[express]: http://expressjs.com/
604657
[babel/polyfill]: https://babeljs.io/docs/usage/polyfill/
605658
[lru_map]: https://github.com/rsms/js-lru
659+
[source code]: https://github.com/graphql/dataloader/blob/master/src/index.js

src/__tests__/abuse.test.js

+7
Original file line numberDiff line numberDiff line change
@@ -190,4 +190,11 @@ describe('Provides descriptive error messages for API abuse', () => {
190190
new DataLoader(async keys => keys, { cacheKeyFn: null })
191191
).toThrow('cacheKeyFn must be a function: null');
192192
});
193+
194+
it('Requires a function for batchScheduleFn', () => {
195+
expect(() =>
196+
// $FlowExpectError
197+
new DataLoader(async keys => keys, { batchScheduleFn: null })
198+
).toThrow('batchScheduleFn must be a function: null');
199+
});
193200
});

src/__tests__/dataloader.test.js

+48
Original file line numberDiff line numberDiff line change
@@ -887,6 +887,54 @@ describe('Accepts options', () => {
887887

888888
});
889889

890+
describe('It allows custom schedulers', () => {
891+
892+
it('Supports manual dispatch', () => {
893+
function createScheduler() {
894+
let callbacks = [];
895+
return {
896+
schedule(callback) {
897+
callbacks.push(callback);
898+
},
899+
dispatch() {
900+
callbacks.forEach(callback => callback());
901+
callbacks = [];
902+
}
903+
};
904+
}
905+
906+
const { schedule, dispatch } = createScheduler();
907+
const [ identityLoader, loadCalls ] = idLoader<string>({
908+
batchScheduleFn: schedule
909+
});
910+
911+
identityLoader.load('A');
912+
identityLoader.load('B');
913+
dispatch();
914+
identityLoader.load('A');
915+
identityLoader.load('C');
916+
dispatch();
917+
// Note: never dispatched!
918+
identityLoader.load('D');
919+
920+
expect(loadCalls).toEqual([ [ 'A', 'B' ], [ 'C' ] ]);
921+
});
922+
923+
it('Custom batch scheduler is provided loader as this context', () => {
924+
let that;
925+
function batchScheduleFn(callback) {
926+
that = this;
927+
callback();
928+
}
929+
930+
const [ identityLoader ] = idLoader<string>({ batchScheduleFn });
931+
932+
identityLoader.load('A');
933+
expect(that).toBe(identityLoader);
934+
});
935+
936+
});
937+
890938
describe('It is resilient to job queue ordering', () => {
891939

892940
it('batches loads occuring within promises', async () => {

src/index.d.ts

+7
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,13 @@ declare namespace DataLoader {
9090
*/
9191
maxBatchSize?: number;
9292

93+
/**
94+
* Default see https://github.com/graphql/dataloader#batch-scheduling.
95+
* A function to schedule the later execution of a batch. The function is
96+
* expected to call the provided callback in the immediate future.
97+
*/
98+
batchScheduleFn?: (callback: () => void) => void;
99+
93100
/**
94101
* Default `true`. Set to `false` to disable memoization caching, creating a
95102
* new Promise and new key in the `batchLoadFn` for every load of the same

src/index.js

+20-1
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ export type BatchLoadFn<K, V> =
1717
export type Options<K, V, C = K> = {
1818
batch?: boolean;
1919
maxBatchSize?: number;
20+
batchScheduleFn?: (callback: () => void) => void;
2021
cache?: boolean;
2122
cacheKeyFn?: (key: K) => C;
2223
cacheMap?: CacheMap<C, Promise<V>> | null;
@@ -53,6 +54,7 @@ class DataLoader<K, V, C = K> {
5354
}
5455
this._batchLoadFn = batchLoadFn;
5556
this._maxBatchSize = getValidMaxBatchSize(options);
57+
this._batchScheduleFn = getValidBatchScheduleFn(options);
5658
this._cacheKeyFn = getValidCacheKeyFn(options);
5759
this._cacheMap = getValidCacheMap(options);
5860
this._batch = null;
@@ -61,6 +63,7 @@ class DataLoader<K, V, C = K> {
6163
// Private
6264
_batchLoadFn: BatchLoadFn<K, V>;
6365
_maxBatchSize: number;
66+
_batchScheduleFn: (() => void) => void;
6467
_cacheKeyFn: K => C;
6568
_cacheMap: CacheMap<C, Promise<V>> | null;
6669
_batch: Batch<K, V> | null;
@@ -271,7 +274,7 @@ function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
271274
loader._batch = newBatch;
272275

273276
// Then schedule a task to dispatch this batch of requests.
274-
enqueuePostPromiseJob(() => dispatchBatch(loader, newBatch));
277+
loader._batchScheduleFn(() => dispatchBatch(loader, newBatch));
275278

276279
return newBatch;
277280
}
@@ -381,6 +384,22 @@ function getValidMaxBatchSize(options: ?Options<any, any, any>): number {
381384
return maxBatchSize;
382385
}
383386

387+
// Private
388+
function getValidBatchScheduleFn(
389+
options: ?Options<any, any, any>
390+
): (() => void) => void {
391+
var batchScheduleFn = options && options.batchScheduleFn;
392+
if (batchScheduleFn === undefined) {
393+
return enqueuePostPromiseJob;
394+
}
395+
if (typeof batchScheduleFn !== 'function') {
396+
throw new TypeError(
397+
`batchScheduleFn must be a function: ${(batchScheduleFn: any)}`
398+
);
399+
}
400+
return batchScheduleFn;
401+
}
402+
384403
// Private: given the DataLoader's options, produce a cache key function.
385404
function getValidCacheKeyFn<K, C>(options: ?Options<K, any, C>): (K => C) {
386405
var cacheKeyFn = options && options.cacheKeyFn;

0 commit comments

Comments
 (0)