@@ -79,15 +79,15 @@ class DataLoader<K, V, C = K> {
       );
     }
 
-    var batch = getCurrentBatch(this);
-    var cacheMap = this._cacheMap;
-    var cacheKey = this._cacheKeyFn(key);
+    const batch = getCurrentBatch(this);
+    const cacheMap = this._cacheMap;
+    const cacheKey = this._cacheKeyFn(key);
 
     // If caching and there is a cache-hit, return cached Promise.
     if (cacheMap) {
-      var cachedPromise = cacheMap.get(cacheKey);
+      const cachedPromise = cacheMap.get(cacheKey);
       if (cachedPromise) {
-        var cacheHits = batch.cacheHits || (batch.cacheHits = []);
+        const cacheHits = batch.cacheHits || (batch.cacheHits = []);
         return new Promise(resolve => {
           cacheHits.push(() => {
             resolve(cachedPromise);
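Worth noting about the cache-hit path above: a hit does not hand back the cached Promise directly, but a wrapper that resolves only once the current batch dispatches, so cached and uncached loads settle in a consistent order. A minimal sketch of the observable effect, assuming the published dataloader package and a stand-in batch function:

import DataLoader from 'dataloader';

const calls = [];
const loader = new DataLoader(async (keys) => {
  calls.push(keys);
  return keys.map((k) => `value:${k}`);
});

// Two loads of the same key in one tick: the second is a cache hit,
// so the batch function sees the key only once.
const [a, b] = await Promise.all([loader.load(1), loader.load(1)]);
console.log(a, b);  // 'value:1' 'value:1'
console.log(calls); // [ [ 1 ] ]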
@@ -99,7 +99,7 @@ class DataLoader<K, V, C = K> {
     // Otherwise, produce a new Promise for this key, and enqueue it to be
     // dispatched along with the current batch.
     batch.keys.push(key);
-    var promise = new Promise((resolve, reject) => {
+    const promise = new Promise((resolve, reject) => {
       batch.callbacks.push({ resolve, reject });
     });
 
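The keys and callbacks arrays pushed above stay index-aligned, which is why a batch function must return exactly one value per key, in key order. A sketch of that contract, where fetchUsersByIds is a hypothetical data-layer helper:

const userLoader = new DataLoader(async (ids) => {
  // One round trip for every key queued in this tick.
  const rows = await fetchUsersByIds(ids); // hypothetical helper
  const byId = new Map(rows.map((user) => [user.id, user]));
  // Return values in the same order as `ids`; an Error marks a miss.
  return ids.map((id) => byId.get(id) ?? new Error(`No user: ${id}`));
});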
@@ -151,9 +151,9 @@ class DataLoader<K, V, C = K> {
    * method chaining.
    */
   clear(key: K): this {
-    var cacheMap = this._cacheMap;
+    const cacheMap = this._cacheMap;
     if (cacheMap) {
-      var cacheKey = this._cacheKeyFn(key);
+      const cacheKey = this._cacheKeyFn(key);
       cacheMap.delete(cacheKey);
     }
     return this;
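Returning this enables chaining; the usual reason to call clear() is cache invalidation after a write. A sketch, where updateUser is a hypothetical mutation:

await updateUser(4, { name: 'Zoë' });   // hypothetical mutation
userLoader.clear(4);                    // evict the stale entry
const fresh = await userLoader.load(4); // re-batches key 4 on next dispatch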
@@ -165,7 +165,7 @@ class DataLoader<K, V, C = K> {
    * method chaining.
    */
   clearAll(): this {
-    var cacheMap = this._cacheMap;
+    const cacheMap = this._cacheMap;
     if (cacheMap) {
       cacheMap.clear();
     }
@@ -179,15 +179,15 @@ class DataLoader<K, V, C = K> {
    * To prime the cache with an error at a key, provide an Error instance.
    */
   prime(key: K, value: V | Promise<V> | Error): this {
-    var cacheMap = this._cacheMap;
+    const cacheMap = this._cacheMap;
     if (cacheMap) {
-      var cacheKey = this._cacheKeyFn(key);
+      const cacheKey = this._cacheKeyFn(key);
 
       // Only add the key if it does not already exist.
       if (cacheMap.get(cacheKey) === undefined) {
         // Cache a rejected promise if the value is an Error, in order to match
         // the behavior of load(key).
-        var promise;
+        let promise;
         if (value instanceof Error) {
           promise = Promise.reject(value);
           // Since this is a case where an Error is intentionally being primed
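Because prime() only writes when the key is absent, overwriting requires an explicit clear() first; an Error value primes a rejection, mirroring a failed load. A usage sketch:

loader.prime(1, 'value:1');              // later load(1) skips the batch fn
loader.prime(2, new Error('Not found')); // later load(2) rejects

// prime() never overwrites; chain clear() to replace an existing entry.
loader.clear(1).prime(1, 'replacement');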
@@ -236,7 +236,7 @@ class DataLoader<K, V, C = K> {
 // for enqueuing a job to be performed after promise microtasks and before the
 // next macrotask. For browser environments, a macrotask is used (via
 // setImmediate or setTimeout) at a potential performance penalty.
-var enqueuePostPromiseJob =
+const enqueuePostPromiseJob =
   typeof process === 'object' && typeof process.nextTick === 'function'
     ? function (fn) {
         if (!resolvedPromise) {
@@ -255,7 +255,7 @@ var enqueuePostPromiseJob =
       };
 
 // Private: cached resolved Promise instance
-var resolvedPromise;
+let resolvedPromise;
 
 // Private: Describes a batch of requests
 type Batch<K, V> = {
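The Node.js branch above amounts to the following: reuse a single resolved Promise so its .then callback runs once the current microtask queue drains, then push the dispatch onto the next tick. A condensed sketch of that scheduling trick:

let resolved;
function enqueuePostPromiseJobSketch(fn) {
  if (!resolved) {
    resolved = Promise.resolve(); // created once, then reused
  }
  // Runs `fn` after all promise microtasks, before the next macrotask.
  resolved.then(() => process.nextTick(fn));
}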
@@ -273,7 +273,7 @@ type Batch<K, V> = {
 function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
   // If there is an existing batch which has not yet dispatched and is within
   // the limit of the batch size, then return it.
-  var existingBatch = loader._batch;
+  const existingBatch = loader._batch;
   if (
     existingBatch !== null &&
     !existingBatch.hasDispatched &&
@@ -283,7 +283,7 @@ function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
   }
 
   // Otherwise, create a new batch for this loader.
-  var newBatch = { hasDispatched: false, keys: [], callbacks: [] };
+  const newBatch = { hasDispatched: false, keys: [], callbacks: [] };
 
   // Store it on the loader so it may be reused.
   loader._batch = newBatch;
@@ -311,7 +311,7 @@ function dispatchBatch<K, V>(
 
   // Call the provided batchLoadFn for this loader with the batch's keys and
   // with the loader as the `this` context.
-  var batchPromise;
+  let batchPromise;
   try {
     batchPromise = loader._batchLoadFn(batch.keys);
   } catch (e) {
@@ -365,8 +365,8 @@ function dispatchBatch<K, V>(
       resolveCacheHits(batch);
 
       // Step through values, resolving or rejecting each Promise in the batch.
-      for (var i = 0; i < batch.callbacks.length; i++) {
-        var value = values[i];
+      for (let i = 0; i < batch.callbacks.length; i++) {
+        const value = values[i];
         if (value instanceof Error) {
           batch.callbacks[i].reject(value);
         } else {
@@ -388,7 +388,7 @@ function failedDispatch<K, V>(
 ) {
   // Cache hits are resolved, even though the batch failed.
   resolveCacheHits(batch);
-  for (var i = 0; i < batch.keys.length; i++) {
+  for (let i = 0; i < batch.keys.length; i++) {
     loader.clear(batch.keys[i]);
     batch.callbacks[i].reject(error);
   }
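Taken together with the try/catch in dispatchBatch, this means a throwing or rejecting batch function rejects every queued load with the same error, and (via loader.clear) evicts those keys so a later load can retry. A sketch of the observable behavior:

const failing = new DataLoader(async () => {
  throw new Error('db down');
});
failing.load('a').catch((err) => console.log(err.message)); // 'db down'
// 'a' was cleared from the cache, so a later load('a') will try again.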
@@ -397,19 +397,19 @@ function failedDispatch<K, V>(
 // Private: Resolves the Promises for any cache hits in this batch.
 function resolveCacheHits(batch: Batch<any, any>) {
   if (batch.cacheHits) {
-    for (var i = 0; i < batch.cacheHits.length; i++) {
+    for (let i = 0; i < batch.cacheHits.length; i++) {
       batch.cacheHits[i]();
     }
   }
 }
 
 // Private: given the DataLoader's options, produce a valid max batch size.
 function getValidMaxBatchSize(options: ?Options<any, any, any>): number {
-  var shouldBatch = !options || options.batch !== false;
+  const shouldBatch = !options || options.batch !== false;
   if (!shouldBatch) {
     return 1;
   }
-  var maxBatchSize = options && options.maxBatchSize;
+  const maxBatchSize = options && options.maxBatchSize;
   if (maxBatchSize === undefined) {
     return Infinity;
   }
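So { batch: false } is simply maxBatchSize 1: every key dispatches alone. A capped loader splits one tick's keys into size-limited batches; a sketch reusing the stand-in batchFn from the earlier example:

const capped = new DataLoader(batchFn, { maxBatchSize: 2 });
capped.load(1);
capped.load(2);
capped.load(3);
// batchFn is invoked twice: first with [1, 2], then with [3].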
@@ -425,7 +425,7 @@ function getValidMaxBatchSize(options: ?Options<any, any, any>): number {
 function getValidBatchScheduleFn(
   options: ?Options<any, any, any>,
 ): (() => void) => void {
-  var batchScheduleFn = options && options.batchScheduleFn;
+  const batchScheduleFn = options && options.batchScheduleFn;
   if (batchScheduleFn === undefined) {
     return enqueuePostPromiseJob;
   }
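batchScheduleFn is the documented hook for widening the batching window beyond the default post-microtask dispatch, for example collecting keys over a fixed interval:

const windowed = new DataLoader(batchFn, {
  // Collect keys for 10ms before dispatching one batch
  // (trades latency for larger batches).
  batchScheduleFn: (callback) => setTimeout(callback, 10),
});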
@@ -439,7 +439,7 @@ function getValidBatchScheduleFn(
 
 // Private: given the DataLoader's options, produce a cache key function.
 function getValidCacheKeyFn<K, C>(options: ?Options<K, any, C>): K => C {
-  var cacheKeyFn = options && options.cacheKeyFn;
+  const cacheKeyFn = options && options.cacheKeyFn;
   if (cacheKeyFn === undefined) {
     return (key => key: any);
   }
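The identity default means object keys are cached by reference; a cacheKeyFn maps structurally equal keys to one cache entry. A common sketch:

const objLoader = new DataLoader(batchFn, {
  // Structurally equal key objects now share a cache entry.
  cacheKeyFn: (key) => JSON.stringify(key),
});
objLoader.load({ id: 5 });
objLoader.load({ id: 5 }); // cache hit despite being a distinct object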
@@ -453,17 +453,17 @@ function getValidCacheKeyFn<K, C>(options: ?Options<K, any, C>): K => C {
 function getValidCacheMap<K, V, C>(
   options: ?Options<K, V, C>,
 ): CacheMap<C, Promise<V>> | null {
-  var shouldCache = !options || options.cache !== false;
+  const shouldCache = !options || options.cache !== false;
   if (!shouldCache) {
     return null;
   }
-  var cacheMap = options && options.cacheMap;
+  const cacheMap = options && options.cacheMap;
   if (cacheMap === undefined) {
     return new Map();
   }
   if (cacheMap !== null) {
-    var cacheFunctions = ['get', 'set', 'delete', 'clear'];
-    var missingFunctions = cacheFunctions.filter(
+    const cacheFunctions = ['get', 'set', 'delete', 'clear'];
+    const missingFunctions = cacheFunctions.filter(
       fnName => cacheMap && typeof cacheMap[fnName] !== 'function',
     );
     if (missingFunctions.length !== 0) {
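Any object implementing get, set, delete, and clear passes this validation, so the default Map can be swapped for a size-bounded cache with the same surface. A sketch, where lruCache is a hypothetical object with those four methods:

const bounded = new DataLoader(batchFn, { cacheMap: lruCache });

// Passing { cache: false } skips the cache entirely (getValidCacheMap
// returns null), so every load reaches the batch function.
const uncached = new DataLoader(batchFn, { cache: false });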