@@ -120,6 +120,21 @@ describe('Primary API', () => {
     expect(loadCalls).toEqual([[1, 2], [3]]);
   });
 
+  it('applies maxBatchSize correctly with duplicate keys', async () => {
+    const [identityLoader, loadCalls] = idLoader<string>({
+      maxBatchSize: 3,
+      batchScheduleFn: callback => { setTimeout(callback, 100); },
+    });
+
+    const values = ['a', 'b', 'a', 'a', 'a', 'b', 'c'];
+    const results = await Promise.all(values.map(
+      value => identityLoader.load(value)
+    ));
+
+    expect(results).toEqual(values);
+    expect(loadCalls).toEqual([['a', 'b', 'c']]);
+  });
+
   it('batches cached requests', async () => {
     const loadCalls = [];
     let resolveBatch = () => {};
@@ -185,8 +200,9 @@ describe('Primary API', () => {
     // Move to next macro-task (tick)
     await new Promise(setImmediate);
 
-    // Promise 1 resolves first since max batch size is 1
-    expect(promise1Resolved).toBe(true);
+    // Promise 1 resolves first since max batch size is 1,
+    // but it still hasn't resolved yet.
+    expect(promise1Resolved).toBe(false);
     expect(promise2Resolved).toBe(false);
 
     resolveBatch();
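
The new test relies on the `idLoader` test helper defined earlier in this file, which is not shown in the hunks above. A minimal sketch of what it presumably looks like, assuming an identity DataLoader that records each batch of keys it receives (the names and types below are assumptions, not part of the diff):

```ts
// Hypothetical sketch of the idLoader helper the new test relies on (not in this diff).
// It returns an identity DataLoader plus a loadCalls array that records every batch
// of keys passed to the batch function, so tests can assert on how keys were batched.
import DataLoader from 'dataloader';

function idLoader<K>(
  options?: DataLoader.Options<K, K>,
): [DataLoader<K, K>, Array<ReadonlyArray<K>>] {
  const loadCalls: Array<ReadonlyArray<K>> = [];
  const identityLoader = new DataLoader<K, K>(keys => {
    loadCalls.push(keys);
    return Promise.resolve([...keys]);
  }, options);
  return [identityLoader, loadCalls];
}
```

Under this reading, `loadCalls` ends up as `[['a', 'b', 'c']]` because DataLoader's memoization cache deduplicates repeated keys before batching: the seven `load` calls collapse to three unique keys, which fit within a single batch of `maxBatchSize: 3`.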