
Commit c8e78bd

@tus/s3-store: fix unhandled promise rejection (#728)
1 parent: 274a0d1 · commit: c8e78bd

File tree: 4 files changed (+52 -4 lines)


.changeset/twenty-crews-punch.md (+5)
@@ -0,0 +1,5 @@
+---
+"@tus/s3-store": patch
+---
+
+Fix unhandled promise rejection when uploading a part fails, in which case we returned too early, leaving other parts running in the background.
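For context on the sentence above: Promise.all rejects as soon as the first of its inputs rejects, and it only observes the promises that are in the array at the moment it is called. Parts that finish later keep pushing new upload promises, and a rejection among those has nothing awaiting it. A minimal TypeScript sketch of that shape, purely illustrative and not the store's actual code:

// Minimal sketch of the failure mode described in the changeset; not the store's code.
async function buggyWrite(): Promise<void> {
  const promises: Promise<string>[] = []

  promises.push(Promise.resolve('etag1'))
  promises.push(Promise.reject(new Error('part 2 failed')))

  // A part that only finishes later, after we have already bailed out, similar to
  // the splitter's 'chunkFinished' handler pushing more promises in the diff below.
  setTimeout(() => {
    promises.push(Promise.reject(new Error('part 3 failed'))) // nothing ever awaits this
  }, 50)

  // Rejects with part 2's error; buggyWrite() returns before part 3 even exists.
  await Promise.all(promises)
}

buggyWrite().catch(() => {
  // Part 2's error is handled here, but part 3's later rejection is never observed,
  // so Node reports an unhandled promise rejection.
})

The fix in packages/s3-store/src/index.ts below closes both gaps: it destroys the splitter so no further parts start once one fails, and it awaits Promise.allSettled before Promise.all so every part has finished before the error is rethrown.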

packages/s3-store/package.json (+1)
@@ -16,6 +16,7 @@
   ],
   "scripts": {
     "build": "tsc --build",
+    "pretest": "tsc --build",
     "test": "mocha --timeout 40000 --exit --extension ts --require ts-node/register"
   },
   "dependencies": {

packages/s3-store/src/index.ts (+12 -2)
@@ -409,19 +409,25 @@ export class S3Store extends DataStore {
             bytesUploaded += partSize
             resolve()
           } catch (error) {
+            // If there's an error, destroy the splitter to stop processing more chunks
+            splitterStream.destroy(error)
             reject(error)
           } finally {
             fsProm.rm(path).catch(() => {
               /* ignore */
             })
-            acquiredPermit?.release()
+            acquiredPermit?.release().catch(() => {
+              /* ignore */
+            })
           }
         })

         promises.push(deferred)
       })
       .on('chunkError', () => {
-        permit?.release()
+        permit?.release().catch(() => {
+          /* ignore */
+        })
       })

     try {
@@ -437,6 +443,10 @@

       promises.push(Promise.reject(error))
     } finally {
+      // Wait for all promises. We don't want to return
+      // early and have promises running in the background.
+      await Promise.allSettled(promises)
+      // But we do want to reject the promise if any of the promises reject.
       await Promise.all(promises)
     }

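The finally block now lets every in-flight part settle before rethrowing, so nothing keeps running or rejecting unobserved after the method returns. The same wait-then-rethrow pattern in isolation; the helper name settleAll is made up here for illustration and is not part of @tus/s3-store:

// Illustrative helper (not part of the package): wait for everything, then surface the first error.
async function settleAll<T>(promises: Promise<T>[]): Promise<T[]> {
  // allSettled never rejects; it resolves once every promise has either fulfilled
  // or rejected, so no upload is still running when we move on.
  await Promise.allSettled(promises)
  // Everything has settled by now, so this either resolves with all values or
  // rejects with the first error, and no rejection is left unobserved.
  return Promise.all(promises)
}

Awaiting settleAll(promises) behaves like awaiting Promise.all(promises), except control never returns before the last promise has settled.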

packages/s3-store/test/index.ts (+34 -2)
@@ -278,14 +278,14 @@ describe('S3DataStore', () => {
     }
   })

-  it('should use default maxMultipartParts when not specified', function () {
+  it('should use default maxMultipartParts when not specified', () => {
     const store = new S3Store({
       s3ClientConfig,
     })
     assert.equal(store.maxMultipartParts, 10000)
   })

-  it('should use custom maxMultipartParts when specified', function () {
+  it('should use custom maxMultipartParts when specified', () => {
     const customMaxParts = 5000
     const store = new S3Store({
       s3ClientConfig,
@@ -294,6 +294,38 @@
     assert.equal(store.maxMultipartParts, customMaxParts)
   })

+  it('should handle multiple concurrent part uploads with some failures', async () => {
+    const store = new S3Store({
+      partSize: 5 * 1024 * 1024,
+      maxConcurrentPartUploads: 3, // Allow 3 concurrent uploads
+      s3ClientConfig,
+    })
+
+    const size = 15 * 1024 * 1024 // 15MB will be split into 3 parts
+    const upload = new Upload({
+      id: shared.testId('concurrent-failures'),
+      size,
+      offset: 0,
+    })
+
+    // @ts-expect-error private method
+    const uploadPartStub = sinon.stub(store, 'uploadPart')
+
+    // Make the second part upload fail
+    uploadPartStub.onCall(0).resolves('etag1')
+    uploadPartStub.onCall(1).rejects(new Error('Part 2 upload failed'))
+    uploadPartStub.onCall(2).resolves('etag3')
+
+    await store.create(upload)
+
+    try {
+      await store.write(Readable.from(Buffer.alloc(size)), upload.id, upload.offset)
+      assert.fail('Expected write to fail but it succeeded')
+    } catch (error) {
+      assert.equal(uploadPartStub.callCount, 2, '2 parts should have been attempted')
+    }
+  })
+
   shared.shouldHaveStoreMethods()
   shared.shouldCreateUploads()
   shared.shouldRemoveUploads() // Termination extension
