18 changes: 16 additions & 2 deletions source/core/constants.ts
@@ -58,8 +58,22 @@ export const responseTypes = {
// The maximum value of a 32bit int (see issue #117)
export const maxSafeTimeout = 2_147_483_647;

// Size in bytes of a typical form boundary, used to help estimate upload size
export const usualFormBoundarySize = new TextEncoder().encode('------WebKitFormBoundaryaxpyiPgbbPti10Rw').length;
// Used to help estimate upload size
async function getFormBodySize(formData: FormData) {
const responseText = await new Response(formData).text();

return responseText.length;
}

const formData = new FormData();

export const initialFormSize = await getFormBodySize(formData);

formData.append('', '');

const largerFormSize = await getFormBodySize(formData);

export const formBoundarySize = largerFormSize - initialFormSize;

export const stop = Symbol('stop');

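For intuition, the subtraction above isolates the fixed per-entry overhead of the multipart encoding. Here is a rough sketch of the two serialized bodies with a placeholder boundary; the real boundary string and exact CRLF layout vary by runtime, so this is illustrative only:

// Illustrative only; not the exact bytes produced by any particular runtime.
//
// An empty FormData serializes to roughly just the closing boundary line:
//   --<boundary>--
//
// After formData.append('', ''), one entry block is added in front of it:
//   --<boundary>
//   Content-Disposition: form-data; name=""
//
//   --<boundary>--
//
// The difference between the two body lengths is therefore the per-entry overhead
// (boundary line, Content-Disposition header with an empty name, and CRLFs), which
// getBodySize() in body.ts adds once per field, plus the key and value bytes.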
44 changes: 18 additions & 26 deletions source/utils/body.ts
@@ -1,5 +1,7 @@
import type {Options} from '../types/options.js';
import {usualFormBoundarySize} from '../core/constants.js';
import {initialFormSize, formBoundarySize} from '../core/constants.js';

const encoder = new TextEncoder();

// eslint-disable-next-line @typescript-eslint/ban-types
export const getBodySize = (body?: BodyInit | null): number => {
Collaborator
Could we simplify this whole function by using a temporary response, like you did for forms?

const buffer = await new Response(body).arrayBuffer();
return buffer.byteLength;

Contributor Author (@Richienb, Sep 12, 2025)
Using Response to get the size consumes the entire body, whereas the size/length properties we currently use, when available, don't.
We don't want ky to consume the entire request body twice; the underlying fetch API doesn't do that. If we had to (and this might not be so bad), we might as well pass the buffer we construct straight through to the fetch request.

Collaborator
You're right that we don't want to consume the stream if the body is a stream. But we can't get totalBytes for a stream anyway, so I was thinking that if the body is a stream, we short-circuit with a size of 0.

Contributor Author (@Richienb, Sep 12, 2025)
Yes, that's what we already do - ReadableStreams pass right through and return 0.
What I'm referring to is ArrayBuffers, or Files/Blobs in FormData, which have a size property and are also iterable. Iterating them is what new Response does; reading the size property is what we currently do.

Collaborator
I know that we already skip size checks for streams. And you have maintained that with these changes, which is great. But that's why your reply about consuming the body is confusing to me. All non-stream body types are simple objects that are not permanently consumed the way streams are.

If you mean that you want to avoid extra iterations on the body, as in O(n) vs O(n*2) performance, I think that's reasonable. But our main problem at the moment is correctness, not performance, and the network is going to be the limiting factor for performance. IMO, if we can leverage Response to calculate the size, then we should.

There are also performance optimizations we could make, like only using Response to calculate the size for complex types like FormData. We could also re-use the ArrayBuffer that it generates in the process.
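A minimal sketch of that idea, assuming the serialization cost is only paid for complex bodies like FormData and the resulting buffer is reused; the helper name and shape are hypothetical, not part of this PR:

// Hypothetical sketch of the suggestion above, not ky's actual implementation.
// Complex bodies (FormData) are serialized once via a temporary Response; the
// resulting ArrayBuffer is reused as the request body so nothing is read twice.
async function getSizedBody(body: BodyInit): Promise<{size: number; body: BodyInit; contentType?: string}> {
	if (body instanceof FormData) {
		const response = new Response(body);
		const buffer = await response.arrayBuffer();
		// The multipart boundary lives in the generated Content-Type header,
		// so it must be carried over when the raw buffer replaces the FormData.
		return {size: buffer.byteLength, body: buffer, contentType: response.headers.get('content-type') ?? undefined};
	}

	// Simple types keep the cheap size/length checks and pass through unchanged.
	if (typeof body === 'string') {
		return {size: new TextEncoder().encode(body).length, body};
	}

	if (body instanceof Blob) {
		return {size: body.size, body};
	}

	// Streams and anything else: size unknown.
	return {size: 0, body};
}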

@@ -8,14 +10,19 @@ export const getBodySize = (body?: BodyInit | null): number => {
}

if (body instanceof FormData) {
// This is an approximation, as FormData size calculation is not straightforward
let size = 0;
let size = initialFormSize;

for (const [key, value] of body) {
size += usualFormBoundarySize;
size += new TextEncoder().encode(`Content-Disposition: form-data; name="${key}"`).length;
size += formBoundarySize;
size += encoder.encode(key).length;

if (value instanceof Blob) {
size += encoder.encode(`; filename="${value.name ?? 'blob'}"`).length;
size += encoder.encode(`\r\nContent-Type: ${value.type || 'application/octet-stream'}`).length;
}

size += typeof value === 'string'
? new TextEncoder().encode(value).length
? encoder.encode(value).length
: value.size;
}

@@ -26,29 +33,16 @@ export const getBodySize = (body?: BodyInit | null): number => {
return body.size;
}

if (body instanceof ArrayBuffer) {
if (body instanceof ArrayBuffer || ArrayBuffer.isView(body)) {
return body.byteLength;
}

if (typeof body === 'string') {
return new TextEncoder().encode(body).length;
return encoder.encode(body).length;
}

if (body instanceof URLSearchParams) {
return new TextEncoder().encode(body.toString()).length;
}

if ('byteLength' in body) {
return (body).byteLength;
}

if (typeof body === 'object' && body !== null) {
try {
const jsonString = JSON.stringify(body);
return new TextEncoder().encode(jsonString).length;
} catch {
return 0;
}
return encoder.encode(body.toString()).length;
}

return 0; // Default case, unable to determine size
@@ -127,6 +121,7 @@ export const streamRequest = (request: Request, onUploadProgress: Options['onUpl
const {done, value} = await reader.read();
if (done) {
// Ensure 100% progress is reported when the upload is complete
// TODO: Don't report duplicate completion events
if (onUploadProgress) {
onUploadProgress({percent: 1, transferredBytes, totalBytes: Math.max(totalBytes, transferredBytes)}, new Uint8Array());
}
@@ -136,10 +131,7 @@
}

transferredBytes += value.byteLength;
let percent = totalBytes === 0 ? 0 : transferredBytes / totalBytes;
if (totalBytes < transferredBytes || percent === 1) {
percent = 0.99;
}
const percent = totalBytes === 0 ? 0 : transferredBytes / totalBytes;

if (onUploadProgress) {
onUploadProgress({percent: Number(percent.toFixed(2)), transferredBytes, totalBytes}, value);
146 changes: 146 additions & 0 deletions test/body-size.ts
@@ -0,0 +1,146 @@
import test, {type ExecutionContext} from 'ava';
import {getBodySize} from '../source/utils/body.js';
import {createLargeBlob} from './helpers/create-large-file.js';

async function testBodySize(t: ExecutionContext, body: unknown) {
const actualSize = getBodySize(body);
const expectedText = await new Response(body).text();
Collaborator (@sholladay, Sep 11, 2025)
These tests are mostly just mimicking the current implementation to test the internal utilities. I would prefer that they use a server and Ky itself, as most of the other tests do, to echo back the deserialized body length it receives (and maybe also the Content-Length), and then run assertions for different body types based on those values and the totalBytes parameter. That would make the assertions more realistic and less coupled to the implementation.
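A hedged sketch of that style of test; the server helper and its options are assumptions modeled on ky's existing test utilities and may differ from the actual API:

// Illustrative only, mirroring the reviewer's suggestion rather than this PR's tests.
import test from 'ava';
import ky from '../source/index.js';
import {createHttpTestServer} from './helpers/create-http-test-server.js'; // Assumed helper name.

test('totalBytes matches the body size the server receives', async t => {
	const server = await createHttpTestServer({bodyParser: false});
	server.post('/', async (request, response) => {
		// Count the raw bytes of the request body as they arrive.
		let received = 0;
		for await (const chunk of request) {
			received += chunk.length;
		}

		response.end(String(received));
	});

	const formData = new FormData();
	formData.append('field', 'value');

	let reportedTotal = 0;
	const received = Number(await ky.post(server.url, {
		body: formData,
		onUploadProgress({totalBytes}) {
			reportedTotal = totalBytes;
		},
	}).text());

	t.is(reportedTotal, received);
	await server.close();
});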

const expectedSize = expectedText.length;

t.is(actualSize, expectedSize, `\`${expectedText}\` predicted body size (${actualSize}) not actual size ${expectedSize}`);
}

const encoder = new TextEncoder();
const encoded = encoder.encode('unicorn');
const encoded2 = encoder.encode('abcd');
const encoded4 = encoder.encode('abcdefgh');
const encoded8 = encoder.encode('abcdefghabcdefgh');

// Test all supported body types (https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API/Using_Fetch#setting_a_body)
test('string', async t => {
await testBodySize(t, 'unicorn');
});

test('ArrayBuffer', async t => {
await testBodySize(t, encoded.buffer);
});

test('TypedArray', async t => {
await testBodySize(t, encoded);
await testBodySize(t, new Uint8Array(encoded));
await testBodySize(t, new Uint8ClampedArray(encoded));
await testBodySize(t, new Int8Array(encoded));

await testBodySize(t, new Uint16Array(encoded2.buffer));
await testBodySize(t, new Int16Array(encoded2.buffer));

await testBodySize(t, new Uint32Array(encoded4.buffer));
await testBodySize(t, new Int32Array(encoded4.buffer));
await testBodySize(t, new Float32Array(encoded4.buffer));

await testBodySize(t, new Float64Array(encoded8.buffer));

await testBodySize(t, new BigInt64Array(encoded8.buffer));
await testBodySize(t, new BigUint64Array(encoded8.buffer));
});

test('DataView', async t => {
await testBodySize(t, new DataView(encoded.buffer));
});

test('Blob', async t => {
// Test with different combinations of parameters, file type, content type, filename, etc.
await testBodySize(t, new Blob(['unicorn'], {type: 'text/plain'}));
await testBodySize(t, new Blob(['unicorn'], {type: 'customtype'}));
await testBodySize(t, new Blob(['unicorn']));
});

test('File', async t => {
await testBodySize(t, new File(['unicorn'], 'unicorn.txt', {type: 'text/plain'}));
await testBodySize(t, new File(['unicorn'], 'unicorn.txt'));
});

test('URLSearchParams', async t => {
await testBodySize(t, new URLSearchParams({foo: 'bar', baz: 'qux'}));
});

test('FormData - string', async t => {
const formData = new FormData();
formData.append('field', 'value');
await testBodySize(t, formData);
});

test('FormData - multiple strings', async t => {
const formData = new FormData();
formData.append('field1', 'value1');
formData.append('field2', 'value2');
await testBodySize(t, formData);
});

test('FormData - blob', async t => {
const formData = new FormData();
formData.append('file', new Blob(['test content']));
await testBodySize(t, formData);
});

test('FormData - blob with filename', async t => {
const formData = new FormData();
formData.append('file', new Blob(['test content']), 'test.txt');
await testBodySize(t, formData);
});

test('FormData - multiple fields', async t => {
const formData = new FormData();
formData.append('file', new Blob(['test content']), 'test.txt');
formData.append('field1', 'value1');
formData.append('field2', 'value2');
await testBodySize(t, formData);
});

test('FormData - blob from buffer', async t => {
const formData = new FormData();
formData.append('file', new Blob([encoded]), 'test.txt');
await testBodySize(t, formData);
});

test('FormData - blob with content type', async t => {
const formData = new FormData();
formData.append('file', new Blob(['test content'], {type: 'text/plain'}), 'test.txt');
await testBodySize(t, formData);
});

test('FormData - multiple blobs', async t => {
const formData = new FormData();
formData.append('file1', new Blob(['file content 1'], {type: 'text/plain'}), 'file1.txt');
formData.append('file2', new Blob(['file content 2'], {type: 'text/plain'}), 'file2.txt');
await testBodySize(t, formData);
});

test('FormData - file', async t => {
const formData = new FormData();
formData.append('file', new File(['test content'], 'test.txt', {type: 'text/plain'}));
await testBodySize(t, formData);
});

test('FormData - large blob', async t => {
const largeBlob = createLargeBlob(10); // 10MB Blob
const formData = new FormData();
formData.append('file', largeBlob, 'large-file.bin');
await testBodySize(t, formData);
});

test.failing('ReadableStream', async t => {
const stream = new ReadableStream({
start(controller) {
controller.enqueue(encoder.encode('unicorn'));
controller.close();
},
});

await testBodySize(t, stream);
});

test('null and undefined (no body)', async t => {
await testBodySize(t, null);
await testBodySize(t, undefined);
});
6 changes: 3 additions & 3 deletions test/stream.ts
@@ -36,7 +36,7 @@ test('POST JSON with upload progress', async t => {

// Check the first progress update
t.true(
data[0].percent >= 0 && data[0].percent < 1,
data[0].percent >= 0 && data[0].percent <= 1,
'First update should have progress between 0 and 100%',
);
t.true(
@@ -118,11 +118,11 @@ test('POST FormData with 10MB file upload progress', async t => {
for (let i = 1; i < data.length - 1; i++) {
t.true(
data[i].percent >= data[i - 1].percent,
`Update ${i} should have higher or equal percent than previous`,
`Update ${i} should have higher or equal percent than previous (${data[i].percent} < ${data[i - 1].percent})`,
);
t.true(
data[i].transferredBytes >= data[i - 1].transferredBytes,
`Update ${i} should have more or equal transferred bytes than previous`,
`Update ${i} should have more or equal transferred bytes than previous (${data[i].transferredBytes} < ${data[i - 1].transferredBytes})`,
);
}
