Skip to content
This repository was archived by the owner on Mar 3, 2026. It is now read-only.

Commit 666402a

Browse files
authored
fix: Determine Content-Length Before Attempting Multi-chunk Upload (#2074)
* test: Add system-tests for resumable multi-chunk uploads
* test: streamline multi-chunk upload system tests
* chore: verify `Content-Range`: `bytes ${this.offset}-*/*` behavior
* style: typo
* style: typo
* fix: determine content-length before attempting multi-chunk upload
* test: Use bigger file for multi-chunk test since chunks need to be large
* feat: dynamically determine Content-Length when unspecified
* feat: dynamically determine Content-Length when unspecified
* docs: clarifications, make it easier for the future team
* docs: Another clarification
1 parent 4fb3e00 commit 666402a

3 files changed

Lines changed: 134 additions & 16 deletions

File tree

src/resumable-upload.ts

Lines changed: 37 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -665,7 +665,7 @@ export class Upload extends Writable {
665665

666666
let expectedUploadSize: number | undefined = undefined;
667667

668-
// Set `expectedUploadSize` to `contentLength` if available
668+
// Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available
669669
if (typeof this.contentLength === 'number') {
670670
expectedUploadSize = this.contentLength - this.numBytesWritten;
671671
}
@@ -718,13 +718,37 @@ export class Upload extends Writable {
718718
};
719719

720720
// If using multiple chunk upload, set appropriate header
721-
if (multiChunkMode && expectedUploadSize) {
722-
// The '-1' is because the ending byte is inclusive in the request.
723-
const endingByte = expectedUploadSize + this.numBytesWritten - 1;
724-
headers['Content-Length'] = expectedUploadSize;
721+
if (multiChunkMode) {
722+
// We need to know how much data is available upstream to set the `Content-Range` header.
723+
const oneChunkIterator = this.upstreamIterator(expectedUploadSize, true);
724+
const {value} = await oneChunkIterator.next();
725+
726+
const bytesToUpload = value!.chunk.byteLength;
727+
728+
// Important: we want to know if the upstream has ended and the queue is empty before
729+
// unshifting data back into the queue. This way we will know if this is the last request or not.
730+
const isLastChunkOfUpload = !(await this.waitForNextChunk());
731+
732+
// Important: put the data back in the queue for the actual upload iterator
733+
this.unshiftChunkBuffer(value!.chunk);
734+
735+
let totalObjectSize = this.contentLength;
736+
737+
if (typeof this.contentLength !== 'number' && isLastChunkOfUpload) {
738+
// Let's let the server know this is the last chunk since
739+
// we didn't know the content-length beforehand.
740+
totalObjectSize = bytesToUpload + this.numBytesWritten;
741+
}
742+
743+
// `- 1` as the ending byte is inclusive in the request.
744+
const endingByte = bytesToUpload + this.numBytesWritten - 1;
745+
746+
// `Content-Length` for multiple chunk uploads is the size of the chunk,
747+
// not the overall object
748+
headers['Content-Length'] = bytesToUpload;
725749
headers[
726750
'Content-Range'
727-
] = `bytes ${this.offset}-${endingByte}/${this.contentLength}`;
751+
] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`;
728752
} else {
729753
headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`;
730754
}
@@ -798,11 +822,13 @@ export class Upload extends Writable {
798822
// continue uploading next chunk
799823
this.continueUploading();
800824
} else if (!this.isSuccessfulResponse(resp.status)) {
801-
const err: ApiError = {
802-
code: resp.status,
803-
name: 'Upload failed',
804-
message: 'Upload failed',
805-
};
825+
const err: ApiError = new Error('Upload failed');
826+
err.code = resp.status;
827+
err.name = 'Upload failed';
828+
if (resp?.data) {
829+
err.errors = [resp?.data];
830+
}
831+
806832
this.destroy(err);
807833
} else {
808834
// remove the last chunk sent to free memory

system-test/storage.ts

Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ import {
3232
Notification,
3333
DeleteBucketCallback,
3434
CRC32C,
35+
UploadOptions,
3536
} from '../src';
3637
import * as nock from 'nock';
3738
import {Transform} from 'stream';
@@ -2688,6 +2689,87 @@ describe('storage', () => {
26882689
});
26892690
});
26902691

2692+
describe('resumable upload', () => {
2693+
describe('multi-chunk upload', () => {
2694+
describe('upload configurations', () => {
2695+
const filePath: string = FILES.big.path;
2696+
const fileSize = fs.statSync(filePath).size;
2697+
let crc32c: string;
2698+
2699+
before(async () => {
2700+
// get a CRC32C value from the file
2701+
crc32c = await new Promise((resolve, reject) => {
2702+
const crc32c = new CRC32C();
2703+
2704+
fs.createReadStream(filePath)
2705+
.on('data', (d: Buffer) => crc32c.update(d))
2706+
.on('end', () => resolve(crc32c.toString()))
2707+
.on('error', reject);
2708+
});
2709+
});
2710+
2711+
async function uploadAndVerify(
2712+
file: File,
2713+
options: Omit<UploadOptions, 'destination'>
2714+
) {
2715+
await bucket.upload(filePath, {
2716+
destination: file,
2717+
...options,
2718+
});
2719+
2720+
const [metadata] = await file.getMetadata();
2721+
2722+
// assert we uploaded the expected data
2723+
assert.equal(metadata.crc32c, crc32c);
2724+
}
2725+
2726+
it('should support uploads where `contentLength < chunkSize`', async () => {
2727+
const file = bucket.file(generateName());
2728+
2729+
const metadata = {contentLength: fileSize};
2730+
// off by +1 to ensure `contentLength < chunkSize`
2731+
const chunkSize = fileSize + 1;
2732+
2733+
await uploadAndVerify(file, {chunkSize, metadata});
2734+
});
2735+
2736+
it('should support uploads where `contentLength % chunkSize != 0`', async () => {
2737+
const file = bucket.file(generateName());
2738+
2739+
const metadata = {contentLength: fileSize};
2740+
// off by -1 to ensure `contentLength % chunkSize != 0`
2741+
const chunkSize = fileSize - 1;
2742+
2743+
await uploadAndVerify(file, {chunkSize, metadata});
2744+
});
2745+
2746+
it('should support uploads where `fileSize % chunkSize != 0` && `!contentLength`', async () => {
2747+
const file = bucket.file(generateName());
2748+
// off by +1 to ensure `fileSize % chunkSize != 0`
2749+
const chunkSize = fileSize + 1;
2750+
2751+
await uploadAndVerify(file, {chunkSize});
2752+
});
2753+
2754+
it('should support uploads where `fileSize < chunkSize && `!contentLength`', async () => {
2755+
const file = bucket.file(generateName());
2756+
// off by `* 2 +1` to ensure `fileSize < chunkSize`
2757+
const chunkSize = fileSize * 2 + 1;
2758+
2759+
await uploadAndVerify(file, {chunkSize});
2760+
});
2761+
2762+
it('should support uploads where `fileSize > chunkSize` && `!contentLength`', async () => {
2763+
const file = bucket.file(generateName());
2764+
// off by -1 to ensure `fileSize > chunkSize`
2765+
const chunkSize = fileSize - 1;
2766+
2767+
await uploadAndVerify(file, {chunkSize});
2768+
});
2769+
});
2770+
});
2771+
});
2772+
26912773
describe('bucket upload with progress', () => {
26922774
it('show bytes sent with resumable upload', async () => {
26932775
const fileSize = fs.statSync(FILES.big.path).size;

test/resumable-upload.ts

Lines changed: 15 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1045,6 +1045,9 @@ describe('resumable-upload', () => {
10451045
});
10461046

10471047
describe('request preparation', () => {
1048+
// Simulating the amount of data written from upstream (exhaustive)
1049+
const UPSTREAM_BUFFER_SIZE = 512;
1050+
const UPSTREAM_ENDED = true;
10481051
// a convenient handle for getting the request options
10491052
let reqOpts: GaxiosOptions;
10501053

@@ -1074,8 +1077,8 @@ describe('resumable-upload', () => {
10741077

10751078
reqOpts = requestOptions;
10761079
};
1077-
up.upstreamChunkBuffer = Buffer.alloc(512);
1078-
up.upstreamEnded = true;
1080+
up.upstreamChunkBuffer = Buffer.alloc(UPSTREAM_BUFFER_SIZE);
1081+
up.upstreamEnded = UPSTREAM_ENDED;
10791082
});
10801083

10811084
describe('single chunk', () => {
@@ -1153,18 +1156,25 @@ describe('resumable-upload', () => {
11531156

11541157
it('should prepare a valid request if `contentLength` is unknown', async () => {
11551158
const OFFSET = 100;
1159+
const EXPECTED_STREAM_AMOUNT = Math.min(
1160+
UPSTREAM_BUFFER_SIZE - OFFSET,
1161+
CHUNK_SIZE
1162+
);
1163+
const ENDING_BYTE = EXPECTED_STREAM_AMOUNT + OFFSET - 1;
11561164

11571165
up.offset = OFFSET;
11581166
up.contentLength = '*';
11591167

11601168
await up.startUploading();
11611169

1162-
const endByte = OFFSET + CHUNK_SIZE - 1;
11631170
assert(reqOpts.headers);
1164-
assert.equal(reqOpts.headers['Content-Length'], CHUNK_SIZE);
1171+
assert.equal(
1172+
reqOpts.headers['Content-Length'],
1173+
EXPECTED_STREAM_AMOUNT
1174+
);
11651175
assert.equal(
11661176
reqOpts.headers['Content-Range'],
1167-
`bytes ${OFFSET}-${endByte}/*`
1177+
`bytes ${OFFSET}-${ENDING_BYTE}/*`
11681178
);
11691179
assert.ok(
11701180
X_GOOG_API_HEADER_REGEX.test(reqOpts.headers['x-goog-api-client'])

0 commit comments

Comments (0)