Skip to content
This repository was archived by the owner on Mar 3, 2026. It is now read-only.

Commit aa80374

Browse files
committed
test: Add system-tests for resumable multi-chunk uploads
1 parent 4fb3e00 commit aa80374

1 file changed

Lines changed: 112 additions & 0 deletions

File tree

system-test/storage.ts

Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2688,6 +2688,118 @@ describe('storage', () => {
26882688
});
26892689
});
26902690

2691+
describe('resumable upload', () => {
2692+
describe('multi-chunk upload', () => {
2693+
describe('upload configurations', () => {
2694+
const filePath: string = FILES.logo.path;
2695+
const fileSize = fs.statSync(filePath).size;
2696+
let crc32c: string;
2697+
2698+
before(async () => {
2699+
// get a CRC32C value from the file
2700+
crc32c = await new Promise((resolve, reject) => {
2701+
const crc32c = new CRC32C();
2702+
2703+
fs.createReadStream(filePath)
2704+
.on('data', (d: Buffer) => crc32c.update(d))
2705+
.on('end', () => resolve(crc32c.toString()))
2706+
.on('error', reject);
2707+
});
2708+
});
2709+
2710+
it('should support uploads where `contentLength < chunkSize`', async () => {
2711+
const file = bucket.file(generateName());
2712+
2713+
const contentLength = fileSize;
2714+
// off by +1 to ensure `contentLength < chunkSize`
2715+
const chunkSize = fileSize + 1;
2716+
2717+
await bucket.upload(filePath, {
2718+
destination: file,
2719+
chunkSize,
2720+
metadata: {
2721+
contentLength,
2722+
},
2723+
});
2724+
2725+
const [metadata] = await file.getMetadata();
2726+
2727+
// assert we uploaded the expected data
2728+
assert.equal(metadata.crc32c, crc32c);
2729+
});
2730+
2731+
it('should support uploads where `contentLength % chunkSize != 0`', async () => {
2732+
const file = bucket.file(generateName());
2733+
2734+
const contentLength = fileSize;
2735+
// off by -1 to ensure `contentLength % chunkSize != 0`
2736+
const chunkSize = fileSize - 1;
2737+
2738+
await bucket.upload(filePath, {
2739+
destination: file,
2740+
chunkSize,
2741+
metadata: {
2742+
contentLength,
2743+
},
2744+
});
2745+
2746+
const [metadata] = await file.getMetadata();
2747+
2748+
// assert we uploaded the expected data
2749+
assert.equal(metadata.crc32c, crc32c);
2750+
});
2751+
2752+
it('should support uploads where `fileSize % chunkSize != 0` && `!contentLength`', async () => {
2753+
const file = bucket.file(generateName());
2754+
// off by +1 to ensure `contentLength % chunkSize != 0`
2755+
const chunkSize = fileSize + 1;
2756+
2757+
await bucket.upload(filePath, {
2758+
destination: file,
2759+
chunkSize,
2760+
});
2761+
2762+
const [metadata] = await file.getMetadata();
2763+
2764+
// assert we uploaded the expected data
2765+
assert.equal(metadata.crc32c, crc32c);
2766+
});
2767+
2768+
it('should support uploads where `fileSize < chunkSize && `!contentLength`', async () => {
2769+
const file = bucket.file(generateName());
2770+
// off by `* 2 +1` to ensure `fileSize < chunkSize`
2771+
const chunkSize = fileSize * 2 + 1;
2772+
2773+
await bucket.upload(filePath, {
2774+
destination: file,
2775+
chunkSize,
2776+
});
2777+
2778+
const [metadata] = await file.getMetadata();
2779+
2780+
// assert we uploaded the expected data
2781+
assert.equal(metadata.crc32c, crc32c);
2782+
});
2783+
2784+
it('should support uploads where `fileSize > chunkSize && `!contentLength`', async () => {
2785+
const file = bucket.file(generateName());
2786+
// off by -1 to ensure `fileSize > chunkSize`
2787+
const chunkSize = fileSize - 1;
2788+
2789+
await bucket.upload(filePath, {
2790+
destination: file,
2791+
chunkSize,
2792+
});
2793+
2794+
const [metadata] = await file.getMetadata();
2795+
2796+
// assert we uploaded the expected data
2797+
assert.equal(metadata.crc32c, crc32c);
2798+
});
2799+
});
2800+
});
2801+
});
2802+
26912803
describe('bucket upload with progress', () => {
26922804
it('show bytes sent with resumable upload', async () => {
26932805
const fileSize = fs.statSync(FILES.big.path).size;

0 commit comments

Comments (0)