Skip to content

Commit 27ceb35

Browse files
box-sdk-build
and
box-sdk-build
authored
fix: Update chunked upload (box/box-codegen#523) (#247)
Co-authored-by: box-sdk-build <[email protected]>
1 parent 5237972 commit 27ceb35

File tree

5 files changed

+46
-35
lines changed

5 files changed

+46
-35
lines changed

.codegen.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
{ "engineHash": "ef31179", "specHash": "e95d6fa", "version": "1.1.0" }
1+
{ "engineHash": "89557a9", "specHash": "e95d6fa", "version": "1.1.0" }

package-lock.json

Lines changed: 12 additions & 12 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src/internal/utils.ts

Lines changed: 30 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -196,35 +196,45 @@ export async function readByteStream(byteStream: Readable) {
196196

197197
export async function* iterateChunks(
198198
stream: Readable,
199-
chunkSize: number
199+
chunkSize: number,
200+
fileSize: number
200201
): Iterator<Readable> {
201202
let buffers: Buffer[] = [];
202203
let totalSize = 0;
203-
for await (const data of stream) {
204-
if (!Buffer.isBuffer(data)) {
205-
throw new Error('Expecting a chunk of stream to be a Buffer');
206-
}
207-
buffers.push(data);
208-
totalSize += data.length;
204+
let consumedSize = 0;
205+
while (consumedSize < fileSize && !stream.readableEnded) {
206+
for await (const data of stream) {
207+
if (!Buffer.isBuffer(data)) {
208+
throw new Error('Expecting a chunk of stream to be a Buffer');
209+
}
210+
consumedSize += data.length;
211+
buffers.push(data);
212+
totalSize += data.length;
209213

210-
if (totalSize < chunkSize) {
211-
continue;
212-
}
214+
if (totalSize < chunkSize) {
215+
continue;
216+
}
213217

214-
const buffer = Buffer.concat(buffers);
218+
const buffer = Buffer.concat(buffers);
215219

216-
let start = 0;
217-
while (totalSize >= chunkSize) {
218-
yield generateByteStreamFromBuffer(
219-
buffer.subarray(start, start + chunkSize)
220-
);
221-
start += chunkSize;
222-
totalSize -= chunkSize;
223-
}
220+
let start = 0;
221+
while (totalSize >= chunkSize) {
222+
yield generateByteStreamFromBuffer(
223+
buffer.subarray(start, start + chunkSize)
224+
);
225+
start += chunkSize;
226+
totalSize -= chunkSize;
227+
}
224228

225-
buffers = totalSize > 0 ? [buffer.subarray(start)] : [];
229+
buffers = totalSize > 0 ? [buffer.subarray(start)] : [];
230+
}
226231
}
227232

233+
if (consumedSize !== fileSize) {
234+
throw new Error(
235+
`Stream size ${consumedSize} does not match expected file size ${fileSize}`
236+
);
237+
}
228238
if (totalSize > 0) {
229239
yield generateByteStreamFromBuffer(Buffer.concat(buffers));
230240
}

src/managers/chunkedUploads.generated.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -775,7 +775,7 @@ export class ChunkedUploadsManager {
775775
throw new Error('Assertion failed');
776776
}
777777
const fileHash: Hash = new Hash({ algorithm: 'sha1' as HashName });
778-
const chunksIterator: Iterator = iterateChunks(file, partSize);
778+
const chunksIterator: Iterator = iterateChunks(file, partSize, fileSize);
779779
const results: PartAccumulator = await reduceIterator(
780780
chunksIterator,
781781
this.reducer.bind(this),

src/networking/fetch.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -319,7 +319,8 @@ export async function fetch(
319319
url: resource,
320320
queryParams: params,
321321
headers: (requestInit.headers as { [key: string]: string }) ?? {},
322-
body: requestInit.body,
322+
body:
323+
typeof requestInit.body === 'string' ? requestInit.body : undefined,
323324
},
324325
responseInfo: {
325326
statusCode: fetchResponse.status,

0 commit comments

Comments (0)