diff --git a/lib/lib-storage/src/chunks/getChunkStream.ts b/lib/lib-storage/src/chunks/getChunkStream.ts
index 21ac3c779aff6..86e33d0b27a5a 100644
--- a/lib/lib-storage/src/chunks/getChunkStream.ts
+++ b/lib/lib-storage/src/chunks/getChunkStream.ts
@@ -19,7 +19,7 @@ export async function* getChunkStream(
     currentBuffer.chunks.push(datum);
     currentBuffer.length += datum.byteLength;
 
-    while (currentBuffer.length >= partSize) {
+    while (currentBuffer.length > partSize) {
       /**
        * Concat all the buffers together once if there is more than one to concat. Attempt
        * to minimize concats as Buffer.Concat is an extremely expensive operation.
diff --git a/lib/lib-storage/src/chunks/getDataReadableStream.spec.ts b/lib/lib-storage/src/chunks/getDataReadableStream.spec.ts
index ad75495bb3306..bd382f0b7b987 100644
--- a/lib/lib-storage/src/chunks/getDataReadableStream.spec.ts
+++ b/lib/lib-storage/src/chunks/getDataReadableStream.spec.ts
@@ -44,6 +44,15 @@ describe("chunkFromReadable.name", () => {
     expect(chunks[0].lastPart).toBe(true);
   });
 
+  it("should return a single chunk if the stream is equal to partSize", async () => {
+    const chunks = await getChunks(20, 5, 100);
+
+    expect(chunks.length).toBe(1);
+    expect(byteLength(chunks[0].data)).toEqual(100);
+    expect(chunks[0].partNumber).toEqual(1);
+    expect(chunks[0].lastPart).toBe(true);
+  });
+
   it("should return chunks of a specific size", async () => {
     const chunks = await getChunks(58, 1, 20);