From 0a554cc3572ba563a6ad04da7f9b570c3c66216a Mon Sep 17 00:00:00 2001
From: Stefano De Vuono
Date: Wed, 14 Aug 2024 17:47:08 -0700
Subject: [PATCH] test(upload-client): allow large byte uploads

There was an issue with large byte uploads in tests. This commit fixes it by
first converting the bytes to a UnixFS stream before building the CAR object
in packages/upload-client/test/helpers/car.js.
---
 packages/upload-client/test/helpers/car.js | 25 ++++++++++++----------
 1 file changed, 14 insertions(+), 11 deletions(-)

diff --git a/packages/upload-client/test/helpers/car.js b/packages/upload-client/test/helpers/car.js
index b802cc889..51a8b15df 100644
--- a/packages/upload-client/test/helpers/car.js
+++ b/packages/upload-client/test/helpers/car.js
@@ -1,23 +1,26 @@
 import { CarWriter } from '@ipld/car'
 import * as CAR from '@ucanto/transport/car'
-import { toBlock } from './block.js'
+import { createFileEncoderStream } from '../../src/unixfs.js'
 
 /**
  * @param {Uint8Array} bytes
  */
 export async function toCAR(bytes) {
-  const block = await toBlock(bytes)
-  // @ts-expect-error old multiformats in @ipld/car
-  const { writer, out } = CarWriter.create(block.cid)
-  writer.put(block)
+  const readableStream = ((createFileEncoderStream(new Blob([bytes]), {})))
+
+  let blocks = [], chunks = []
+  // @ts-expect-error readable Stream is also an async iterable
+  for await (let block of readableStream) blocks.push(block)
+  const rootCID = blocks.at(-1)?.cid
+
+  const { writer, out } = CarWriter.create(rootCID)
+
+  for (const block of blocks) writer.put(block)
   writer.close()
 
-  const chunks = []
-  for await (const chunk of out) {
-    chunks.push(chunk)
-  }
+  for await (const chunk of out) chunks.push(chunk)
+
   const blob = new Blob(chunks)
   const cid = await CAR.codec.link(new Uint8Array(await blob.arrayBuffer()))
-
-  return Object.assign(blob, { cid, roots: [block.cid] })
+  return Object.assign(blob, { cid, roots: [rootCID] })
 }
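
For context, a minimal sketch of how the reworked helper might be exercised in a
test. The import paths, payload size, and node:assert usage are illustrative
assumptions; only toCAR comes from the patched file.

    import assert from 'node:assert'
    import { toCAR } from './helpers/car.js'

    // Payload large enough to span several UnixFS chunks (the size is
    // arbitrary, chosen only to exercise the multi-block path).
    const bytes = new Uint8Array(5 * 1024 * 1024).fill(7)
    const car = await toCAR(bytes)

    // toCAR returns a Blob with the CAR's own CID and the UnixFS root CID
    // attached via Object.assign, so a test can assert on each of them.
    assert.ok(car.cid)
    assert.equal(car.roots.length, 1)
    assert.ok(car.size >= bytes.byteLength)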