diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 45f6d30..226bbd7 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -46,7 +46,7 @@ jobs:
         run: simg2img system.img system-raw.img
       - name: run sparse benchmark
-        run: bun scripts/simg2img.js system.img /tmp/system-raw.img
+        run: timeout 15s bun scripts/simg2img.js system.img /tmp/system-raw.img
      - name: check output matches
        run: cmp system-raw.img /tmp/system-raw.img
diff --git a/scripts/simg2img.js b/scripts/simg2img.js
index 1782370..f0a9922 100755
--- a/scripts/simg2img.js
+++ b/scripts/simg2img.js
@@ -5,14 +5,14 @@
 export async function simg2img(inputPath, outputPath) {
   const sparseImage = Bun.file(inputPath);
   const outputImage = Bun.file(outputPath);
-  const sparse = await Sparse.from(sparseImage);
+  const sparse = await Sparse.from(sparseImage.stream());
   if (!sparse) throw "Failed to parse sparse file";
 
   // FIXME: write out a "sparse" file? not supported by Bun
   const writer = outputImage.writer({ highWaterMark: 4 * 1024 * 1024 });
-  for await (const [_, chunk, size] of sparse.read()) {
-    if (chunk) {
-      writer.write(await chunk.arrayBuffer());
+  for await (const [_, data, size] of sparse) {
+    if (data) {
+      // data may be a subarray view, so write the view itself rather than data.buffer
+      writer.write(data);
     } else {
       writer.write(new Uint8Array(size).buffer);
     }
diff --git a/src/firehose.js b/src/firehose.js
index fbfcda7..54d70ae 100644
--- a/src/firehose.js
+++ b/src/firehose.js
@@ -188,22 +188,23 @@ export class Firehose {
   /**
    * @param {number} physicalPartitionNumber
    * @param {number} startSector
-   * @param {Blob} blob
+   * @param {Uint8Array} data
    * @param {progressCallback|undefined} [onProgress] - Returns number of bytes written
    * @returns {Promise}
    */
-  async cmdProgram(physicalPartitionNumber, startSector, blob, onProgress = undefined) {
-    const total = blob.size;
-
-    const rsp = await this.xmlSend(toXml("program", {
+  async cmdProgram(physicalPartitionNumber, startSector, data, onProgress = undefined) {
+    const total = data.byteLength;
+    const attributes = {
       SECTOR_SIZE_IN_BYTES: this.cfg.SECTOR_SIZE_IN_BYTES,
       num_partition_sectors: Math.ceil(total / this.cfg.SECTOR_SIZE_IN_BYTES),
       physical_partition_number: physicalPartitionNumber,
       start_sector: startSector,
-    }));
+    };
+
+    const rsp = await this.xmlSend(toXml("program", attributes));
     if (!rsp.resp) {
-      console.error("Firehose - Failed to program");
-      return false;
+      console.error("Firehose - Failed to program", attributes, rsp);
+      throw new Error("Failed to program");
     }
 
     let i = 0;
@@ -212,11 +213,12 @@
     while (bytesToWrite > 0) {
       const wlen = Math.min(bytesToWrite, this.cfg.MaxPayloadSizeToTargetInBytes);
-      let wdata = new Uint8Array(await blob.slice(offset, offset + wlen).arrayBuffer());
+      let wdata = data.subarray(offset, offset + wlen);
       if (wlen % this.cfg.SECTOR_SIZE_IN_BYTES !== 0) {
-        const fillLen = (Math.floor(wlen / this.cfg.SECTOR_SIZE_IN_BYTES) + 1) * this.cfg.SECTOR_SIZE_IN_BYTES;
-        const fillArray = new Uint8Array(fillLen - wlen).fill(0x00);
-        wdata = concatUint8Array([wdata, fillArray]);
+        // Zero-pad the final write up to a full sector.
+        const fillLen = Math.ceil(wlen / this.cfg.SECTOR_SIZE_IN_BYTES) * this.cfg.SECTOR_SIZE_IN_BYTES;
+        const fillArray = new Uint8Array(fillLen);
+        fillArray.set(wdata);
+        wdata = fillArray;
       }
       await this.cdc.write(wdata);
       await this.cdc.write(new Uint8Array(0));
diff --git a/src/qdl.js b/src/qdl.js
index c942af3..9afafdf 100644
--- a/src/qdl.js
+++ b/src/qdl.js
@@ -116,24 +116,23 @@ export class qdlDevice {
     }
     console.info(`Flashing ${partitionName}...`);
     console.debug(`startSector ${partition.sector}, sectors ${partition.sectors}`);
-    const sparse = await Sparse.from(blob);
+    const sparse = await Sparse.from(blob.stream());
     if (sparse === null) {
-      return await this.firehose.cmdProgram(lun, partition.sector, blob, onProgress);
+      return this.firehose.cmdProgram(lun, partition.sector, new Uint8Array(await blob.arrayBuffer()), onProgress);
     }
     console.debug(`Erasing ${partitionName}...`);
     if (!await this.firehose.cmdErase(lun, partition.sector, partition.sectors)) {
-      console.error("qdl - Failed to erase partition before sparse flashing");
-      return false;
+      throw new Error("Failed to erase partition before sparse flashing");
     }
     console.debug(`Writing chunks to ${partitionName}...`);
-    for await (const [offset, chunk] of sparse.read()) {
-      if (!chunk) continue;
+    for await (const [offset, data] of sparse) {
+      if (!data) continue;
       if (offset % this.firehose.cfg.SECTOR_SIZE_IN_BYTES !== 0) {
         throw "qdl - Offset not aligned to sector size";
       }
       const sector = partition.sector + offset / this.firehose.cfg.SECTOR_SIZE_IN_BYTES;
       const onChunkProgress = (progress) => onProgress?.(offset + progress);
-      if (!await this.firehose.cmdProgram(lun, sector, chunk, onChunkProgress)) {
+      if (!await this.firehose.cmdProgram(lun, sector, data, onChunkProgress)) {
        console.debug("qdl - Failed to program chunk")
        return false;
      }
@@ -319,11 +318,9 @@ export class qdlDevice {
         continue;
       }
       const writeOffset = this.firehose.cfg.SECTOR_SIZE_IN_BYTES;
-      const gptBlobA = new Blob([gptDataA.slice(writeOffset)]);
-      await this.firehose.cmdProgram(lunA, 1, gptBlobA);
+      await this.firehose.cmdProgram(lunA, 1, gptDataA.slice(writeOffset));
       if (!sameLun) {
-        const gptBlobB = new Blob([gptDataB.slice(writeOffset)]);
-        await this.firehose.cmdProgram(lunB, 1, gptBlobB);
+        await this.firehose.cmdProgram(lunB, 1, gptDataB.slice(writeOffset));
       }
     }
     const activeBootLunId = (slot === "a") ? 1 : 2;
diff --git a/src/sparse.js b/src/sparse.js
index 8d34dfc..ad03be0 100644
--- a/src/sparse.js
+++ b/src/sparse.js
@@ -25,109 +25,142 @@ const ChunkType = {
 
 /**
- * @typedef {object} Chunk
+ * @typedef {object} SparseChunk
+ * @property {Header} header
  * @property {number} type
  * @property {number} blocks
- * @property {Blob} data
+ * @property {Uint8Array} data
  */
 
-export class Sparse {
-  /**
-   * @param {Blob} blob
-   * @param {Header} header
-   */
-  constructor(blob, header) {
-    this.blob = blob;
-    this.header = header;
-  }
+function assert(condition) {
+  if (!condition) throw new Error("Assertion failed");
+}
+
+
+/**
+ * @param {ReadableStream} stream
+ * @param {number} maxSize
+ * @returns {Promise<AsyncGenerator<[number, Uint8Array | null, number], void, any> | null>}
+ */
+export async function from(stream, maxSize = 1024 * 1024) {
+  // Bytes read from the stream but not yet consumed.
+  let buffer = new Uint8Array(0);
 
   /**
-   * @returns {AsyncIterator}
+   * Reads from the stream until at least byteLength bytes are buffered.
+   * @param {number} byteLength
    */
-  async* chunks() {
-    let blobOffset = FILE_HEADER_SIZE;
-    for (let i = 0; i < this.header.totalChunks; i++) {
-      if (blobOffset + CHUNK_HEADER_SIZE >= this.blob.size) {
-        throw "Sparse - Chunk header out of bounds";
-      }
-      const chunk = await this.blob.slice(blobOffset, blobOffset + CHUNK_HEADER_SIZE).arrayBuffer();
-      const view = new DataView(chunk);
-      const totalBytes = view.getUint32(8, true);
-      if (blobOffset + totalBytes > this.blob.size) {
-        throw "Sparse - Chunk data out of bounds";
-      }
-      yield {
-        type: view.getUint16(0, true),
-        blocks: view.getUint32(4, true),
-        data: this.blob.slice(blobOffset + CHUNK_HEADER_SIZE, blobOffset + totalBytes),
-      };
-      blobOffset += totalBytes;
-    }
-    if (blobOffset !== this.blob.size) {
-      console.warn("Sparse - Backing data larger expected");
-    }
-  }
+  const readUntil = async (byteLength) => {
+    assert(byteLength <= maxSize);
+    if (buffer.byteLength >= byteLength) return;
+    const reader = stream.getReader();
+    const parts = [buffer];
+    let size = buffer.byteLength;
+    try {
+      while (size < byteLength) {
+        const { value, done } = await reader.read();
+        if (done) throw new Error("Unexpected end of stream");
+        parts.push(value);
+        size += value.byteLength;
+      }
+    } finally {
+      reader.releaseLock();
+    }
+    // Concatenate everything buffered so far into one contiguous array.
+    const next = new Uint8Array(size);
+    let offset = 0;
+    for (const part of parts) {
+      next.set(part, offset);
+      offset += part.byteLength;
+    }
+    buffer = next;
+  };
+
+  await readUntil(FILE_HEADER_SIZE);
+  const header = parseFileHeader(buffer.buffer);
+  if (!header) return null;
+  buffer = buffer.slice(FILE_HEADER_SIZE);
 
   /**
-   * @returns {AsyncIterator<[number, Blob | null, number]>}
+   * @returns {AsyncGenerator<[number, Uint8Array | null, number], void, *>}
    */
-  async *read() {
+  async function* inflateChunks() {
     let offset = 0;
-    for await (const { type, blocks, data } of this.chunks()) {
-      const size = blocks * this.header.blockSize;
+    for (let i = 0; i < header.totalChunks; i++) {
+      await readUntil(CHUNK_HEADER_SIZE);
+      const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
+      const type = view.getUint16(0, true);
+      const blockCount = view.getUint32(4, true);
+      const totalBytes = view.getUint32(8, true);
+      const size = blockCount * header.blockSize;
+
       if (type === ChunkType.Raw) {
-        yield [offset, data, size];
-        offset += size;
+        // Drop the chunk header, then stream the data out in pieces of at
+        // most maxSize bytes so large chunks are never buffered in full.
+        buffer = buffer.slice(CHUNK_HEADER_SIZE);
+        let readBytes = 0;
+        while (readBytes < size) {
+          const dataChunkSize = Math.min(size - readBytes, maxSize);
+          await readUntil(dataChunkSize);
+          const data = buffer.subarray(0, dataChunkSize);
+          yield [offset, data, dataChunkSize];
+          buffer = buffer.slice(dataChunkSize);
+          readBytes += dataChunkSize;
+          offset += dataChunkSize;
+        }
+        assert(readBytes === size);
       } else if (type === ChunkType.Fill) {
-        const fill = new Uint8Array(await data.arrayBuffer());
-        if (fill.some((byte) => byte !== 0)) {
-          const buffer = new Uint8Array(size);
-          for (let i = 0; i < buffer.byteLength; i += 4) buffer.set(fill, i);
-          yield [offset, new Blob([buffer]), size];
+        await readUntil(totalBytes);
+        const data = buffer.slice(CHUNK_HEADER_SIZE, totalBytes);
+        buffer = buffer.slice(totalBytes);
+        assert(data.byteLength === 4);
+        if (data.some((byte) => byte !== 0)) {
+          // Expand the 4-byte fill pattern, again in pieces of at most maxSize bytes.
+          let readBytes = 0;
+          while (readBytes < size) {
+            const fillSize = Math.min(size - readBytes, maxSize);
+            const fill = new Uint8Array(fillSize);
+            for (let i = 0; i < fillSize; i += 4) fill.set(data, i);
+            yield [offset, fill, fillSize];
+            offset += fillSize;
+            readBytes += fillSize;
+          }
+          assert(readBytes === size);
         } else {
           yield [offset, null, size];
+          offset += size;
+        }
+      } else {
+        if (type === ChunkType.Skip) {
+          yield [offset, null, size];
+          offset += size;
         }
-        offset += size;
-      } else if (type === ChunkType.Skip) {
-        yield [offset, null, size];
-        offset += size;
+        await readUntil(totalBytes);
+        buffer = buffer.slice(totalBytes);
       }
     }
+    if (buffer.byteLength > 0) {
+      console.warn("Sparse - Backing data larger than expected");
+    }
   }
-}
-
-/**
- * @param {Blob} blob
- * @returns {Promise}
- */
-export async function from(blob) {
-  const header = await parseFileHeader(blob);
-  if (!header) return null;
-  return new Sparse(blob, header);
+
+  return inflateChunks();
 }
 
 /**
- * @param {Blob} blob
- * @returns {Promise}
+ * @param {ArrayBufferLike} buffer
+ * @returns {Header | null}
  */
-export async function parseFileHeader(blob) {
-  const view = new DataView(await blob.slice(0, FILE_HEADER_SIZE).arrayBuffer());
+export function parseFileHeader(buffer) {
+  const view = new DataView(buffer);
   const magic = view.getUint32(0, true);
   if (magic !== FILE_MAGIC) {
+    // Not a sparse file.
     return null;
   }
   const fileHeaderSize = view.getUint16(8, true);
   const chunkHeaderSize = view.getUint16(10, true);
   if (fileHeaderSize !== FILE_HEADER_SIZE) {
-    console.error(`The file header size was expected to be 28, but is ${fileHeaderSize}.`);
-    return null;
+    throw new Error(`The file header size was expected to be 28, but is ${fileHeaderSize}.`);
   }
   if (chunkHeaderSize !== CHUNK_HEADER_SIZE) {
-    console.error(`The chunk header size was expected to be 12, but is ${chunkHeaderSize}.`);
-    return null;
+    throw new Error(`The chunk header size was expected to be 12, but is ${chunkHeaderSize}.`);
   }
   return {
     magic,
diff --git a/src/sparse.spec.js b/src/sparse.spec.js
index 8e7d404..925a9e4 100644
--- a/src/sparse.spec.js
+++ b/src/sparse.spec.js
@@ -1,5 +1,5 @@
 import * as Bun from "bun";
-import { beforeAll, describe, expect, test } from "bun:test";
+import { describe, expect, test } from "bun:test";
 
 import * as Sparse from "./sparse";
 import { simg2img } from "../scripts/simg2img.js";
@@ -8,42 +8,36 @@ const inputData = Bun.file("./test/fixtures/sparse.img");
 const expectedPath = "./test/fixtures/raw.img";
 
 describe("sparse", () => {
-  test("parseFileHeader", async () => {
-    expect(await Sparse.parseFileHeader(inputData)).toEqual({
-      magic: 0xED26FF3A,
-      majorVersion: 1,
-      minorVersion: 0,
-      fileHeaderSize: 28,
-      chunkHeaderSize: 12,
-      blockSize: 4096,
-      totalBlocks: 9,
-      totalChunks: 6,
-      crc32: 0,
-    });
-  });
-
-  describe("Sparse", () => {
-    /** @type {Sparse.Sparse} */
-    let sparse;
-
-    beforeAll(async () => {
-      sparse = await Sparse.from(inputData);
+  describe("parseFileHeader", () => {
+    test("valid sparse file", async () => {
+      expect(Sparse.parseFileHeader(await inputData.arrayBuffer())).toEqual({
+        magic: 0xED26FF3A,
+        majorVersion: 1,
+        minorVersion: 0,
+        fileHeaderSize: 28,
+        chunkHeaderSize: 12,
+        blockSize: 4096,
+        totalBlocks: 9,
+        totalChunks: 6,
+        crc32: 0,
+      });
     });
 
-    test("chunks", async () => {
-      const chunks = await Array.fromAsync(sparse.chunks());
-      expect(chunks.length).toBe(sparse.header.totalChunks);
+    test("invalid sparse file", async () => {
+      expect(Sparse.parseFileHeader(await Bun.file(expectedPath).arrayBuffer())).toBeNull();
     });
+  });
 
-    test("read", async () => {
-      let prevOffset = undefined;
-      for await (const [offset, chunk, size] of sparse.read()) {
-        expect(offset).toBeGreaterThanOrEqual(prevOffset ?? 0);
-        if (chunk) expect(chunk.size).toBe(size);
-        expect(size).toBeGreaterThan(0);
-        prevOffset = offset + size;
-      }
-    });
+  test("from", async () => {
+    const sparse = await Sparse.from(inputData.stream());
+    if (!sparse) throw "Failed to parse sparse";
+    let expectedOffset = 0;
+    for await (const [offset, data, size] of sparse) {
+      expect(offset).toBe(expectedOffset);
+      if (data) expect(data.byteLength).toBe(size);
+      expect(size).toBeGreaterThan(0);
+      expectedOffset = offset + size;
+    }
   });
 
   test("simg2img", async () => {
diff --git a/src/usblib.js b/src/usblib.js
index 0adb821..84625dd 100644
--- a/src/usblib.js
+++ b/src/usblib.js
@@ -121,7 +121,7 @@ export class usbClass {
   async write(data, wait = true) {
     let offset = 0;
     do {
-      const chunk = data.slice(offset, offset + constants.BULK_TRANSFER_SIZE);
+      const chunk = data.subarray(offset, offset + constants.BULK_TRANSFER_SIZE);
       offset += chunk.byteLength;
       const promise = this.device?.transferOut(this.epOut?.endpointNumber, chunk);
       // this is a hack, webusb doesn't have timed out catching
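
For context, a minimal sketch of how the new streaming API introduced by this diff is meant to be consumed (mirroring `scripts/simg2img.js`; the import path, fixture path, and logging are illustrative assumptions, not part of the change):

```js
import * as Sparse from "./src/sparse.js";

// Any Blob/File works: Blob#stream() provides the ReadableStream that Sparse.from() expects.
const blob = Bun.file("./test/fixtures/sparse.img");
const chunks = await Sparse.from(blob.stream()); // resolves to null if the file is not a sparse image
if (!chunks) throw new Error("Not an Android sparse image");

for await (const [offset, data, size] of chunks) {
  if (data) {
    // Raw or non-zero fill data, delivered in pieces of at most maxSize bytes.
    console.log(`write ${size} bytes at offset ${offset}`);
  } else {
    // Skip or zero-fill region: no payload, just advance the output by `size` bytes.
    console.log(`skip ${size} bytes at offset ${offset}`);
  }
}
```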