sparse streams #63

Draft · wants to merge 22 commits into base: master
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
@@ -46,7 +46,7 @@ jobs:
         run: simg2img system.img system-raw.img

       - name: run sparse benchmark
-        run: bun scripts/simg2img.js system.img /tmp/system-raw.img
+        run: timeout 15s bun scripts/simg2img.js system.img /tmp/system-raw.img

       - name: check output matches
         run: cmp system-raw.img /tmp/system-raw.img
8 changes: 4 additions & 4 deletions scripts/simg2img.js
@@ -5,14 +5,14 @@ export async function simg2img(inputPath, outputPath) {
   const sparseImage = Bun.file(inputPath);
   const outputImage = Bun.file(outputPath);

-  const sparse = await Sparse.from(sparseImage);
+  const sparse = await Sparse.from(sparseImage.stream());
   if (!sparse) throw "Failed to parse sparse file";

   // FIXME: write out a "sparse" file? not supported by Bun
   const writer = outputImage.writer({ highWaterMark: 4 * 1024 * 1024 });
-  for await (const [_, chunk, size] of sparse.read()) {
-    if (chunk) {
-      writer.write(await chunk.arrayBuffer());
+  for await (const [_, data, size] of sparse) {
+    if (data) {
+      writer.write(data);
     } else {
       writer.write(new Uint8Array(size).buffer);
     }
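For context, a minimal sketch of consuming the new iterator contract: `Sparse.from` now takes a `ReadableStream<Uint8Array>` and yields `[offset, data, size]` tuples, with `data` set to `null` for skip and zero-fill runs. The import path and the `sparseOutputSize` helper below are hypothetical, not part of this PR.

import * as Sparse from "../src/sparse"; // assumed path, mirroring scripts/simg2img.js

// Hypothetical helper: compute the unsparsed output size without writing anything.
async function sparseOutputSize(stream) {
  const sparse = await Sparse.from(stream);
  if (!sparse) throw new Error("Not a sparse image");
  let total = 0;
  for await (const [offset, , size] of sparse) {
    // offsets arrive in increasing order, so the last tuple wins
    total = offset + size;
  }
  return total;
}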
26 changes: 14 additions & 12 deletions src/firehose.js
@@ -188,22 +188,23 @@ export class Firehose {
   /**
    * @param {number} physicalPartitionNumber
    * @param {number} startSector
-   * @param {Blob} blob
+   * @param {Uint8Array} data
    * @param {progressCallback|undefined} [onProgress] - Returns number of bytes written
    * @returns {Promise<boolean>}
    */
-  async cmdProgram(physicalPartitionNumber, startSector, blob, onProgress = undefined) {
-    const total = blob.size;
-
-    const rsp = await this.xmlSend(toXml("program", {
+  async cmdProgram(physicalPartitionNumber, startSector, data, onProgress = undefined) {
+    const total = data.byteLength;
+    const attributes = {
       SECTOR_SIZE_IN_BYTES: this.cfg.SECTOR_SIZE_IN_BYTES,
       num_partition_sectors: Math.ceil(total / this.cfg.SECTOR_SIZE_IN_BYTES),
       physical_partition_number: physicalPartitionNumber,
       start_sector: startSector,
-    }));
+    };
+
+    const rsp = await this.xmlSend(toXml("program", attributes));
     if (!rsp.resp) {
-      console.error("Firehose - Failed to program");
-      return false;
+      console.error("Firehose - Failed to program", attributes, rsp);
+      throw new Error("Failed to program");
     }

     let i = 0;
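As a sanity check on the sector math above, assuming a 4096-byte sector size (the real value comes from the device's configure response):

const SECTOR_SIZE_IN_BYTES = 4096; // assumed for illustration
// A 100000-byte payload spans 25 sectors; the last one is only partially
// used, which is why the write loop below pads the final transfer.
console.assert(Math.ceil(100000 / SECTOR_SIZE_IN_BYTES) === 25);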
@@ -212,11 +213,12 @@

     while (bytesToWrite > 0) {
       const wlen = Math.min(bytesToWrite, this.cfg.MaxPayloadSizeToTargetInBytes);
-      let wdata = new Uint8Array(await blob.slice(offset, offset + wlen).arrayBuffer());
+      let wdata = data.subarray(offset, offset + wlen);
       if (wlen % this.cfg.SECTOR_SIZE_IN_BYTES !== 0) {
-        const fillLen = (Math.floor(wlen / this.cfg.SECTOR_SIZE_IN_BYTES) + 1) * this.cfg.SECTOR_SIZE_IN_BYTES;
-        const fillArray = new Uint8Array(fillLen - wlen).fill(0x00);
-        wdata = concatUint8Array([wdata, fillArray]);
+        const fillLen = Math.ceil(wlen / this.cfg.SECTOR_SIZE_IN_BYTES) * this.cfg.SECTOR_SIZE_IN_BYTES;
+        const fillArray = new Uint8Array(fillLen);
+        fillArray.set(wdata);
+        wdata = fillArray;
       }
       await this.cdc.write(wdata);
       await this.cdc.write(new Uint8Array(0));
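The padding rule in that loop, pulled out as a standalone sketch; `padToSector` is illustrative and not a helper in this codebase:

/** @param {Uint8Array} data @param {number} sectorSize */
function padToSector(data, sectorSize) {
  if (data.byteLength % sectorSize === 0) return data;
  const padded = new Uint8Array(Math.ceil(data.byteLength / sectorSize) * sectorSize);
  padded.set(data); // trailing bytes stay zero, matching the old fill(0x00) behavior
  return padded;
}

console.assert(padToSector(new Uint8Array(5), 4).byteLength === 8);
console.assert(padToSector(new Uint8Array(8), 4).byteLength === 8); // already aligned, returned as-is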
19 changes: 8 additions & 11 deletions src/qdl.js
@@ -116,24 +116,23 @@ export class qdlDevice {
     }
     console.info(`Flashing ${partitionName}...`);
     console.debug(`startSector ${partition.sector}, sectors ${partition.sectors}`);
-    const sparse = await Sparse.from(blob);
+    const sparse = await Sparse.from(blob.stream());
     if (sparse === null) {
-      return await this.firehose.cmdProgram(lun, partition.sector, blob, onProgress);
+      return this.firehose.cmdProgram(lun, partition.sector, new Uint8Array(await blob.arrayBuffer()), onProgress);
     }
     console.debug(`Erasing ${partitionName}...`);
     if (!await this.firehose.cmdErase(lun, partition.sector, partition.sectors)) {
-      console.error("qdl - Failed to erase partition before sparse flashing");
-      return false;
+      throw new Error("Failed to erase partition before sparse flashing");
     }
     console.debug(`Writing chunks to ${partitionName}...`);
-    for await (const [offset, chunk] of sparse.read()) {
-      if (!chunk) continue;
+    for await (const [offset, data] of sparse) {
+      if (!data) continue;
       if (offset % this.firehose.cfg.SECTOR_SIZE_IN_BYTES !== 0) {
         throw "qdl - Offset not aligned to sector size";
       }
       const sector = partition.sector + offset / this.firehose.cfg.SECTOR_SIZE_IN_BYTES;
       const onChunkProgress = (progress) => onProgress?.(offset + progress);
-      if (!await this.firehose.cmdProgram(lun, sector, chunk, onChunkProgress)) {
+      if (!await this.firehose.cmdProgram(lun, sector, data, onChunkProgress)) {
         console.debug("qdl - Failed to program chunk")
         return false;
       }
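The offset-to-sector mapping used above, worked with assumed values (4096-byte sectors, hypothetical partition start):

const SECTOR_SIZE_IN_BYTES = 4096;       // assumed
const partitionStart = 16384;            // hypothetical partition.sector
const offset = 8 * SECTOR_SIZE_IN_BYTES; // byte offset yielded by the sparse iterator
if (offset % SECTOR_SIZE_IN_BYTES !== 0) throw "qdl - Offset not aligned to sector size";
console.assert(partitionStart + offset / SECTOR_SIZE_IN_BYTES === 16392);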
@@ -319,11 +318,9 @@ export class qdlDevice {
         continue;
       }
       const writeOffset = this.firehose.cfg.SECTOR_SIZE_IN_BYTES;
-      const gptBlobA = new Blob([gptDataA.slice(writeOffset)]);
-      await this.firehose.cmdProgram(lunA, 1, gptBlobA);
+      await this.firehose.cmdProgram(lunA, 1, gptDataA.slice(writeOffset));
       if (!sameLun) {
-        const gptBlobB = new Blob([gptDataB.slice(writeOffset)]);
-        await this.firehose.cmdProgram(lunB, 1, gptBlobB);
+        await this.firehose.cmdProgram(lunB, 1, gptDataB.slice(writeOffset));
       }
     }
     const activeBootLunId = (slot === "a") ? 1 : 2;
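For intuition on the GPT write above, a sketch under assumed sizes (4096-byte sectors; one protective MBR sector, one GPT header sector, four sectors of partition entries):

const SECTOR_SIZE_IN_BYTES = 4096;                         // assumed
const gptData = new Uint8Array(6 * SECTOR_SIZE_IN_BYTES);  // MBR + header + entries, illustrative
const body = gptData.slice(SECTOR_SIZE_IN_BYTES);          // drop sector 0, the protective MBR
console.assert(body.byteLength === 5 * SECTOR_SIZE_IN_BYTES);
// cmdProgram(lun, 1, body) then writes starting at sector 1, leaving the MBR untouched.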
161 changes: 97 additions & 64 deletions src/sparse.js
@@ -25,109 +25,142 @@


 /**
- * @typedef {object} Chunk
+ * @typedef {object} SparseChunk
+ * @property {Header} header
  * @property {number} type
  * @property {number} blocks
- * @property {Blob} data
+ * @property {Uint8Array} data
  */


-export class Sparse {
-  /**
-   * @param {Blob} blob
-   * @param {Header} header
-   */
-  constructor(blob, header) {
-    this.blob = blob;
-    this.header = header;
-  }
+function assert(condition) {
+  if (!condition) throw new Error("Assertion failed");
+}
+
+
+/**
+ * @param {ReadableStream<Uint8Array>} stream
+ * @param {number} maxSize
+ * @returns {Promise<AsyncGenerator<[number, Uint8Array | null, number], void, *> | null>}
+ */
+export async function from(stream, maxSize = 1024 * 1024) {
+  let buffer = new Uint8Array(0);

   /**
-   * @returns {AsyncIterator<Chunk>}
+   * Read from the stream until the buffer holds at least byteLength bytes.
+   * @param {number} byteLength
    */
-  async* chunks() {
-    let blobOffset = FILE_HEADER_SIZE;
-    for (let i = 0; i < this.header.totalChunks; i++) {
-      if (blobOffset + CHUNK_HEADER_SIZE >= this.blob.size) {
-        throw "Sparse - Chunk header out of bounds";
-      }
-      const chunk = await this.blob.slice(blobOffset, blobOffset + CHUNK_HEADER_SIZE).arrayBuffer();
-      const view = new DataView(chunk);
-      const totalBytes = view.getUint32(8, true);
-      if (blobOffset + totalBytes > this.blob.size) {
-        throw "Sparse - Chunk data out of bounds";
-      }
-      yield {
-        type: view.getUint16(0, true),
-        blocks: view.getUint32(4, true),
-        data: this.blob.slice(blobOffset + CHUNK_HEADER_SIZE, blobOffset + totalBytes),
-      };
-      blobOffset += totalBytes;
-    }
-    if (blobOffset !== this.blob.size) {
-      console.warn("Sparse - Backing data larger than expected");
-    }
-  }
+  const readUntil = async (byteLength) => {
+    if (buffer.byteLength >= byteLength) return;
+    const reader = stream.getReader();
+    const parts = [buffer];
+    let size = buffer.byteLength;
+    try {
+      while (size < byteLength) {
+        const { value, done } = await reader.read();
+        if (done) throw new Error("Unexpected end of stream");
+        parts.push(value);
+        size += value.byteLength;
+      }
+    } finally {
+      reader.releaseLock();
+    }
+    // Concatenate everything read so far into one contiguous view.
+    buffer = new Uint8Array(size);
+    let offset = 0;
+    for (const part of parts) {
+      buffer.set(part, offset);
+      offset += part.byteLength;
+    }
+  };
+
+  await readUntil(FILE_HEADER_SIZE);
+  const header = parseFileHeader(buffer.buffer);
+  if (!header) return null;
+  buffer = buffer.slice(FILE_HEADER_SIZE);

   /**
-   * @returns {AsyncIterator<[number, Blob | null, number]>}
+   * @returns {AsyncGenerator<[number, Uint8Array | null, number], void, *>}
    */
-  async *read() {
+  async function* inflateChunks() {
     let offset = 0;
-    for await (const { type, blocks, data } of this.chunks()) {
-      const size = blocks * this.header.blockSize;
+    for (let i = 0; i < header.totalChunks; i++) {
+      await readUntil(CHUNK_HEADER_SIZE);
+      const view = new DataView(buffer.buffer);
+      const type = view.getUint16(0, true);
+      const blockCount = view.getUint32(4, true);
+      const totalBytes = view.getUint32(8, true);
+      const size = blockCount * header.blockSize;
+
       if (type === ChunkType.Raw) {
-        yield [offset, data, size];
-        offset += size;
+        let readBytes = CHUNK_HEADER_SIZE;
+        while (readBytes < totalBytes) {
+          const dataChunkSize = Math.min(totalBytes - readBytes, maxSize);
+          await readUntil(readBytes + dataChunkSize); // TODO: maybe read smaller chunks?
+          const data = buffer.subarray(readBytes, readBytes + dataChunkSize);
+          assert(data.byteLength === dataChunkSize);
+          yield [offset, data, dataChunkSize];
+          readBytes += dataChunkSize;
+          offset += dataChunkSize;
+        }
+        assert(readBytes === totalBytes);
+        buffer = buffer.slice(totalBytes);
       } else if (type === ChunkType.Fill) {
-        const fill = new Uint8Array(await data.arrayBuffer());
-        if (fill.some((byte) => byte !== 0)) {
-          const buffer = new Uint8Array(size);
-          for (let i = 0; i < buffer.byteLength; i += 4) buffer.set(fill, i);
-          yield [offset, new Blob([buffer]), size];
+        await readUntil(totalBytes);
+        const data = buffer.slice(CHUNK_HEADER_SIZE, totalBytes);
+        buffer = buffer.slice(totalBytes);
+        if (data.some((byte) => byte !== 0)) {
+          assert(data.byteLength === 4);
+          let readBytes = 0;
+          while (readBytes < size) {
+            const fillSize = Math.min(size - readBytes, maxSize);
+            const fill = new Uint8Array(fillSize);
+            for (let j = 0; j < fillSize; j += 4) fill.set(data, j);
+            yield [offset, fill, fillSize];
+            offset += fillSize;
+            readBytes += fillSize;
+          }
+          assert(readBytes === size);
         } else {
           yield [offset, null, size];
+          offset += size;
         }
-        offset += size;
-      } else {
-        if (type === ChunkType.Skip) {
-          yield [offset, null, size];
-        }
+      } else if (type === ChunkType.Skip) {
+        yield [offset, null, size];
         offset += size;
+        await readUntil(totalBytes);
+        buffer = buffer.slice(totalBytes);
       }
     }
+    if (buffer.byteLength > 0) {
+      console.warn("Sparse - Backing data larger than expected");
+    }
   }
-}

-/**
- * @param {Blob} blob
- * @returns {Promise<Sparse|null>}
- */
-export async function from(blob) {
-  const header = await parseFileHeader(blob);
-  if (!header) return null;
-  return new Sparse(blob, header);
+  return inflateChunks();
 }


 /**
- * @param {Blob} blob
- * @returns {Promise<Header|null>}
+ * @param {ArrayBufferLike} buffer
+ * @returns {Header | null}
  */
-export async function parseFileHeader(blob) {
-  const view = new DataView(await blob.slice(0, FILE_HEADER_SIZE).arrayBuffer());
+export function parseFileHeader(buffer) {
+  const view = new DataView(buffer);
   const magic = view.getUint32(0, true);
   if (magic !== FILE_MAGIC) {
     // Not a sparse file.
     return null;
   }
   const fileHeaderSize = view.getUint16(8, true);
   const chunkHeaderSize = view.getUint16(10, true);
   if (fileHeaderSize !== FILE_HEADER_SIZE) {
-    console.error(`The file header size was expected to be 28, but is ${fileHeaderSize}.`);
-    return null;
+    throw new Error(`The file header size was expected to be 28, but is ${fileHeaderSize}.`);
   }
   if (chunkHeaderSize !== CHUNK_HEADER_SIZE) {
-    console.error(`The chunk header size was expected to be 12, but is ${chunkHeaderSize}.`);
-    return null;
+    throw new Error(`The chunk header size was expected to be 12, but is ${chunkHeaderSize}.`);
   }
   return {
     magic,
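A hedged usage sketch of the new streaming API: any `ReadableStream<Uint8Array>` works, so an image can be unsparsed straight off the network without buffering it whole. The URL and the 16 MiB `maxSize` are placeholders:

import * as Sparse from "./sparse";

const response = await fetch("https://example.com/system.img"); // placeholder URL
const sparse = await Sparse.from(response.body, 16 * 1024 * 1024);
if (!sparse) throw new Error("Not an Android sparse image");
for await (const [offset, data, size] of sparse) {
  // data === null means `size` bytes of zeros (fill) or unwritten space (skip) at `offset`
  console.log(offset, data ? `data(${data.byteLength})` : "skip", size);
}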