diff --git a/README.md b/README.md
index a18abbc..06070b6 100644
--- a/README.md
+++ b/README.md
@@ -240,6 +240,15 @@ be directly fed to a
* [`async CarWriter.createAppender()`](#CarWriter__createAppender)
* [`async CarWriter.updateRootsInBytes(bytes, roots)`](#CarWriter__updateRootsInBytes)
* [`async CarWriter.updateRootsInFile(fd, roots)`](#CarWriter__updateRootsInFile)
+ * [`class CarBufferWriter`](#CarBufferWriter)
+ * [`CarBufferWriter#addRoot(root, options)`](#CarBufferWriter_addRoot)
+ * [`CarBufferWriter#write(block)`](#CarBufferWriter_write)
+ * [`CarBufferWriter#close([options])`](#CarBufferWriter_close)
+ * [`CarBufferWriter.blockLength(Block)`](#CarBufferWriter__blockLength__Block__)
+ * [`CarBufferWriter.calculateHeaderLength(rootLengths)`](#CarBufferWriter__calculateHeaderLength__rootLengths__)
+ * [`CarBufferWriter.headerLength({ roots })`](#CarBufferWriter__headerLength______roots______)
+ * [`CarBufferWriter.estimateHeaderLength(rootCount[, rootByteLength])`](#CarBufferWriter__estimateHeaderLength__rootCount______rootByteLength____)
+ * [`CarBufferWriter.createWriter(buffer[, options])`](#CarBufferWriter__createWriter__buffer______options____)
* [`async decoder.readHeader(reader)`](#async__decoder__readHeader__reader__)
* [`async decoder.readBlockHead(reader)`](#async__decoder__readBlockHead__reader__)
* [`decoder.createDecoder(reader)`](#decoder__createDecoder__reader__)
@@ -766,6 +775,106 @@ replaced encode as the same length as the new roots.
This function is **only available in Node.js** and not a browser
environment.
+
+### `class CarBufferWriter`
+
+A simple CAR writer that writes to a pre-allocated buffer.
+
+
+### `CarBufferWriter#addRoot(root, options)`
+
+* `root` `(CID)`
+* `options`
+
+* Returns: `CarBufferWriter`
+
+Add a root to this writer, to be used to create a header when the CAR is
+finalized with [`close()`](#CarBufferWriter__close)
+
+
+### `CarBufferWriter#write(block)`
+
+* `block` `(Block)`: A `{ cid:CID, bytes:Uint8Array }` pair.
+
+* Returns: `CarBufferWriter`
+
+Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive.
+Throws if there is not enough capacity.
+
+
+### `CarBufferWriter#close([options])`
+
+* `options` `(object, optional)`
+ * `options.resize` `(boolean, optional)`
+
+* Returns: `Uint8Array`
+
+Finalize the CAR and return it as a `Uint8Array`.
+
+
+### `CarBufferWriter.blockLength(Block)`
+
+* `block` `(Block)`
+
+* Returns: `number`
+
+Calculates number of bytes required for storing given block in CAR. Useful in
+estimating size of an `ArrayBuffer` for the `CarBufferWriter`.
+
+
+### `CarBufferWriter.calculateHeaderLength(rootLengths)`
+
+* `rootLengths` `(number[])`
+
+* Returns: `number`
+
+Calculates header size given the array of byteLength for roots.
+
+
+### `CarBufferWriter.headerLength({ roots })`
+
+* `options` `(object)`
+ * `options.roots` `(CID[])`
+
+* Returns: `number`
+
+Calculates header size given the array of roots.
+
+
+### `CarBufferWriter.estimateHeaderLength(rootCount[, rootByteLength])`
+
+* `rootCount` `(number)`
+* `rootByteLength` `(number, optional)`
+
+* Returns: `number`
+
+Estimates header size given a count of the roots and the expected byte length
+of the root CIDs. The default length works for a standard CIDv1 with a
+single-byte multihash code, such as SHA2-256 (i.e. the most common CIDv1).
+
+
+### `CarBufferWriter.createWriter(buffer[, options])`
+
+* `buffer` `(ArrayBuffer)`
+* `options` `(object, optional)`
+ * `options.roots` `(CID[], optional)`
+ * `options.byteOffset` `(number, optional)`
+ * `options.byteLength` `(number, optional)`
+ * `options.headerSize` `(number, optional)`
+
+* Returns: `CarBufferWriter`
+
+Creates synchronous CAR writer that can be used to encode blocks into a given
+buffer. Optionally you could pass `byteOffset` and `byteLength` to specify a
+range inside buffer to write into. If car file is going to have `roots` you
+need to either pass them under `options.roots` (from which header size will
+be calculated) or provide `options.headerSize` to allocate required space
+in the buffer. You may also provide known `roots` and `headerSize` to
+allocate space for the roots that may not be known ahead of time.
+
+Note: Incorrect `headerSize` may lead to copying bytes inside a buffer
+which will have a negative impact on performance.
+
### `async decoder.readHeader(reader)`
diff --git a/api.ts b/api.ts
index 378c32f..44882b7 100644
--- a/api.ts
+++ b/api.ts
@@ -1,17 +1,21 @@
import { CID } from 'multiformats/cid'
+export type { CID }
/* Generic types for interfacing with block storage */
-export type Block = { cid: CID, bytes: Uint8Array }
+export type Block = {
+ cid: CID
+ bytes: Uint8Array
+}
export type BlockHeader = {
- cid: CID,
- length: number,
+ cid: CID
+ length: number
blockLength: number
}
export type BlockIndex = BlockHeader & {
- offset: number,
+ offset: number
blockOffset: number
}
@@ -36,6 +40,20 @@ export interface BlockWriter {
   close(): Promise<void>
}
+export interface CarBufferWriter {
+  addRoot(root: CID, options?: { resize?: boolean }): CarBufferWriter
+  write(block: Block): CarBufferWriter
+  close(options?: { resize?: boolean }): Uint8Array
+}
+
+export interface CarBufferWriterOptions {
+ roots?: CID[] // defaults to []
+ byteOffset?: number // defaults to 0
+ byteLength?: number // defaults to buffer.byteLength
+
+ headerSize?: number // defaults to size needed for provided roots
+}
+
export interface WriterChannel {
writer: BlockWriter
   out: AsyncIterable<Uint8Array>
diff --git a/lib/buffer-writer.js b/lib/buffer-writer.js
new file mode 100644
index 0000000..916d5b3
--- /dev/null
+++ b/lib/buffer-writer.js
@@ -0,0 +1,286 @@
+import varint from 'varint'
+import { Token, Type } from 'cborg'
+import { tokensToLength } from 'cborg/length'
+import * as CBOR from '@ipld/dag-cbor'
+
+/**
+ * @typedef {import('../api').CID} CID
+ * @typedef {import('../api').Block} Block
+ * @typedef {import('../api').CarBufferWriter} Writer
+ * @typedef {import('../api').CarBufferWriterOptions} Options
+ * @typedef {import('./coding').CarEncoder} CarEncoder
+ */
+
+/**
+ * A simple CAR writer that writes to a pre-allocated buffer.
+ *
+ * @class
+ * @name CarBufferWriter
+ * @implements {Writer}
+ */
+class CarBufferWriter {
+ /**
+ * @param {Uint8Array} bytes
+ * @param {number} headerSize
+ */
+ constructor (bytes, headerSize) {
+ /** @readonly */
+ this.bytes = bytes
+ this.byteOffset = headerSize
+
+ /**
+ * @readonly
+ * @type {CID[]}
+ */
+ this.roots = []
+ this.headerSize = headerSize
+ }
+
+ /**
+ * Add a root to this writer, to be used to create a header when the CAR is
+ * finalized with {@link CarBufferWriter.close `close()`}
+ *
+ * @param {CID} root
+ * @param {{resize?:boolean}} [options]
+ * @returns {CarBufferWriter}
+ */
+ addRoot (root, options) {
+ addRoot(this, root, options)
+ return this
+ }
+
+ /**
+ * Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive.
+ * Throws if there is not enough capacity.
+ *
+ * @param {Block} block A `{ cid:CID, bytes:Uint8Array }` pair.
+ * @returns {CarBufferWriter}
+ */
+ write (block) {
+ addBlock(this, block)
+ return this
+ }
+
+ /**
+ * Finalize the CAR and return it as a `Uint8Array`.
+ *
+ * @param {object} [options]
+ * @param {boolean} [options.resize]
+ * @returns {Uint8Array}
+ */
+ close (options) {
+ return close(this, options)
+ }
+}
+
+/**
+ * @param {CarBufferWriter} writer
+ * @param {CID} root
+ * @param {{resize?:boolean}} [options]
+ */
+export const addRoot = (writer, root, { resize = false } = {}) => {
+ const { bytes, headerSize, byteOffset, roots } = writer
+ writer.roots.push(root)
+ const size = headerLength(writer)
+ // If there is not enough space for the new root
+ if (size > headerSize) {
+    // Check if the root would fit if we were to resize the header.
+    if (size - headerSize + byteOffset <= bytes.byteLength) {
+ // If resize is enabled resize head
+ if (resize) {
+ resizeHeader(writer, size)
+ // otherwise remove head and throw an error suggesting to resize
+ } else {
+ roots.pop()
+        throw new RangeError(`Header of size ${headerSize} has no capacity for new root ${root}.
+  However there is space in the buffer and you could call addRoot(root, { resize: true }) to resize the header to make space for this root.`)
+ }
+ // If head would not fit even with resize pop new root and throw error
+ } else {
+ roots.pop()
+ throw new RangeError(`Buffer has no capacity for a new root ${root}`)
+ }
+ }
+}
+
+/**
+ * Calculates number of bytes required for storing given block in CAR. Useful in
+ * estimating size of an `ArrayBuffer` for the `CarBufferWriter`.
+ *
+ * @name CarBufferWriter.blockLength(Block)
+ * @param {Block} block
+ * @returns {number}
+ */
+export const blockLength = ({ cid, bytes }) => {
+ const size = cid.bytes.byteLength + bytes.byteLength
+ return varint.encodingLength(size) + size
+}
+
+/**
+ * @param {CarBufferWriter} writer
+ * @param {Block} block
+ */
+export const addBlock = (writer, { cid, bytes }) => {
+ const byteLength = cid.bytes.byteLength + bytes.byteLength
+ const size = varint.encode(byteLength)
+ if (writer.byteOffset + size.length + byteLength > writer.bytes.byteLength) {
+ throw new RangeError('Buffer has no capacity for this block')
+ } else {
+ writeBytes(writer, size)
+ writeBytes(writer, cid.bytes)
+ writeBytes(writer, bytes)
+ }
+}
+
+/**
+ * @param {CarBufferWriter} writer
+ * @param {object} [options]
+ * @param {boolean} [options.resize]
+ */
+export const close = (writer, { resize = false } = {}) => {
+ const { roots, bytes, byteOffset, headerSize } = writer
+
+ const headerBytes = CBOR.encode({ version: 1, roots })
+ const varintBytes = varint.encode(headerBytes.length)
+
+ const size = varintBytes.length + headerBytes.byteLength
+ const offset = headerSize - size
+
+ // If header size estimate was accurate we just write header and return
+ // view into buffer.
+ if (offset === 0) {
+ writeHeader(writer, varintBytes, headerBytes)
+ return bytes.subarray(0, byteOffset)
+ // If header was overestimated and `{resize: true}` is passed resize header
+ } else if (resize) {
+ resizeHeader(writer, size)
+ writeHeader(writer, varintBytes, headerBytes)
+ return bytes.subarray(0, writer.byteOffset)
+ } else {
+ throw new RangeError(`Header size was overestimated.
+You can use close({ resize: true }) to resize header`)
+ }
+}
+
+/**
+ * @param {CarBufferWriter} writer
+ * @param {number} byteLength
+ */
+export const resizeHeader = (writer, byteLength) => {
+ const { bytes, headerSize } = writer
+ // Move data section to a new offset
+ bytes.set(bytes.subarray(headerSize, writer.byteOffset), byteLength)
+ // Update header size & byteOffset
+ writer.byteOffset += byteLength - headerSize
+ writer.headerSize = byteLength
+}
+
+/**
+ * @param {CarBufferWriter} writer
+ * @param {number[]|Uint8Array} bytes
+ */
+
+const writeBytes = (writer, bytes) => {
+ writer.bytes.set(bytes, writer.byteOffset)
+ writer.byteOffset += bytes.length
+}
+/**
+ * @param {{bytes:Uint8Array}} writer
+ * @param {number[]} varint
+ * @param {Uint8Array} header
+ */
+const writeHeader = ({ bytes }, varint, header) => {
+ bytes.set(varint)
+ bytes.set(header, varint.length)
+}
+
+const headerPreludeTokens = [
+ new Token(Type.map, 2),
+ new Token(Type.string, 'version'),
+ new Token(Type.uint, 1),
+ new Token(Type.string, 'roots')
+]
+
+const CID_TAG = new Token(Type.tag, 42)
+
+/**
+ * Calculates header size given the array of byteLength for roots.
+ *
+ * @name CarBufferWriter.calculateHeaderLength(rootLengths)
+ * @param {number[]} rootLengths
+ * @returns {number}
+ */
+export const calculateHeaderLength = (rootLengths) => {
+ const tokens = [...headerPreludeTokens]
+ tokens.push(new Token(Type.array, rootLengths.length))
+ for (const rootLength of rootLengths) {
+ tokens.push(CID_TAG)
+ tokens.push(new Token(Type.bytes, { length: rootLength + 1 }))
+ }
+ const length = tokensToLength(tokens) // no options needed here because we have simple tokens
+ return varint.encodingLength(length) + length
+}
+
+/**
+ * Calculates header size given the array of roots.
+ *
+ * @name CarBufferWriter.headerLength({ roots })
+ * @param {object} options
+ * @param {CID[]} options.roots
+ * @returns {number}
+ */
+export const headerLength = ({ roots }) =>
+ calculateHeaderLength(roots.map(cid => cid.bytes.byteLength))
+
+/**
+ * Estimates header size given a count of the roots and the expected byte length
+ * of the root CIDs. The default length works for a standard CIDv1 with a
+ * single-byte multihash code, such as SHA2-256 (i.e. the most common CIDv1).
+ *
+ * @name CarBufferWriter.estimateHeaderLength(rootCount[, rootByteLength])
+ * @param {number} rootCount
+ * @param {number} [rootByteLength]
+ * @returns {number}
+ */
+export const estimateHeaderLength = (rootCount, rootByteLength = 36) =>
+ calculateHeaderLength(new Array(rootCount).fill(rootByteLength))
+
+/**
+ * Creates synchronous CAR writer that can be used to encode blocks into a given
+ * buffer. Optionally you could pass `byteOffset` and `byteLength` to specify a
+ * range inside buffer to write into. If car file is going to have `roots` you
+ * need to either pass them under `options.roots` (from which header size will
+ * be calculated) or provide `options.headerSize` to allocate required space
+ * in the buffer. You may also provide known `roots` and `headerSize` to
+ * allocate space for the roots that may not be known ahead of time.
+ *
+ * Note: Incorrect `headerSize` may lead to copying bytes inside a buffer
+ * which will have a negative impact on performance.
+ *
+ * @name CarBufferWriter.createWriter(buffer[, options])
+ * @param {ArrayBuffer} buffer
+ * @param {object} [options]
+ * @param {CID[]} [options.roots]
+ * @param {number} [options.byteOffset]
+ * @param {number} [options.byteLength]
+ * @param {number} [options.headerSize]
+ * @returns {CarBufferWriter}
+ */
+export const createWriter = (
+ buffer,
+ {
+ roots = [],
+ byteOffset = 0,
+ byteLength = buffer.byteLength,
+ headerSize = headerLength({ roots })
+ } = {}
+) => {
+ const bytes = new Uint8Array(buffer, byteOffset, byteLength)
+
+ const writer = new CarBufferWriter(bytes, headerSize)
+ for (const root of roots) {
+ writer.addRoot(root)
+ }
+
+ return writer
+}
diff --git a/package.json b/package.json
index 3f7430b..1a09c74 100644
--- a/package.json
+++ b/package.json
@@ -19,7 +19,7 @@
"test": "npm run lint && npm run test:node && npm run test:cjs && npm run test --prefix examples/",
"test:ci": "npm run lint && npm run test:node && npm run test:esm && npm run test:cjs && npm run test --prefix examples/",
"coverage": "c8 --reporter=html --reporter=text mocha test/test-*.js && npx st -d coverage -p 8888",
- "docs": "jsdoc4readme --readme --description-only lib/reader*.js lib/indexed-reader.js lib/iterator.js lib/indexer.js lib/writer*.js lib/decoder.js"
+ "docs": "jsdoc4readme --readme --description-only lib/reader*.js lib/indexed-reader.js lib/iterator.js lib/indexer.js lib/writer*.js lib/buffer-writer.js lib/decoder.js"
},
"keywords": [
"car",
@@ -54,10 +54,14 @@
"./writer": {
"browser": "./lib/writer-browser.js",
"import": "./lib/writer.js"
+ },
+ "./buffer-writer": {
+ "import": "./lib/buffer-writer.js"
}
},
"dependencies": {
"@ipld/dag-cbor": "^7.0.0",
+ "cborg": "^1.9.0",
"multiformats": "^9.5.4",
"varint": "^6.0.0"
},
@@ -117,6 +121,9 @@
"writer": [
"types/lib/writer.d.ts"
],
+ "buffer-writer": [
+ "types/lib/buffer-writer.d.ts"
+ ],
"*": [
"types/*"
],
diff --git a/test/test-buffer-writer.js b/test/test-buffer-writer.js
new file mode 100644
index 0000000..1142a20
--- /dev/null
+++ b/test/test-buffer-writer.js
@@ -0,0 +1,256 @@
+/* eslint-env mocha */
+
+import * as CarBufferWriter from '@ipld/car/buffer-writer'
+import { CarReader } from '@ipld/car/reader'
+import { createHeader } from '../lib/encoder.js'
+import { assert } from './common.js'
+import { CID, varint } from 'multiformats'
+import * as CBOR from '@ipld/dag-cbor'
+import { sha256, sha512 } from 'multiformats/hashes/sha2'
+import { identity } from 'multiformats/hashes/identity'
+import * as Raw from 'multiformats/codecs/raw'
+import * as Block from 'multiformats/block'
+
+describe('CarBufferWriter', () => {
+ const cid = CID.parse('bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu')
+ describe('calculateHeaderLength', async () => {
+ for (const count of [0, 1, 10, 18, 24, 48, 124, 255, 258, 65536 - 1, 65536]) {
+ it(`calculateHeaderLength(new Array(${count}).fill(36))`, () => {
+ const roots = new Array(count).fill(cid)
+ const sizes = new Array(count).fill(cid.bytes.byteLength)
+ assert.deepEqual(
+ CarBufferWriter.calculateHeaderLength(sizes),
+ createHeader(roots).byteLength
+ )
+ })
+    it(`calculateHeaderLength(roots.map(c => c.bytes.byteLength)) with ${count} roots`, () => {
+ const roots = new Array(count).fill(cid)
+ const rootLengths = roots.map((c) => c.bytes.byteLength)
+ assert.deepEqual(CarBufferWriter.calculateHeaderLength(rootLengths), createHeader(roots).byteLength)
+ })
+ }
+ it('estimate on large CIDs', () => {
+ const largeCID = CID.parse(`bafkqbbac${'a'.repeat(416)}`)
+ assert.equal(
+ CarBufferWriter.calculateHeaderLength([
+ cid.bytes.byteLength,
+ largeCID.bytes.byteLength
+ ]),
+ createHeader([
+ cid,
+ largeCID
+ ]).byteLength
+ )
+ })
+
+ it('estimate on large CIDs 2', () => {
+ const largeCID = CID.createV1(Raw.code, identity.digest(new Uint8Array(512).fill(1)))
+ assert.equal(
+ CarBufferWriter.calculateHeaderLength([
+ cid.bytes.byteLength,
+ largeCID.bytes.byteLength
+ ]),
+ createHeader([cid, largeCID]).byteLength
+ )
+ })
+ })
+
+ describe('writer', () => {
+ it('estimate header and write blocks', async () => {
+ const headerSize = CarBufferWriter.estimateHeaderLength(1)
+ const dataSize = 256
+ const buffer = new ArrayBuffer(headerSize + dataSize)
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize })
+ const b1 = await Block.encode({
+ value: { hello: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+
+ writer.write(b1)
+
+ const b2 = await Block.encode({
+ value: { bye: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+ writer.write(b2)
+
+ writer.addRoot(b1.cid)
+ const bytes = writer.close()
+
+ const reader = await CarReader.fromBytes(bytes)
+ assert.deepEqual(await reader.getRoots(), [b1.cid])
+ assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }])
+ })
+
+ it('overestimate header', async () => {
+ const headerSize = CarBufferWriter.estimateHeaderLength(2)
+ const dataSize = 256
+ const buffer = new ArrayBuffer(headerSize + dataSize)
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize })
+ const b1 = await Block.encode({
+ value: { hello: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+
+ writer.write(b1)
+
+ const b2 = await Block.encode({
+ value: { bye: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+ writer.write(b2)
+
+ writer.addRoot(b1.cid)
+      assert.throws(() => writer.close(), /Header size was overestimated/)
+ const bytes = writer.close({ resize: true })
+
+ const reader = await CarReader.fromBytes(bytes)
+ assert.deepEqual(await reader.getRoots(), [b1.cid])
+ assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }])
+ })
+
+ it('underestimate header', async () => {
+ const headerSize = CarBufferWriter.estimateHeaderLength(2)
+ const dataSize = 300
+ const buffer = new ArrayBuffer(headerSize + dataSize)
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize })
+ const b1 = await Block.encode({
+ value: { hello: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+
+ writer.write(b1)
+ writer.addRoot(b1.cid)
+
+ const b2 = await Block.encode({
+ value: { bye: 'world' },
+ codec: CBOR,
+ hasher: sha512
+ })
+ writer.write(b2)
+ assert.throws(() => writer.addRoot(b2.cid), /has no capacity/)
+ writer.addRoot(b2.cid, { resize: true })
+
+ const bytes = writer.close()
+
+ const reader = await CarReader.fromBytes(bytes)
+ assert.deepEqual(await reader.getRoots(), [b1.cid, b2.cid])
+ assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }])
+ })
+ })
+
+ it('has no space for the root', async () => {
+ const headerSize = CarBufferWriter.estimateHeaderLength(1)
+ const dataSize = 100
+ const buffer = new ArrayBuffer(headerSize + dataSize)
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize })
+ const b1 = await Block.encode({
+ value: { hello: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+
+ writer.write(b1)
+ writer.addRoot(b1.cid)
+
+ const b2 = await Block.encode({
+ value: { bye: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+ writer.write(b2)
+ assert.throws(() => writer.addRoot(b2.cid), /Buffer has no capacity for a new root/)
+ assert.throws(() => writer.addRoot(b2.cid, { resize: true }), /Buffer has no capacity for a new root/)
+
+ const bytes = writer.close()
+
+ const reader = await CarReader.fromBytes(bytes)
+ assert.deepEqual(await reader.getRoots(), [b1.cid])
+ assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }])
+ })
+
+ it('has no space for the block', async () => {
+ const headerSize = CarBufferWriter.estimateHeaderLength(1)
+ const dataSize = 58
+ const buffer = new ArrayBuffer(headerSize + dataSize)
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize })
+ const b1 = await Block.encode({
+ value: { hello: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+
+ writer.write(b1)
+ writer.addRoot(b1.cid)
+
+ const b2 = await Block.encode({
+ value: { bye: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+ assert.throws(() => writer.write(b2), /Buffer has no capacity for this block/)
+
+ const bytes = writer.close()
+
+ const reader = await CarReader.fromBytes(bytes)
+ assert.deepEqual(await reader.getRoots(), [b1.cid])
+ assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }])
+ })
+
+ it('provide roots', async () => {
+ const b1 = await Block.encode({
+ value: { hello: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+ const b2 = await Block.encode({
+ value: { bye: 'world' },
+ codec: CBOR,
+ hasher: sha512
+ })
+
+ const buffer = new ArrayBuffer(300)
+ const writer = CarBufferWriter.createWriter(buffer, { roots: [b1.cid, b2.cid] })
+
+ writer.write(b1)
+ writer.write(b2)
+
+ const bytes = writer.close()
+
+ const reader = await CarReader.fromBytes(bytes)
+ assert.deepEqual(await reader.getRoots(), [b1.cid, b2.cid])
+ assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }])
+ })
+
+ it('provide large CID root', async () => {
+ const bytes = new Uint8Array(512).fill(1)
+ const b1 = await Block.encode({
+ value: { hello: 'world' },
+ codec: CBOR,
+ hasher: sha256
+ })
+
+ const b2 = {
+ cid: CID.createV1(Raw.code, identity.digest(bytes)),
+ bytes
+ }
+
+ const headerSize = CBOR.encode({ version: 1, roots: [b1.cid, b2.cid] }).byteLength
+ const bodySize = CarBufferWriter.blockLength(b1) + CarBufferWriter.blockLength(b2)
+ const varintSize = varint.encodingLength(headerSize)
+
+ const writer = CarBufferWriter.createWriter(new ArrayBuffer(varintSize + headerSize + bodySize), { roots: [b1.cid, b2.cid] })
+
+ writer.write(b1)
+ writer.write(b2)
+ const car = writer.close()
+ const reader = await CarReader.fromBytes(car)
+ assert.deepEqual(await reader.getRoots(), [b1.cid, b2.cid])
+ assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }])
+ })
+})
diff --git a/tsconfig.json b/tsconfig.json
index 9eb53ca..b34d5fc 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -31,6 +31,7 @@
"paths": {
"@ipld/car": [ "car.js", "car-browser.js", "lib/" ],
"@ipld/car/writer": [ "./lib/writer.js" ],
+ "@ipld/car/buffer-writer": ["./lib/buffer-writer.js"],
"@ipld/car/reader": [ "./lib/reader.js" ],
"@ipld/car/indexed-reader": [ "./lib/indexed-reader.js" ],
"@ipld/car/iterator": [ "./lib/iterator.js" ],