Skip to content

Commit

Permalink
feat: configure UnixFS encoder (storacha#1509)
Browse files Browse the repository at this point in the history
This allows a UnixFS DAG to be encoded that uses v0 CIDs.

I don't really want to encourage this, but the alternative is asking
users to build their own DAGs, encode their own CAR and then pass it to
`uploadCAR`.
  • Loading branch information
Alan Shaw authored Jun 19, 2024
1 parent fd74cbb commit 1a5e648
Show file tree
Hide file tree
Showing 6 changed files with 49 additions and 14 deletions.
4 changes: 2 additions & 2 deletions packages/upload-client/src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,12 @@ export * as Receipt from './receipts.js'
* The issuer needs the `blob/add`, `index/add`, `filecoin/offer` and
* `upload/add` delegated capability.
* @param {import('./types.js').BlobLike} file File data.
* @param {import('./types.js').UploadOptions} [options]
* @param {import('./types.js').UploadFileOptions} [options]
*/
export async function uploadFile(conf, file, options = {}) {
  // Encode the file as a UnixFS DAG block stream — forwarding `options` so
  // any custom encoder settings take effect — then hand it off for upload.
  const blocks = UnixFS.createFileEncoderStream(file, options)
  return await uploadBlockStream(conf, blocks, options)
}
Expand Down
23 changes: 19 additions & 4 deletions packages/upload-client/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import type {
ProgressStatus as XHRProgressStatus,
} from 'ipfs-utils/src/types.js'
import { Link, UnknownLink, Version, MultihashHasher } from 'multiformats'
import { Block } from '@ipld/unixfs'
import { Block, EncoderSettings } from '@ipld/unixfs'
import {
ServiceMethod,
ConnectionView,
Expand Down Expand Up @@ -347,6 +347,13 @@ export interface UnixFSDirectoryEncoderOptions {
onDirectoryEntryLink?: (link: DirectoryEntryLink) => void
}

/**
 * Options that allow the UnixFS encoder configuration to be customized,
 * e.g. to control chunking, layout or link (CID) creation.
 */
export interface UnixFSEncoderSettingsOptions {
  /**
   * Settings for UnixFS encoding.
   */
  settings?: EncoderSettings
}

export interface ShardingOptions {
/**
* The target shard size. Actual size of CAR output may be bigger due to CAR
Expand Down Expand Up @@ -379,11 +386,19 @@ export interface UploadOptions
pieceHasher?: MultihashHasher<typeof pieceHashCode>
}

/**
 * Options for uploading a single file: the common upload options plus
 * optional UnixFS encoder settings.
 */
export interface UploadFileOptions
  extends UploadOptions,
    UnixFSEncoderSettingsOptions {}

export interface UploadDirectoryOptions
extends UploadOptions,
UnixFSDirectoryEncoderOptions,
UploadProgressTrackable {
/** whether the directory files have already been ordered in a custom way. indicates that the upload must not use a different order than the one provided. */
UnixFSEncoderSettingsOptions,
UnixFSDirectoryEncoderOptions {
/**
* Whether the directory files have already been ordered in a custom way.
* Indicates that the upload must not use a different order than the one
* provided.
*/
customOrder?: boolean
}

Expand Down
16 changes: 10 additions & 6 deletions packages/upload-client/src/unixfs.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import { withWidth } from '@ipld/unixfs/file/layout/balanced'
const SHARD_THRESHOLD = 1000 // shard directory after > 1,000 items
const queuingStrategy = UnixFS.withCapacity()

const settings = UnixFS.configure({
const defaultSettings = UnixFS.configure({
fileChunkEncoder: raw,
smallFileEncoder: raw,
chunker: withMaxChunkSize(1024 * 1024),
Expand All @@ -15,22 +15,25 @@ const settings = UnixFS.configure({

/**
 * Encode a blob as a UnixFS file DAG, collecting every generated block
 * into memory.
 *
 * @param {import('./types.js').BlobLike} blob
 * @param {import('./types.js').UnixFSEncoderSettingsOptions} [options]
 * @returns {Promise<import('./types.js').UnixFSEncodeResult>}
 */
export async function encodeFile(blob, options) {
  const blocks = await collect(createFileEncoderStream(blob, options))
  // The encoder stream emits the root block last.
  // @ts-expect-error There is always a root block
  return { cid: blocks.at(-1).cid, blocks }
}

/**
* @param {import('./types.js').BlobLike} blob
* @param {import('./types.js').UnixFSEncoderSettingsOptions} [options]
* @returns {ReadableStream<import('@ipld/unixfs').Block>}
*/
export function createFileEncoderStream(blob) {
export function createFileEncoderStream(blob, options) {
/** @type {TransformStream<import('@ipld/unixfs').Block, import('@ipld/unixfs').Block>} */
const { readable, writable } = new TransformStream({}, queuingStrategy)
const settings = options?.settings ?? defaultSettings
const unixfsWriter = UnixFS.createWriter({ writable, settings })
const fileBuilder = new UnixFSFileBuilder('', blob)
void (async () => {
Expand Down Expand Up @@ -101,7 +104,7 @@ class UnixFSDirectoryBuilder {

/**
* @param {Iterable<import('./types.js').FileLike>} files
* @param {import('./types.js').UnixFSDirectoryEncoderOptions} [options]
* @param {import('./types.js').UnixFSEncoderSettingsOptions & import('./types.js').UnixFSDirectoryEncoderOptions} [options]
* @returns {Promise<import('./types.js').UnixFSEncodeResult>}
*/
export async function encodeDirectory(files, options) {
Expand All @@ -113,7 +116,7 @@ export async function encodeDirectory(files, options) {

/**
* @param {Iterable<import('./types.js').FileLike>} files
* @param {import('./types.js').UnixFSDirectoryEncoderOptions} [options]
* @param {import('./types.js').UnixFSEncoderSettingsOptions & import('./types.js').UnixFSDirectoryEncoderOptions} [options]
* @returns {ReadableStream<import('@ipld/unixfs').Block>}
*/
export function createDirectoryEncoderStream(files, options) {
Expand Down Expand Up @@ -145,6 +148,7 @@ export function createDirectoryEncoderStream(files, options) {

/** @type {TransformStream<import('@ipld/unixfs').Block, import('@ipld/unixfs').Block>} */
const { readable, writable } = new TransformStream({}, queuingStrategy)
const settings = options?.settings ?? defaultSettings
const unixfsWriter = UnixFS.createWriter({ writable, settings })
void (async () => {
const link = await rootDir.finalize(unixfsWriter)
Expand Down
17 changes: 16 additions & 1 deletion packages/upload-client/test/unixfs.test.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
import assert from 'assert'
import { decode, NodeType } from '@ipld/unixfs'
import { decode, NodeType, defaults } from '@ipld/unixfs'
import { exporter } from 'ipfs-unixfs-exporter'
// @ts-expect-error this version of blockstore-core doesn't point to correct types file in package.json, and upgrading to latest version that fixes that leads to api changes
import { MemoryBlockstore } from 'blockstore-core/memory'
import * as raw from 'multiformats/codecs/raw'
import * as Link from 'multiformats/link'
import path from 'path'
import { encodeFile, encodeDirectory } from '../src/unixfs.js'
import { File } from './helpers/shims.js'
Expand Down Expand Up @@ -105,6 +106,20 @@ describe('UnixFS', () => {
assert.equal(cid.code, raw.code)
})

it('configured to output v0 CIDs', async () => {
  // Override the default linker so the encoder emits CIDv0 links.
  const settings = {
    ...defaults(),
    linker: {
      // @ts-expect-error createLegacy digest type is narrower than the linker expects
      createLink: (_, digest) => Link.createLegacy(digest),
    },
  }
  const { cid } = await encodeFile(new Blob(['test']), { settings })
  assert.equal(cid.version, 0)
})

it('callback for each directory entry link', async () => {
const files = [
new File(['file'], 'file.txt'),
Expand Down
2 changes: 1 addition & 1 deletion packages/w3up-client/src/client.js
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ export class Client extends Base {
* - `upload/add`
*
* @param {import('./types.js').BlobLike} file - File data.
* @param {import('./types.js').UploadOptions} [options]
* @param {import('./types.js').UploadFileOptions} [options]
*/
async uploadFile(file, options = {}) {
const conf = await this._invocationConfig([
Expand Down
1 change: 1 addition & 0 deletions packages/w3up-client/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,7 @@ export type {
ShardingOptions,
ShardStoringOptions,
UploadOptions,
UploadFileOptions,
UploadDirectoryOptions,
FileLike,
BlobLike,
Expand Down

0 comments on commit 1a5e648

Please sign in to comment.