diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1105264c01..05be9120ae 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -366,9 +366,9 @@ jobs: - name: ipfs custom ipfs repo repo: https://github.com/ipfs-examples/js-ipfs-custom-ipfs-repo.git deps: ipfs-core@$PWD/packages/ipfs-core/dist - - name: ipfs custom ipld formats - repo: https://github.com/ipfs-examples/js-ipfs-custom-ipld-formats.git - deps: ipfs-core@$PWD/packages/ipfs-core/dist,ipfs-daemon@$PWD/packages/ipfs-daemon/dist,ipfs-http-client@$PWD/packages/ipfs-http-client/dist + #- name: ipfs custom ipld formats + # repo: https://github.com/ipfs-examples/js-ipfs-custom-ipld-formats.git + # deps: ipfs-core@$PWD/packages/ipfs-core/dist,ipfs-daemon@$PWD/packages/ipfs-daemon/dist,ipfs-http-client@$PWD/packages/ipfs-http-client/dist - name: ipfs custom libp2p repo: https://github.com/ipfs-examples/js-ipfs-custom-libp2p.git deps: ipfs-core@$PWD/packages/ipfs-core/dist @@ -396,9 +396,9 @@ jobs: - name: ipfs running multiple nodes repo: https://github.com/ipfs-examples/js-ipfs-running-multiple-nodes.git deps: ipfs@$PWD/packages/ipfs/dist - - name: ipfs traverse ipld graphs - repo: https://github.com/ipfs-examples/js-ipfs-traverse-ipld-graphs.git - deps: ipfs-core@$PWD/packages/ipfs-core/dist + #- name: ipfs traverse ipld graphs + # repo: https://github.com/ipfs-examples/js-ipfs-traverse-ipld-graphs.git + # deps: ipfs-core@$PWD/packages/ipfs-core/dist - name: types with typescript repo: https://github.com/ipfs-examples/js-ipfs-types-use-ipfs-from-ts.git deps: ipfs-core@$PWD/packages/ipfs-core/dist diff --git a/docs/core-api/DAG.md b/docs/core-api/DAG.md index c617d4630d..4004796d67 100644 --- a/docs/core-api/DAG.md +++ b/docs/core-api/DAG.md @@ -88,12 +88,13 @@ An optional object which may have the following keys: | Name | Type | Default | Description | | ---- | ---- | ------- | ----------- | -| format | `String` | `'dag-cbor'` | The IPLD format multicodec | +| storeCodec | `String` | `'dag-cbor'` | The codec that the stored object will be encoded with | +| inputCodec | `String` | `undefined` | If an already encoded object is provided (as a `Uint8Array`), the codec that the object is encoded with, otherwise it is assumed the `dagNode` argument is an object to be encoded | | hashAlg | `String` | `'sha2-256'` | The hash algorithm to be used over the serialized DAG node | | cid | [CID][] | `'dag-cbor'` | The IPLD format multicodec | | pin | `boolean` | `false` | Pin this node when adding to the blockstore | | timeout | `Number` | `undefined` | A timeout in ms | -| signal | [AbortSignal][] | `undefined` | Can be used to cancel any long running requests started as a result of this call | +| signal | [AbortSignal][] | `undefined` | Can be used to cancel any long running requests started as a result of this call | **Note**: You should pass `cid` or the `format` & `hashAlg` pair but _not both_. 
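A minimal sketch of the two ways to call `dag.put` described in the options table above, assuming `@ipld/dag-cbor` is installed and an `ipfs` instance is in scope: passing a plain object lets `storeCodec` do the encoding, while passing a pre-encoded `Uint8Array` together with `inputCodec` tells the node how to decode it before re-encoding with `storeCodec`.

```JavaScript
import * as dagCBOR from '@ipld/dag-cbor'

const obj = { simple: 'object' }

// plain object: encoded with storeCodec before being stored
const cidA = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' })

// pre-encoded bytes: inputCodec says how to decode them, storeCodec how to store them
const bytes = dagCBOR.encode(obj)
const cidB = await ipfs.dag.put(bytes, { inputCodec: 'dag-cbor', storeCodec: 'dag-cbor', hashAlg: 'sha2-256' })

console.log(cidA.equals(cidB)) // true - both paths store the same dag-cbor block
```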
@@ -107,7 +108,7 @@ An optional object which may have the following keys: ```JavaScript const obj = { simple: 'object' } -const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-512' }) +const cid = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-512' }) console.log(cid.toString()) // zBwWX9ecx5F4X54WAjmFLErnBT6ByfNxStr5ovowTL7AhaUR98RWvXPS1V3HqV1qs3r5Ec5ocv7eCdbqYQREXNUfYNuKG @@ -160,7 +161,7 @@ const obj = { } } -const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) +const cid = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) console.log(cid.toString()) // zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5 @@ -272,7 +273,7 @@ const obj = { } } -const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) +const cid = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) console.log(cid.toString()) // bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq diff --git a/packages/interface-ipfs-core/src/block/rm.js b/packages/interface-ipfs-core/src/block/rm.js index 924c0dc069..e7248c7c11 100644 --- a/packages/interface-ipfs-core/src/block/rm.js +++ b/packages/interface-ipfs-core/src/block/rm.js @@ -39,7 +39,7 @@ export function testRm (factory, options) { it('should remove by CID object', async () => { const cid = await ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }) @@ -61,15 +61,15 @@ export function testRm (factory, options) { it('should remove multiple CIDs', async () => { const cids = await Promise.all([ ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }), ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }), ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }) ]) @@ -86,7 +86,7 @@ export function testRm (factory, options) { it('should error when removing non-existent blocks', async () => { const cid = await ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }) @@ -102,7 +102,7 @@ export function testRm (factory, options) { it('should not error when force removing non-existent blocks', async () => { const cid = await ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }) @@ -119,7 +119,7 @@ export function testRm (factory, options) { it('should return empty output when removing blocks quietly', async () => { const cid = await ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }) const result = await all(ipfs.block.rm(cid, { quiet: true })) @@ -129,7 +129,7 @@ export function testRm (factory, options) { it('should error when removing pinned blocks', async () => { const cid = await ipfs.dag.put(uint8ArrayFromString(nanoid()), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }) await ipfs.pin.add(cid) diff --git a/packages/interface-ipfs-core/src/dag/get.js b/packages/interface-ipfs-core/src/dag/get.js index e95d65a5a4..3f81031df2 100644 --- a/packages/interface-ipfs-core/src/dag/get.js +++ b/packages/interface-ipfs-core/src/dag/get.js @@ -73,7 +73,7 @@ export function testGet (factory, options) { Data: uint8ArrayFromString('I am inside a Protobuf'), Links: [] } - cidPb = CID.createV0(await sha256.digest(dagPB.encode(nodePb))) + cidPb = CID.createV1(dagPB.code, await 
sha256.digest(dagPB.encode(nodePb))) nodeCbor = { someData: 'I am inside a Cbor object', pb: cidPb @@ -81,8 +81,8 @@ export function testGet (factory, options) { cidCbor = CID.createV1(dagCBOR.code, await sha256.digest(dagCBOR.encode(nodeCbor))) - await ipfs.dag.put(nodePb, { format: 'dag-pb', hashAlg: 'sha2-256' }) - await ipfs.dag.put(nodeCbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + await ipfs.dag.put(nodePb, { storeCodec: 'dag-pb', hashAlg: 'sha2-256' }) + await ipfs.dag.put(nodeCbor, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) }) it('should respect timeout option when getting a DAG node', () => { @@ -93,7 +93,7 @@ export function testGet (factory, options) { it('should get a dag-pb node', async () => { const cid = await ipfs.dag.put(pbNode, { - format: 'dag-pb', + storeCodec: 'dag-pb', hashAlg: 'sha2-256' }) @@ -105,7 +105,7 @@ export function testGet (factory, options) { it('should get a dag-cbor node', async () => { const cid = await ipfs.dag.put(cborNode, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) @@ -122,7 +122,7 @@ export function testGet (factory, options) { const node = result.value - const cid = CID.createV0(await sha256.digest(dagPB.encode(node))) + const cid = CID.createV1(dagPB.code, await sha256.digest(dagPB.encode(node))) expect(cid.equals(cidPb)).to.be.true() }) @@ -192,7 +192,7 @@ export function testGet (factory, options) { } const cid = await ipfs.dag.put(node, { - format: 'dag-pb', + storeCodec: 'dag-pb', hashAlg: 'sha2-256', version: 0 }) @@ -226,7 +226,7 @@ export function testGet (factory, options) { foo: 'dag-cbor-bar' } - const cid = await ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(cbor, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) expect(cid.code).to.equal(dagCBOR.code) expect(cid.toString(base32)).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') @@ -241,10 +241,10 @@ export function testGet (factory, options) { foo: 'dag-cbor-bar' } - const cid1 = await ipfs.dag.put(cbor1, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid1 = await ipfs.dag.put(cbor1, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) const cbor2 = { other: cid1 } - const cid2 = await ipfs.dag.put(cbor2, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid2 = await ipfs.dag.put(cbor2, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) const result = await ipfs.dag.get(cid2, { path: 'other/foo' @@ -256,7 +256,7 @@ export function testGet (factory, options) { const buf = Uint8Array.from([0, 1, 2, 3]) const cid = await ipfs.dag.put(buf, { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }) diff --git a/packages/interface-ipfs-core/src/dag/index.js b/packages/interface-ipfs-core/src/dag/index.js index 42ad8907bb..fe401be7d0 100644 --- a/packages/interface-ipfs-core/src/dag/index.js +++ b/packages/interface-ipfs-core/src/dag/index.js @@ -5,13 +5,15 @@ import { testGet } from './get.js' import { testPut } from './put.js' import { testImport } from './import.js' import { testResolve } from './resolve.js' +import { testDagSharnessT0053 } from './sharness-t0053-dag.js' const tests = { export: testExport, get: testGet, put: testPut, import: testImport, - resolve: testResolve + resolve: testResolve, + dagSharnessT0053: testDagSharnessT0053 } export default createSuite(tests) diff --git a/packages/interface-ipfs-core/src/dag/put.js b/packages/interface-ipfs-core/src/dag/put.js index 891d8afe85..cd7b45067c 100644 --- a/packages/interface-ipfs-core/src/dag/put.js +++ 
b/packages/interface-ipfs-core/src/dag/put.js @@ -37,35 +37,35 @@ export function testPut (factory, options) { it('should put dag-pb with default hash func (sha2-256)', () => { return ipfs.dag.put(pbNode, { - format: 'dag-pb', + storeCodec: 'dag-pb', hashAlg: 'sha2-256' }) }) it('should put dag-pb with non-default hash func (sha2-512)', () => { return ipfs.dag.put(pbNode, { - format: 'dag-pb', + storeCodec: 'dag-pb', hashAlg: 'sha2-512' }) }) it('should put dag-cbor with default hash func (sha2-256)', () => { return ipfs.dag.put(cborNode, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) }) it('should put dag-cbor with non-default hash func (sha2-512)', () => { return ipfs.dag.put(cborNode, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-512' }) }) it('should return the cid', async () => { const cid = await ipfs.dag.put(cborNode, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) expect(cid).to.exist() @@ -90,7 +90,7 @@ export function testPut (factory, options) { it('should override hash algorithm default and resolve with it', async () => { const cid = await ipfs.dag.put(cborNode, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-512' }) expect(cid.code).to.equal(dagCBOR.code) diff --git a/packages/interface-ipfs-core/src/dag/resolve.js b/packages/interface-ipfs-core/src/dag/resolve.js index 1767cc76c8..a45db89a1a 100644 --- a/packages/interface-ipfs-core/src/dag/resolve.js +++ b/packages/interface-ipfs-core/src/dag/resolve.js @@ -26,7 +26,7 @@ export function testResolve (factory, options) { after(() => factory.clean()) it('should respect timeout option when resolving a path within a DAG node', async () => { - const cid = await ipfs.dag.put({}, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put({}, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) return testTimeout(() => ipfs.dag.resolve(cid, { timeout: 1 @@ -43,7 +43,7 @@ export function testResolve (factory, options) { } } - const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) const result = await ipfs.dag.resolve(`${cid}/c/cb`) expect(result).to.have.deep.property('cid', cid) @@ -60,7 +60,7 @@ export function testResolve (factory, options) { } } - const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) const result = await ipfs.dag.resolve(cid, { path: '/c/cb' }) expect(result).to.have.deep.property('cid', cid) @@ -72,7 +72,7 @@ export function testResolve (factory, options) { ca: [5, 6, 7], cb: 'foo' } - const cid0 = await ipfs.dag.put(obj0, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid0 = await ipfs.dag.put(obj0, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) const obj1 = { a: 1, @@ -80,7 +80,7 @@ export function testResolve (factory, options) { c: cid0 } - const cid1 = await ipfs.dag.put(obj1, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid1 = await ipfs.dag.put(obj1, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) const result = await ipfs.dag.resolve(`/ipfs/${cid1}/c/cb`) expect(result).to.have.deep.property('cid', cid0) @@ -92,7 +92,7 @@ export function testResolve (factory, options) { ca: [5, 6, 7], cb: 'foo' } - const cid0 = await ipfs.dag.put(obj0, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid0 = await ipfs.dag.put(obj0, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' 
}) const obj1 = { a: 1, @@ -100,7 +100,7 @@ export function testResolve (factory, options) { c: cid0 } - const cid1 = await ipfs.dag.put(obj1, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid1 = await ipfs.dag.put(obj1, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) const result = await ipfs.dag.resolve(cid1, { path: '/c/cb' }) expect(result).to.have.deep.property('cid', cid0) @@ -109,7 +109,7 @@ export function testResolve (factory, options) { it('should resolve a raw node', async () => { const node = uint8ArrayFromString('hello world') - const cid = await ipfs.dag.put(node, { format: 'raw', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(node, { storeCodec: 'raw', hashAlg: 'sha2-256' }) const result = await ipfs.dag.resolve(cid, { path: '/' }) expect(result).to.have.deep.property('cid', cid) @@ -122,7 +122,7 @@ export function testResolve (factory, options) { Data: someData, Links: [] } - const childCid = await ipfs.dag.put(childNode, { format: 'dag-pb', hashAlg: 'sha2-256' }) + const childCid = await ipfs.dag.put(childNode, { storeCodec: 'dag-pb', hashAlg: 'sha2-256' }) const linkToChildNode = { Name: 'foo', @@ -133,7 +133,7 @@ export function testResolve (factory, options) { Data: uint8ArrayFromString('derp'), Links: [linkToChildNode] } - const parentCid = await ipfs.dag.put(parentNode, { format: 'dag-pb', hashAlg: 'sha2-256' }) + const parentCid = await ipfs.dag.put(parentNode, { storeCodec: 'dag-pb', hashAlg: 'sha2-256' }) const result = await ipfs.dag.resolve(parentCid, { path: '/foo' }) expect(result).to.have.deep.property('cid', childCid) diff --git a/packages/interface-ipfs-core/src/dag/sharness-t0053-dag.js b/packages/interface-ipfs-core/src/dag/sharness-t0053-dag.js new file mode 100644 index 0000000000..53648ae216 --- /dev/null +++ b/packages/interface-ipfs-core/src/dag/sharness-t0053-dag.js @@ -0,0 +1,182 @@ +/* eslint-env mocha */ + +import { expect } from 'aegir/utils/chai.js' +import { getDescribe, getIt } from '../utils/mocha.js' +import { base64pad } from 'multiformats/bases/base64' +import { base58btc } from 'multiformats/bases/base58' +import { CID } from 'multiformats' + +/** + * @typedef {import('ipfsd-ctl').Factory} Factory + */ + +/** + * @param {Factory} factory + * @param {Object} options + */ +export function testDagSharnessT0053 (factory, options) { + const describe = getDescribe(options) + const it = getIt(options) + + describe('.dag (sharness-t0053-dag)', () => { + /** @type {import('ipfs-core-types').IPFS} */ + let ipfs + before(async () => { ipfs = (await factory.spawn()).api }) + + after(() => factory.clean()) + + /** @type {CID} */ + let hash1 + /** @type {CID} */ + let hash2 + /** @type {CID} */ + let hash3 + /** @type {CID} */ + let hash4 + /** @type {Uint8Array} */ + let ipldObject + /** @type {Uint8Array} */ + let ipldObjectDagCbor + /** @type {Uint8Array} */ + let ipldObjectDagPb + /** @type {Uint8Array} */ + let ipldObjectDagJson + const ipldHash = 'bafyreiblwimnjbqcdoeafiobk6q27jcw64ew7n2fmmhdpldd63edmjecde' + const ipldDagCborHash = 'bafyreieculsmrexh3ty5jentbvuku452o27mst4h2tq2rb2zntqhgcstji' + const ipldDagJsonHash = 'baguqeerajwksxu3lxpomdwxvosl542zl3xknhjgxtq3277gafrhl6vdw5tcq' + const ipldDagPbHash = 'bafybeibazl2z4vqp2tmwcfag6wirmtpnomxknqcgrauj7m2yisrz3qjbom' + + before(async () => { + hash1 = (await ipfs.add({ content: 'foo\n', path: 'file1' })).cid + hash2 = (await ipfs.add({ content: 'bar\n', path: 'file2' })).cid + hash3 = (await ipfs.add({ content: 'baz\n', path: 'file3' })).cid + hash4 = (await ipfs.add({ content: 
'qux\n', path: 'file4' })).cid + + ipldObject = new TextEncoder().encode(`{"hello":"world","cats":[{"/":"${hash1}"},{"water":{"/":"${hash2}"}}],"magic":{"/":"${hash3}"},"sub":{"dict":"ionary","beep":[0,"bop"]}}`) + ipldObjectDagCbor = base64pad.decode('MomREYXRhRQABAgMEZUxpbmtzgA==') + ipldObjectDagPb = base64pad.decode('MCgUAAQIDBA==') + ipldObjectDagJson = new TextEncoder().encode('{"Data":{"/":{"bytes":"AAECAwQ"}},"Links":[]}') + }) + + it('sanity check', () => { + expect(hash1.toString()).to.equal('QmYNmQKp6SuaVrpgWRsPTgCQCnpxUYGq76YEKBXuj2N4H6') + expect(hash2.toString()).to.equal('QmTz3oc4gdpRMKP2sdGUPZTAGRngqjsi99BPoztyP53JMM') + expect(hash3.toString()).to.equal('QmWLdkp93sNxGRjnFHPaYg8tCQ35NBY3XPn6KiETd3Z4WR') + expect(hash4.toString()).to.equal('QmZCoKN8vvRbxfn4BMG9678UQTSUwPXRJsRA9jnjoucHUj') + }) + + it('can add an ipld object using defaults (dag-json to dag-cbor)', async () => { + // dag-json is default on CLI, force it to interpret our bytes here + const cid = await ipfs.dag.put(ipldObject, { inputCodec: 'dag-json' }) + expect(cid.toString()).to.equal(ipldHash) + }) + + it('can add an ipld object using dag-json to dag-json', async () => { + const cid = await ipfs.dag.put(ipldObject, { inputCodec: 'dag-json', storeCodec: 'dag-json' }) + expect(cid.toString()).to.equal('baguqeera6gviseelmbzn2ugoddo5vulxlshqs3kw5ymgsb6w4cabnoh4ldpa') + }) + + it('can add an ipld object using dag-json to dag-cbor', async () => { + const cid = await ipfs.dag.put(ipldObject, { inputCodec: 'dag-json', storeCodec: 'dag-cbor' }) + expect(cid.toString()).to.equal(ipldHash) + }) + + // this is not testing what the upstream sharness is testing since we're converting it locally + // and not asking the CLI for it, but it's included for completeness + it('can add an ipld object using cid-base=base58btc', async () => { + const cid = await ipfs.dag.put(ipldObject, { inputCodec: 'dag-json' }) + expect(cid.toString(base58btc)).to.equal('zdpuAoN1XJ3GsrxEzMuCbRKZzRUVJekJUCbPVgCgE4D9yYqVi') + }) + + // (1) dag-cbor input + + it('can add a dag-cbor input block stored as dag-cbor', async () => { + const cid = await ipfs.dag.put(ipldObjectDagCbor, { inputCodec: 'dag-cbor', storeCodec: 'dag-cbor' }) + expect(cid.toString()).to.equal(ipldDagCborHash) + }) + + it('can add a dag-cbor input block stored as dag-pb', async () => { + const cid = await ipfs.dag.put(ipldObjectDagCbor, { inputCodec: 'dag-cbor', storeCodec: 'dag-pb' }) + expect(cid.toString()).to.equal(ipldDagPbHash) + }) + + it('can add a dag-cbor input block stored as dag-json', async () => { + const cid = await ipfs.dag.put(ipldObjectDagCbor, { inputCodec: 'dag-cbor', storeCodec: 'dag-json' }) + expect(cid.toString()).to.equal(ipldDagJsonHash) + }) + + // (2) dag-json input + + it('can add a dag-json input block stored as dag-cbor', async () => { + const cid = await ipfs.dag.put(ipldObjectDagJson, { inputCodec: 'dag-json', storeCodec: 'dag-cbor' }) + expect(cid.toString()).to.equal(ipldDagCborHash) + }) + + it('can add a dag-json input block stored as dag-pb', async () => { + const cid = await ipfs.dag.put(ipldObjectDagJson, { inputCodec: 'dag-json', storeCodec: 'dag-pb' }) + expect(cid.toString()).to.equal(ipldDagPbHash) + }) + + it('can add a dag-json input block stored as dag-json', async () => { + const cid = await ipfs.dag.put(ipldObjectDagJson, { inputCodec: 'dag-json', storeCodec: 'dag-json' }) + expect(cid.toString()).to.equal(ipldDagJsonHash) + }) + + // (3) dag-pb input + + it('can add a dag-pb input block stored as dag-cbor', async () => { + const 
cid = await ipfs.dag.put(ipldObjectDagPb, { inputCodec: 'dag-pb', storeCodec: 'dag-cbor' }) + expect(cid.toString()).to.equal(ipldDagCborHash) + }) + + it('can add a dag-pb input block stored as dag-pb', async () => { + const cid = await ipfs.dag.put(ipldObjectDagPb, { inputCodec: 'dag-pb', storeCodec: 'dag-pb' }) + expect(cid.toString()).to.equal(ipldDagPbHash) + }) + + it('can add a dag-pb input block stored as dag-json', async () => { + const cid = await ipfs.dag.put(ipldObjectDagPb, { inputCodec: 'dag-pb', storeCodec: 'dag-json' }) + expect(cid.toString()).to.equal(ipldDagJsonHash) + }) + + it('can get dag-cbor, dag-json, dag-pb blocks as dag-json', async () => { + const resultCbor = await ipfs.dag.get(CID.parse(ipldDagCborHash)) + const resultJson = await ipfs.dag.get(CID.parse(ipldDagJsonHash)) + const resultPb = await ipfs.dag.get(CID.parse(ipldDagPbHash)) + expect(resultCbor).to.deep.equal(resultJson) + expect(resultCbor).to.deep.equal(resultPb) + }) + + /* + This is illustrative only - it's not testing anything meaningful. It's supposed to test + `outputCodec` which isn't supported for the http client or core since we get the decoded JS + form of the node. But this test code as it's written is doing the encode locally and + asserting on that .. which is just testing the codec. + + it('can get dag-pb block transcoded as dag-cbor', async () => { + const { value } = await ipfs.dag.get(CID.parse(ipldDagPbHash), { outputCodec: 'dag-cbor' }) + const block = await Block.encode({ value, codec: dagCbor, hasher: sha256 }) + expect(bytes.toHex(block.cid.multihash.bytes)).to.equal('122082a2e4c892e7dcf1d491b30d68aa73ba76bec94f87d4e1a887596ce0730a534a') + }) + */ + + // Skipped: 'dag put and dag get transcodings match' - tests the round-trip of the above + + it('resolving sub-objects works', async () => { + let result = await ipfs.dag.get(CID.parse(ipldHash), { path: 'hello' }) + expect(result.value).to.equal('world') + result = await ipfs.dag.get(CID.parse(ipldHash), { path: 'sub' }) + expect(result.value).to.deep.equal({ beep: [0, 'bop'], dict: 'ionary' }) + result = await ipfs.dag.get(CID.parse(ipldHash), { path: 'sub/beep' }) + expect(result.value).to.deep.equal([0, 'bop']) + result = await ipfs.dag.get(CID.parse(ipldHash), { path: 'sub/beep/0' }) + expect(result.value).to.equal(0) + result = await ipfs.dag.get(CID.parse(ipldHash), { path: 'sub/beep/1' }) + expect(result.value).to.equal('bop') + }) + + // Skipped: 'traversals using /ipld/ work' - not implemented here, yet? 
+ + // Skipped additional pin, resolve and other tests + }) +} diff --git a/packages/interface-ipfs-core/src/files/stat.js b/packages/interface-ipfs-core/src/files/stat.js index b27cff19d1..d5fc7ec91e 100644 --- a/packages/interface-ipfs-core/src/files/stat.js +++ b/packages/interface-ipfs-core/src/files/stat.js @@ -156,7 +156,7 @@ export function testStat (factory, options) { const path = '/cbor.node' const node = {} const cid = await ipfs.dag.put(node, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) await ipfs.files.cp(`/ipfs/${cid}`, path) diff --git a/packages/interface-ipfs-core/src/miscellaneous/resolve.js b/packages/interface-ipfs-core/src/miscellaneous/resolve.js index 551ea72354..8821f5b167 100644 --- a/packages/interface-ipfs-core/src/miscellaneous/resolve.js +++ b/packages/interface-ipfs-core/src/miscellaneous/resolve.js @@ -67,7 +67,7 @@ export function testResolve (factory, options) { it('should resolve up to the last node', async () => { const content = { path: { to: { file: nanoid() } } } - const options = { format: 'dag-cbor', hashAlg: 'sha2-256' } + const options = { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' } const cid = await ipfs.dag.put(content, options) const path = `/ipfs/${cid}/path/to/file` const resolved = await ipfs.resolve(path) @@ -76,7 +76,7 @@ export function testResolve (factory, options) { }) it('should resolve up to the last node across multiple nodes', async () => { - const options = { format: 'dag-cbor', hashAlg: 'sha2-256' } + const options = { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' } const childCid = await ipfs.dag.put({ node: { with: { file: nanoid() } } }, options) const parentCid = await ipfs.dag.put({ path: { to: childCid } }, options) const resolved = await ipfs.resolve(`/ipfs/${parentCid}/path/to/node/with/file`) diff --git a/packages/interface-ipfs-core/src/object/stat.js b/packages/interface-ipfs-core/src/object/stat.js index bb715fb29f..80fdb65388 100644 --- a/packages/interface-ipfs-core/src/object/stat.js +++ b/packages/interface-ipfs-core/src/object/stat.js @@ -41,7 +41,7 @@ export function testStat (factory, options) { const cid = await ipfs.object.put(testObj) const stats = await ipfs.object.stat(cid) const expected = { - Hash: CID.parse('QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ'), + Hash: CID.parse('QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ').toV1(), NumLinks: 0, BlockSize: 17, LinksSize: 2, diff --git a/packages/interface-ipfs-core/src/pin/add.js b/packages/interface-ipfs-core/src/pin/add.js index 1f042829cd..b9e397de1f 100644 --- a/packages/interface-ipfs-core/src/pin/add.js +++ b/packages/interface-ipfs-core/src/pin/add.js @@ -125,7 +125,7 @@ export function testAdd (factory, options) { it('should pin dag-cbor', async () => { const cid = await ipfs.dag.put({}, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) @@ -141,7 +141,7 @@ export function testAdd (factory, options) { it('should pin raw', async () => { const cid = await ipfs.dag.put(new Uint8Array(0), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }) @@ -160,13 +160,13 @@ export function testAdd (factory, options) { Data: uint8ArrayFromString(`${Math.random()}`), Links: [] }, { - format: 'dag-pb', + storeCodec: 'dag-pb', hashAlg: 'sha2-256' }) const parent = await ipfs.dag.put({ child }, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) diff --git a/packages/interface-ipfs-core/src/pin/remote/add.js b/packages/interface-ipfs-core/src/pin/remote/add.js index 8b0e389cf3..ab6481c8f5 
100644 --- a/packages/interface-ipfs-core/src/pin/remote/add.js +++ b/packages/interface-ipfs-core/src/pin/remote/add.js @@ -106,7 +106,7 @@ export function testAdd (factory, options) { }) it('should pin dag-cbor', async () => { const cid = await ipfs.dag.put({}, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) @@ -125,7 +125,7 @@ export function testAdd (factory, options) { it('should pin raw', async () => { const cid = await ipfs.dag.put(new Uint8Array(0), { - format: 'raw', + storeCodec: 'raw', hashAlg: 'sha2-256' }) diff --git a/packages/ipfs-cli/package.json b/packages/ipfs-cli/package.json index b6af9b801a..fbac0cb7dd 100644 --- a/packages/ipfs-cli/package.json +++ b/packages/ipfs-cli/package.json @@ -59,6 +59,7 @@ }, "dependencies": { "@ipld/dag-cbor": "^6.0.5", + "@ipld/dag-json": "^8.0.1", "@ipld/dag-pb": "^2.1.3", "byteman": "^1.3.5", "debug": "^4.1.1", diff --git a/packages/ipfs-cli/src/commands/dag/get.js b/packages/ipfs-cli/src/commands/dag/get.js index ec37667cf3..ebcaffa0f8 100644 --- a/packages/ipfs-cli/src/commands/dag/get.js +++ b/packages/ipfs-cli/src/commands/dag/get.js @@ -1,15 +1,21 @@ import parseDuration from 'parse-duration' import { toCidAndPath } from 'ipfs-core-utils/to-cid-and-path' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' -import { - stripControlCharacters, - makeEntriesPrintable, - escapeControlCharacters -} from '../../utils.js' import * as dagPB from '@ipld/dag-pb' import * as dagCBOR from '@ipld/dag-cbor' +import * as dagJSON from '@ipld/dag-json' import * as raw from 'multiformats/codecs/raw' +/** + * @template T + * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec + */ + +const codecs = [dagCBOR, dagJSON, dagPB, raw].reduce((/** @type {Record>} */ m, codec) => { + m[codec.name] = codec + return m +}, /** @type {Record>} */ {}) + export default { command: 'get ', @@ -20,16 +26,16 @@ export default { type: 'boolean', default: false }, - 'cid-base': { - describe: 'Number base to display CIDs in.', + 'output-codec': { + describe: 'Codec to encode data in before displaying.', type: 'string', - default: 'base58btc' + choices: ['dag-json', 'dag-cbor', 'dag-pb', 'raw'], + default: 'dag-json' }, 'data-enc': { - describe: 'String encoding to display data in.', + describe: 'String encoding to display raw node data in if using "raw" output-codec.', type: 'string', - choices: ['base16', 'base64', 'base58btc'], - default: 'base64' + choices: ['base16', 'base64', 'base58btc'] }, timeout: { type: 'string', @@ -41,12 +47,12 @@ export default { * @param {object} argv * @param {import('../../types').Context} argv.ctx * @param {string} argv.cidpath - * @param {string} argv.cidBase + * @param {'dag-json' | 'dag-cbor' | 'dag-pb' | 'raw'} argv.outputCodec * @param {'base16' | 'base64' | 'base58btc'} argv.dataEnc * @param {boolean} argv.localResolve * @param {number} argv.timeout */ - async handler ({ ctx: { ipfs, print }, cidpath, cidBase, dataEnc, localResolve, timeout }) { + async handler ({ ctx: { ipfs, print }, cidpath, dataEnc, outputCodec, localResolve, timeout }) { const options = { localResolve, timeout @@ -73,26 +79,25 @@ export default { } const node = result.value - const base = await ipfs.bases.getBase(cidBase) - - if (cid.code === dagPB.code) { - /** @type {import('@ipld/dag-pb').PBNode} */ - const dagNode = node - print(JSON.stringify({ - data: dagNode.Data ? 
uint8ArrayToString(node.Data, dataEnc) : undefined, - links: (dagNode.Links || []).map(link => ({ - Name: stripControlCharacters(link.Name), - Size: link.Tsize, - Cid: { '/': link.Hash.toString(base.encoder) } - })) - })) - } else if (cid.code === raw.code) { - print(uint8ArrayToString(node, dataEnc)) - } else if (cid.code === dagCBOR.code) { - print(JSON.stringify(makeEntriesPrintable(node, base))) + if (outputCodec === 'raw') { + if (!(node instanceof Uint8Array)) { + print('dag get cannot print a non-bytes node as "raw"') + return + } + if (dataEnc) { + print(uint8ArrayToString(node, dataEnc), false) + } else { + print.write(node) + } } else { - print(escapeControlCharacters(node.toString())) + const codec = codecs[outputCodec] + if (!codec) { + print(`unsupported codec "${outputCodec}"`) + return + } + const output = codec.encode(node) + print(output, false) } } } diff --git a/packages/ipfs-cli/src/commands/dag/put.js b/packages/ipfs-cli/src/commands/dag/put.js index f2e9efdea6..c17c096e2a 100644 --- a/packages/ipfs-cli/src/commands/dag/put.js +++ b/packages/ipfs-cli/src/commands/dag/put.js @@ -1,29 +1,22 @@ import * as dagCBOR from '@ipld/dag-cbor' import * as dagPB from '@ipld/dag-pb' +import * as dagJSON from '@ipld/dag-json' +import * as raw from 'multiformats/codecs/raw' import concat from 'it-concat' -import { CID } from 'multiformats/cid' import parseDuration from 'parse-duration' /** - * @type {Record any>} + * @template T + * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec */ -const inputDecoders = { - json: (buf) => JSON.parse(buf.toString()), - cbor: (buf) => dagCBOR.decode(buf), - protobuf: (buf) => dagPB.decode(buf), - raw: (buf) => buf -} /** - * @type {Record} + * @type {Record>} */ -const formats = { - cbor: 'dag-cbor', - raw: 'raw', - protobuf: 'dag-pb', - 'dag-cbor': 'dag-cbor', - 'dag-pb': 'dag-pb' -} +const codecs = [dagCBOR, dagJSON, dagPB, raw].reduce((/** @type {Record>} */ m, codec) => { + m[codec.name] = codec + return m +}, /** @type {Record>} */ {}) export default { command: 'put [data]', @@ -34,19 +27,17 @@ export default { data: { type: 'string' }, - format: { + 'store-codec': { type: 'string', - alias: 'f', - default: 'cbor', - describe: 'Format that the object will be added as', - choices: ['dag-cbor', 'dag-pb', 'raw', 'cbor', 'protobuf'] + default: 'dag-cbor', + describe: 'The codec that the stored object will be encoded with', + choices: ['dag-cbor', 'dag-json', 'dag-pb', 'raw'] }, - 'input-encoding': { + 'input-codec': { type: 'string', - alias: 'input-enc', - default: 'json', - describe: 'Format that the input object will be', - choices: ['json', 'cbor', 'raw', 'protobuf'] + default: 'dag-json', + describe: 'The codec that the input object is encoded with', + choices: ['dag-cbor', 'dag-json', 'dag-pb', 'raw'] }, pin: { type: 'boolean', @@ -89,8 +80,8 @@ export default { * @param {object} argv * @param {import('../../types').Context} argv.ctx * @param {string} argv.data - * @param {'dag-cbor' | 'dag-pb' | 'raw' | 'cbor' | 'protobuf'} argv.format - * @param {'json' | 'cbor' | 'raw' | 'protobuf'} argv.inputEncoding + * @param {'dag-cbor' | 'dag-json' | 'dag-pb' | 'raw'} argv.inputCodec + * @param {'dag-cbor' | 'dag-json' | 'dag-pb' | 'raw'} argv.storeCodec * @param {import('multiformats/cid').CIDVersion} argv.cidVersion * @param {boolean} argv.pin * @param {string} argv.hashAlg @@ -99,16 +90,12 @@ export default { * @param {boolean} argv.onlyHash * @param {number} argv.timeout */ - async handler ({ ctx: { ipfs, print, getStdin }, 
data, format, inputEncoding, pin, hashAlg, cidVersion, cidBase, preload, onlyHash, timeout }) { - if (inputEncoding === 'cbor') { - format = 'dag-cbor' - } else if (inputEncoding === 'protobuf') { - format = 'dag-pb' + async handler ({ ctx: { ipfs, print, getStdin }, data, inputCodec, storeCodec, pin, hashAlg, cidVersion, cidBase, preload, onlyHash, timeout }) { + if (!codecs[inputCodec]) { + throw new Error(`Unknown input-codec ${inputCodec}`) } - format = formats[format] - - if (format !== 'dag-pb') { + if (storeCodec !== 'dag-pb') { cidVersion = 1 } @@ -122,16 +109,10 @@ export default { source = Buffer.from(data) } - source = inputDecoders[inputEncoding](source) - - // Support legacy { "/" : "" } format so dag put is actually useful - // on the command line: https://github.com/ipld/js-ipld-dag-cbor/issues/84 - if (inputEncoding === 'json' && format === 'dag-cbor') { - source = objectSlashToCID(source) - } + const node = codecs[inputCodec].decode(source) - const cid = await ipfs.dag.put(source, { - format, + const cid = await ipfs.dag.put(node, { + storeCodec, hashAlg, version: cidVersion, onlyHash, @@ -144,30 +125,3 @@ export default { print(cid.toString(base.encoder)) } } - -/** - * @param {any} obj - * @returns {any} - */ -function objectSlashToCID (obj) { - if (Array.isArray(obj)) { - return obj.map(objectSlashToCID) - } - - if (obj && typeof obj === 'object') { - const keys = Object.keys(obj) - if (keys.length === 1 && '/' in obj) { - if (typeof obj['/'] !== 'string') { - throw new Error('link should have been a string') - } - return CID.parse(obj['/']) // throws if not a CID - consistent with go-ipfs - } - - return keys.reduce((obj, key) => { - obj[key] = objectSlashToCID(obj[key]) - return obj - }, obj) - } - - return obj -} diff --git a/packages/ipfs-cli/test/dag.spec.js b/packages/ipfs-cli/test/dag.spec.js index 6ec56aaa98..37e03a5031 100644 --- a/packages/ipfs-cli/test/dag.spec.js +++ b/packages/ipfs-cli/test/dag.spec.js @@ -47,11 +47,10 @@ describe('dag', () => { } ipfs.dag.get.withArgs(rawCid, defaultOptions).returns(result) - ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - const out = await cli(`dag get ${rawCid} --data-enc base16`, { ipfs }) + const out = await cli(`dag get ${rawCid} --output-codec raw --data-enc base16`, { ipfs }) - expect(out).to.equal(uint8ArrayToString(result.value, 'base16') + '\n') + expect(out).to.equal(uint8ArrayToString(result.value, 'base16')) }) it('should get a dag-pb node', async () => { @@ -67,14 +66,13 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagPbCid, defaultOptions).returns(result) - ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`dag get ${dagPbCid}`, { ipfs }) - expect(out).to.equal(`{"data":"AAED","links":[{"Name":"foo","Size":10,"Cid":{"/":"${dagCborCid.toString(base58btc)}"}}]}\n`) + expect(out).to.equal(`{"Data":{"/":{"bytes":"AAED"}},"Links":[{"Hash":{"/":"${dagCborCid.toString()}"},"Name":"foo","Tsize":10}]}`) }) - it('should get a dag-pb node and specify data encoding', async () => { + it('should get a dag-pb node as dag-pb', async () => { const result = { value: { Data: Buffer.from([0, 1, 3]), @@ -87,14 +85,13 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagPbCid, defaultOptions).returns(result) - ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - const out = await cli(`dag get ${dagPbCid} --data-enc base16`, { ipfs }) + const out = await cli(`dag get ${dagPbCid} --output-codec dag-pb`, { ipfs, raw: true }) - 
expect(out).to.equal(`{"data":"000103","links":[{"Name":"foo","Size":10,"Cid":{"/":"${dagCborCid.toString(base58btc)}"}}]}\n`) + expect(out).to.deep.equal(Buffer.from('122d0a2401711220b80784f97f67ad80d52575d643044ffb37b20f8d4db32ae59e47b1ac68df20e01203666f6f180a0a03000103', 'hex')) }) - it('should get a dag-pb node and specify CID encoding', async () => { + it('should get a dag-pb node as dag-cbor', async () => { const result = { value: { Data: Buffer.from([0, 1, 3]), @@ -107,11 +104,55 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagPbCid, defaultOptions).returns(result) - ipfs.bases.getBase.withArgs('base64').returns(base64) - const out = await cli(`dag get ${dagPbCid} --cid-base base64`, { ipfs }) + const out = await cli(`dag get ${dagPbCid} --output-codec dag-cbor`, { ipfs, raw: true }) + + expect(out).to.deep.equal(Buffer.from('a2644461746143000103654c696e6b7381a36448617368d82a58250001711220b80784f97f67ad80d52575d643044ffb37b20f8d4db32ae59e47b1ac68df20e0644e616d6563666f6f655473697a650a', 'hex')) + }) + + it('should fail to get a non bytes node with "raw"', async () => { + const result = { + value: { + Data: Buffer.from([0, 1, 3]), + Links: [{ + Hash: dagCborCid, + Name: 'foo', + Tsize: 10 + }] + } + } + + ipfs.dag.get.withArgs(dagPbCid, defaultOptions).returns(result) + + const out = await cli(`dag get ${dagPbCid} --output-codec raw --data-enc base16`, { ipfs }) - expect(out).to.equal(`{"data":"AAED","links":[{"Name":"foo","Size":10,"Cid":{"/":"${dagCborCid.toString(base64)}"}}]}\n`) + expect(out).to.equal('dag get cannot print a non-bytes node as "raw"\n') + }) + + it('should get a bytes node of a non-bytes block with "raw"', async () => { + // in this instance we're pretending to path into a 'Data' property of a dag-pb block + const result = { + value: Buffer.from([0, 1, 3]) + } + + ipfs.dag.get.withArgs(dagPbCid, { ...defaultOptions, path: '/Data' }).returns(result) + + const out = await cli(`dag get ${dagPbCid}/Data --output-codec raw --data-enc base16`, { ipfs }) + + expect(out).to.equal('000103') + }) + + it('should get raw bytes without data encoding', async () => { + // in this instance we're pretending to path into a 'Data' property of a dag-pb block + const result = { + value: Buffer.from([0, 1, 3]) + } + + ipfs.dag.get.withArgs(rawCid, defaultOptions).returns(result) + + const out = await cli(`dag get ${rawCid} --output-codec raw`, { ipfs }) + + expect(out).to.equal(Buffer.from([0, 1, 3]).toString()) }) it('should get a dag-cbor node', async () => { @@ -122,43 +163,39 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagCborCid, defaultOptions).returns(result) - ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`dag get ${dagCborCid}`, { ipfs }) - expect(out).to.equal('{"foo":"bar"}\n') + expect(out).to.equal('{"foo":"bar"}') }) - it('should get a dag-cbor node with a nested CID', async () => { + it('should get a dag-cbor node as dag-cbor', async () => { const result = { value: { - foo: 'bar', - baz: dagPbCid + foo: 'bar' } } ipfs.dag.get.withArgs(dagCborCid, defaultOptions).returns(result) - ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - const out = await cli(`dag get ${dagCborCid}`, { ipfs }) + const out = await cli(`dag get ${dagCborCid} --output-codec dag-cbor`, { ipfs, raw: true }) - expect(out).to.equal(`{"foo":"bar","baz":{"/":"${dagPbCid}"}}\n`) + expect(out).to.deep.equal(Buffer.from('a163666f6f63626172', 'hex')) }) - it('should get a dag-cbor node with a nested CID and change the encoding', async () => { + 
it('should get a dag-cbor node with a nested CID', async () => { const result = { value: { foo: 'bar', - baz: rawCid + baz: dagPbCid } } ipfs.dag.get.withArgs(dagCborCid, defaultOptions).returns(result) - ipfs.bases.getBase.withArgs('base64').returns(base64) - const out = await cli(`dag get ${dagCborCid} --cid-base=base64`, { ipfs }) + const out = await cli(`dag get ${dagCborCid}`, { ipfs }) - expect(out).to.equal(`{"foo":"bar","baz":{"/":"${rawCid.toString(base64)}"}}\n`) + expect(out).to.equal(`{"baz":{"/":"${dagPbCid}"},"foo":"bar"}`) }) it('should get a node with a deep path', async () => { @@ -172,9 +209,9 @@ describe('dag', () => { path }).returns(result) - const out = await cli(`dag get ${rawCid}${path} --data-enc base16`, { ipfs }) + const out = await cli(`dag get ${rawCid}${path} --output-codec raw --data-enc base16`, { ipfs }) - expect(out).to.be.eql(uint8ArrayToString(result.value, 'base16') + '\n') + expect(out).to.be.eql(uint8ArrayToString(result.value, 'base16')) }) it('should get a node with a deep path and an ipfs prefix', async () => { @@ -188,9 +225,9 @@ describe('dag', () => { path }).returns(result) - const out = await cli(`dag get /ipfs/${rawCid}${path} --data-enc base16`, { ipfs }) + const out = await cli(`dag get /ipfs/${rawCid}${path} --output-codec raw --data-enc base16`, { ipfs }) - expect(out).to.be.eql(uint8ArrayToString(result.value, 'base16') + '\n') + expect(out).to.be.eql(uint8ArrayToString(result.value, 'base16')) }) it('should get a node with local resolve', async () => { @@ -203,11 +240,11 @@ describe('dag', () => { localResolve: true }).returns(result) - const out = await cli(`dag get ${rawCid} --local-resolve --data-enc base16`, { ipfs }) + const out = await cli(`dag get ${rawCid} --local-resolve --output-codec raw --data-enc base16`, { ipfs }) expect(out).to.include('resolving path within the node only\n') expect(out).to.include('remainder path: n/a\n') - expect(out).to.include(uint8ArrayToString(result.value, 'base16') + '\n') + expect(out).to.include(uint8ArrayToString(result.value, 'base16')) }) it('should get a node with a timeout', async () => { @@ -220,9 +257,9 @@ describe('dag', () => { timeout: 1000 }).returns(result) - const out = await cli(`dag get ${rawCid} --timeout=1s --data-enc base16`, { ipfs }) + const out = await cli(`dag get ${rawCid} --timeout=1s --output-codec raw --data-enc base16`, { ipfs }) - expect(out).to.be.eql(uint8ArrayToString(result.value, 'base16') + '\n') + expect(out).to.be.eql(uint8ArrayToString(result.value, 'base16')) }) it('should strip control characters from dag-pb nodes', async () => { @@ -237,14 +274,13 @@ describe('dag', () => { } ipfs.dag.get.withArgs(dagPbCid, defaultOptions).returns(result) - ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) const out = await cli(`dag get ${dagPbCid}`, { ipfs }) - expect(out).to.equal(`{"links":[{"Name":"foo.txt","Size":9000,"Cid":{"/":"${dagPbCid.toString(base58btc)}"}}]}\n`) + expect(out).to.equal(`{"Links":[{"Hash":{"/":"${dagPbCid.toString(base58btc)}"},"Name":"foo\\b\\n\\t.txt","Tsize":9000}]}`) }) - it('should strip control characters from dag-cbor nodes', async () => { + it('should not strip control characters from dag-cbor nodes', async () => { const result = { value: { 'lo\nl': 'ok\t' @@ -255,10 +291,10 @@ describe('dag', () => { const out = await cli(`dag get ${dagCborCid}`, { ipfs }) - expect(out).to.equal('{"lol":"ok"}\n') + expect(out).to.equal('{"lo\\nl":"ok\\t"}') }) - it('should strip control characters from dag-cbor string nodes', async () => { + 
it('should not strip control characters from dag-cbor string nodes', async () => { const result = { value: 'lo\nl' } @@ -267,10 +303,10 @@ describe('dag', () => { const out = await cli(`dag get ${dagCborCid}`, { ipfs }) - expect(out).to.equal('"lol"\n') + expect(out).to.equal('"lo\\nl"') }) - it('should strip control characters from dag-cbor array nodes', async () => { + it('should not strip control characters from dag-cbor array nodes', async () => { const result = { value: ['lo\nl'] } @@ -279,10 +315,10 @@ describe('dag', () => { const out = await cli(`dag get ${dagCborCid}`, { ipfs }) - expect(out).to.equal('["lol"]\n') + expect(out).to.equal('["lo\\nl"]') }) - it('should strip control characters from dag-cbor nested array nodes', async () => { + it('should not strip control characters from dag-cbor nested array nodes', async () => { const result = { value: { 'lo\nl': ['ok\t'] @@ -293,7 +329,7 @@ describe('dag', () => { const out = await cli(`dag get ${dagCborCid}`, { ipfs }) - expect(out).to.equal('{"lol":["ok"]}\n') + expect(out).to.equal('{"lo\\nl":["ok\\t"]}') }) }) @@ -335,7 +371,7 @@ describe('dag', () => { describe('put', () => { const defaultOptions = { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256', version: 1, onlyHash: false, @@ -369,7 +405,7 @@ describe('dag', () => { ipfs.dag.put.withArgs({}, defaultOptions).resolves(dagCborCid) ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - const out = await cli('dag put --input-encoding cbor', { + const out = await cli('dag put --input-codec dag-cbor', { getStdin: function * () { yield dagCBOR.encode({}) }, @@ -379,13 +415,13 @@ describe('dag', () => { }) it('puts piped raw node', async () => { - ipfs.dag.put.withArgs(Buffer.alloc(10), { + ipfs.dag.put.withArgs(new Uint8Array(10), { ...defaultOptions, - format: 'raw' + storeCodec: 'raw' }).resolves(rawCid) ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - const out = await cli('dag put --input-encoding raw --format raw', { + const out = await cli('dag put --input-codec raw --store-codec raw', { getStdin: function * () { yield Buffer.alloc(10) }, @@ -394,15 +430,15 @@ describe('dag', () => { expect(out).to.equal(`${rawCid.toString(base58btc)}\n`) }) - it('puts piped protobuf node', async () => { + it('puts piped dag-pb node', async () => { ipfs.dag.put.withArgs(dagPB.decode(dagPB.encode({ Links: [] })), { ...defaultOptions, - format: 'dag-pb', + storeCodec: 'dag-pb', version: 0 }).resolves(dagPbCid) ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - const out = await cli('dag put --input-encoding protobuf --format protobuf', { + const out = await cli('dag put --input-codec dag-pb --store-codec dag-pb', { getStdin: function * () { yield dagPB.encode({ Links: [] }) }, @@ -411,29 +447,29 @@ describe('dag', () => { expect(out).to.equal(`${dagPbCid.toString(base58btc)}\n`) }) - it('puts protobuf node as json', async () => { + it('puts dag-pb node as dag-json', async () => { ipfs.dag.put.withArgs({ Links: [] }, { ...defaultOptions, - format: 'dag-pb', + storeCodec: 'dag-pb', version: 0 }).resolves(dagPbCid) ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - const out = await cli('dag put --format protobuf \'{"Links":[]}\'', { + const out = await cli('dag put --store-codec dag-pb --input-codec dag-json \'{"Links":[]}\'', { ipfs }) expect(out).to.equal(`${dagPbCid.toString(base58btc)}\n`) }) - it('puts piped protobuf node with cid-v1', async () => { + it('puts piped dag-pb node with cid-v1', async () => { 
ipfs.dag.put.withArgs(dagPB.decode(dagPB.encode({ Links: [] })), { ...defaultOptions, - format: 'dag-pb', + storeCodec: 'dag-pb', version: 1 }).resolves(dagPbCid) ipfs.bases.getBase.withArgs('base58btc').returns(base58btc) - const out = await cli('dag put --input-encoding protobuf --format protobuf --cid-version=1', { + const out = await cli('dag put --input-codec dag-pb --store-codec dag-pb --cid-version=1', { getStdin: function * () { yield dagPB.encode({ Links: [] }) }, diff --git a/packages/ipfs-core-types/src/dag/index.ts b/packages/ipfs-core-types/src/dag/index.ts index 61a53b23c3..875ea131cc 100644 --- a/packages/ipfs-core-types/src/dag/index.ts +++ b/packages/ipfs-core-types/src/dag/index.ts @@ -17,7 +17,7 @@ export interface API { * } * } * - * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + * const cid = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) * console.log(cid.toString()) * // zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5 * @@ -54,7 +54,7 @@ export interface API { * @example * ```js * const obj = { simple: 'object' } - * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-512' }) + * const cid = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-512' }) * * console.log(cid.toString()) * // zBwWX9ecx5F4X54WAjmFLErnBT6ByfNxStr5ovowTL7AhaUR98RWvXPS1V3HqV1qs3r5Ec5ocv7eCdbqYQREXNUfYNuKG @@ -77,7 +77,7 @@ export interface API { * } * } * - * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + * const cid = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) * console.log(cid.toString()) * // bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq * @@ -132,9 +132,14 @@ export interface GetResult { export interface PutOptions extends AbortOptions, PreloadOptions { /** - * The codec to use to create the CID (defaults to 'dag-cbor') + * The codec that the input object is encoded with if a pre-encoded object is supplied. */ - format?: string + inputCodec?: string + + /** + * The codec that the stored object will be encoded with (defaults to 'dag-cbor') + */ + storeCodec?: string /** * Multihash hashing algorithm to use (defaults to 'sha2-256') diff --git a/packages/ipfs-core/package.json b/packages/ipfs-core/package.json index a92287e20f..9d48421f69 100644 --- a/packages/ipfs-core/package.json +++ b/packages/ipfs-core/package.json @@ -68,6 +68,7 @@ "@achingbrain/libp2p-noise": "^5.0.0", "@ipld/car": "^3.1.0", "@ipld/dag-cbor": "^6.0.5", + "@ipld/dag-json": "^8.0.1", "@ipld/dag-pb": "^2.1.3", "@multiformats/murmur3": "^1.0.1", "any-signal": "^2.1.2", @@ -135,7 +136,7 @@ "@types/rimraf": "^3.0.1", "aegir": "^36.0.1", "delay": "^5.0.0", - "go-ipfs": "0.9.1", + "go-ipfs": "0.10.0", "interface-blockstore-tests": "^2.0.1", "interface-ipfs-core": "^0.152.2", "ipfsd-ctl": "^10.0.4", diff --git a/packages/ipfs-core/src/components/dag/put.js b/packages/ipfs-core/src/components/dag/put.js index eeeea52e36..ebd0e86119 100644 --- a/packages/ipfs-core/src/components/dag/put.js +++ b/packages/ipfs-core/src/components/dag/put.js @@ -16,23 +16,33 @@ export function createPut ({ repo, codecs, hashers, preload }) { const release = options.pin ? await repo.gcLock.readLock() : null try { - const codecName = options.format || 'dag-cbor' - const cidVersion = options.version != null ? options.version : (codecName === 'dag-pb' ? 
0 : 1) - const codec = await codecs.getCodec(codecName) + const storeCodec = await codecs.getCodec(options.storeCodec || 'dag-cbor') + // TODO: doesn't getCodec throw? verify and possibly remove this + if (!storeCodec) { + throw new Error(`Unknown storeCodec ${options.storeCodec}, please configure additional BlockCodecs for this IPFS instance`) + } - if (!codec) { - throw new Error(`Unknown codec ${options.format}, please configure additional BlockCodecs for this IPFS instance`) + if (options.inputCodec) { + if (!(dagNode instanceof Uint8Array)) { + throw new Error('Can only inputCodec on raw bytes that can be decoded') + } + const inputCodec = await codecs.getCodec(options.inputCodec) + if (!inputCodec) { + throw new Error(`Unknown inputCodec ${options.inputCodec}, please configure additional BlockCodecs for this IPFS instance`) + } + dagNode = inputCodec.decode(dagNode) } + const cidVersion = options.version != null ? options.version : 1 const hasher = await hashers.getHasher(options.hashAlg || 'sha2-256') if (!hasher) { throw new Error(`Unknown hash algorithm ${options.hashAlg}, please configure additional MultihashHashers for this IPFS instance`) } - const buf = codec.encode(dagNode) + const buf = storeCodec.encode(dagNode) const hash = await hasher.digest(buf) - const cid = CID.create(cidVersion, codec.code, hash) + const cid = CID.create(cidVersion, storeCodec.code, hash) await repo.blocks.put(cid, buf, { signal: options.signal diff --git a/packages/ipfs-core/src/components/index.js b/packages/ipfs-core/src/components/index.js index d81bacb6ea..0739fd2f3c 100644 --- a/packages/ipfs-core/src/components/index.js +++ b/packages/ipfs-core/src/components/index.js @@ -5,6 +5,7 @@ import errCode from 'err-code' import { UnixFS } from 'ipfs-unixfs' import * as dagPB from '@ipld/dag-pb' import * as dagCBOR from '@ipld/dag-cbor' +import * as dagJSON from '@ipld/dag-json' import { identity } from 'multiformats/hashes/identity' import { bases, hashes, codecs } from 'multiformats/basics' import { initAssets } from 'ipfs-core-config/init-assets' @@ -286,7 +287,7 @@ export async function create (options = {}) { /** @type {BlockCodec[]} */ const blockCodecs = Object.values(codecs); - [dagPB, dagCBOR, id].concat((options.ipld && options.ipld.codecs) || []).forEach(codec => blockCodecs.push(codec)) + [dagPB, dagCBOR, dagJSON, id].concat((options.ipld && options.ipld.codecs) || []).forEach(codec => blockCodecs.push(codec)) const multicodecs = new Multicodecs({ codecs: blockCodecs, diff --git a/packages/ipfs-core/src/components/object/links.js b/packages/ipfs-core/src/components/object/links.js index c186218675..8c2e126684 100644 --- a/packages/ipfs-core/src/components/object/links.js +++ b/packages/ipfs-core/src/components/object/links.js @@ -1,5 +1,6 @@ import * as dagPB from '@ipld/dag-pb' import * as dagCBOR from '@ipld/dag-cbor' +import * as dagJSON from '@ipld/dag-json' import * as raw from 'multiformats/codecs/raw' import { CID } from 'multiformats/cid' import { withTimeoutOption } from 'ipfs-core-utils/with-timeout-option' @@ -67,19 +68,17 @@ export function createLinks ({ repo, codecs }) { const block = await repo.blocks.get(cid, options) const node = codec.decode(block) - if (cid.code === raw.code) { - return [] + switch (cid.code) { + case raw.code: + return [] + case dagPB.code: + return node.Links + case dagCBOR.code: + case dagJSON.code: + return findLinks(node) + default: + throw new Error(`Cannot resolve links from codec ${cid.code}`) } - - if (cid.code === dagPB.code) { - return 
node.Links - } - - if (cid.code === dagCBOR.code) { - return findLinks(node) - } - - throw new Error(`Cannot resolve links from codec ${cid.code}`) } return withTimeoutOption(links) diff --git a/packages/ipfs-core/src/components/object/put.js b/packages/ipfs-core/src/components/object/put.js index 5dc652fa4a..b9876bae76 100644 --- a/packages/ipfs-core/src/components/object/put.js +++ b/packages/ipfs-core/src/components/object/put.js @@ -18,7 +18,7 @@ export function createPut ({ repo, preload }) { try { const buf = dagPB.encode(obj) const hash = await sha256.digest(buf) - const cid = CID.createV0(hash) + const cid = CID.createV1(dagPB.code, hash) await repo.blocks.put(cid, buf, { signal: options.signal diff --git a/packages/ipfs-core/test/ipld.spec.js b/packages/ipfs-core/test/ipld.spec.js index e9bf1f55f6..e34c0d5525 100644 --- a/packages/ipfs-core/test/ipld.spec.js +++ b/packages/ipfs-core/test/ipld.spec.js @@ -68,7 +68,7 @@ describe('ipld', function () { hello: 'world' } const cid1 = await ipfs.dag.put(dagCborNode, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) @@ -77,13 +77,13 @@ describe('ipld', function () { Links: [] } const cid2 = await ipfs.dag.put(dagPbNode, { - format: 'dag-pb', + storeCodec: 'dag-pb', hashAlg: 'sha2-256' }) const customNode = 'totally custom' const cid3 = await ipfs.dag.put(customNode, { - format: 'custom-codec', + storeCodec: 'custom-codec', hashAlg: 'sha2-256' }) diff --git a/packages/ipfs-core/test/preload.spec.js b/packages/ipfs-core/test/preload.spec.js index f22797aa05..7b68435b04 100644 --- a/packages/ipfs-core/test/preload.spec.js +++ b/packages/ipfs-core/test/preload.spec.js @@ -253,14 +253,14 @@ describe('preload', () => { it('should preload content added with dag.put', async function () { this.timeout(50 * 1000) const obj = { test: nanoid() } - const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(obj, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) await waitForCids(cid) }) it('should preload content retrieved with dag.get', async function () { this.timeout(50 * 1000) const obj = { test: nanoid() } - const opts = { format: 'dag-cbor', hashAlg: 'sha2-256', preload: false } + const opts = { storeCodec: 'dag-cbor', hashAlg: 'sha2-256', preload: false } const cid = await ipfs.dag.put(obj, opts) await clearPreloadCids() await ipfs.dag.get(cid) diff --git a/packages/ipfs-core/test/utils/codecs.js b/packages/ipfs-core/test/utils/codecs.js index a8dc549725..725715ddf8 100644 --- a/packages/ipfs-core/test/utils/codecs.js +++ b/packages/ipfs-core/test/utils/codecs.js @@ -3,9 +3,10 @@ import { Multicodecs } from 'ipfs-core-utils/multicodecs' import * as dagPB from '@ipld/dag-pb' import * as dagCBOR from '@ipld/dag-cbor' +import * as dagJSON from '@ipld/dag-json' import * as raw from 'multiformats/codecs/raw' export const codecs = new Multicodecs({ - codecs: [dagPB, dagCBOR, raw], + codecs: [dagPB, dagCBOR, dagJSON, raw], loadCodec: () => Promise.reject(new Error('No extra codecs configured')) }) diff --git a/packages/ipfs-http-client/package.json b/packages/ipfs-http-client/package.json index 8f52369b0f..3d1367e1a8 100644 --- a/packages/ipfs-http-client/package.json +++ b/packages/ipfs-http-client/package.json @@ -55,6 +55,7 @@ }, "dependencies": { "@ipld/dag-cbor": "^6.0.5", + "@ipld/dag-json": "^8.0.1", "@ipld/dag-pb": "^2.1.3", "abort-controller": "^3.0.0", "any-signal": "^2.1.2", @@ -76,7 +77,7 @@ "devDependencies": { "aegir": "^36.0.1", "delay": "^5.0.0", - "go-ipfs": 
"0.9.1", + "go-ipfs": "0.10.0", "ipfsd-ctl": "^10.0.4", "it-all": "^1.0.4", "it-first": "^1.0.4", diff --git a/packages/ipfs-http-client/src/dag/put.js b/packages/ipfs-http-client/src/dag/put.js index 6192e1750a..71701942b9 100644 --- a/packages/ipfs-http-client/src/dag/put.js +++ b/packages/ipfs-http-client/src/dag/put.js @@ -21,14 +21,31 @@ export const createPut = (codecs, options) => { */ const put = async (dagNode, options = {}) => { const settings = { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256', - inputEnc: 'raw', ...options } - const codec = await codecs.getCodec(settings.format) - const serialized = codec.encode(dagNode) + let serialized + + if (settings.inputCodec) { + // if you supply an inputCodec, we assume you're passing in a raw, encoded + // block using that codec, so we'll just pass that on to the server and let + // it deal with the decode/encode/store cycle + if (!(dagNode instanceof Uint8Array)) { + throw new Error('Can only inputCodec on raw bytes that can be decoded') + } + serialized = dagNode + } else { + // if you don't supply an inputCodec, we assume you've passed in a JavaScript + // object you want to have encoded using storeCodec, so we'll prepare it for + // you if we have the codec + const storeCodec = await codecs.getCodec(settings.storeCodec) + serialized = storeCodec.encode(dagNode) + // now we have a serialized form, the server should be told to receive it + // in that format + settings.inputCodec = settings.storeCodec + } // allow aborting requests on body errors const controller = new AbortController() diff --git a/packages/ipfs-http-client/src/index.js b/packages/ipfs-http-client/src/index.js index 4a8ab2ea92..bb7d055f72 100644 --- a/packages/ipfs-http-client/src/index.js +++ b/packages/ipfs-http-client/src/index.js @@ -5,6 +5,7 @@ import { Multicodecs } from 'ipfs-core-utils/multicodecs' import { Multihashes } from 'ipfs-core-utils/multihashes' import * as dagPB from '@ipld/dag-pb' import * as dagCBOR from '@ipld/dag-cbor' +import * as dagJSON from '@ipld/dag-json' import { identity } from 'multiformats/hashes/identity' import { bases, hashes, codecs } from 'multiformats/basics' import { createBitswap } from './bitswap/index.js' @@ -79,7 +80,7 @@ export function create (options = {}) { /** @type {BlockCodec[]} */ const blockCodecs = Object.values(codecs); - [dagPB, dagCBOR, id].concat((options.ipld && options.ipld.codecs) || []).forEach(codec => blockCodecs.push(codec)) + [dagPB, dagCBOR, dagJSON, id].concat((options.ipld && options.ipld.codecs) || []).forEach(codec => blockCodecs.push(codec)) const multicodecs = new Multicodecs({ codecs: blockCodecs, diff --git a/packages/ipfs-http-client/src/object/put.js b/packages/ipfs-http-client/src/object/put.js index 66b59653bb..0bf6d3362c 100644 --- a/packages/ipfs-http-client/src/object/put.js +++ b/packages/ipfs-http-client/src/object/put.js @@ -20,9 +20,9 @@ export const createPut = (codecs, options) => { async function put (obj, options = {}) { return dagPut(obj, { ...options, - format: 'dag-pb', + storeCodec: 'dag-pb', hashAlg: 'sha2-256', - version: 0 + version: 1 }) } return put diff --git a/packages/ipfs-http-client/test/dag.spec.js b/packages/ipfs-http-client/test/dag.spec.js index 24c56b2269..346828fdc0 100644 --- a/packages/ipfs-http-client/test/dag.spec.js +++ b/packages/ipfs-http-client/test/dag.spec.js @@ -6,7 +6,6 @@ import { expect } from 'aegir/utils/chai.js' import * as dagPB from '@ipld/dag-pb' import * as dagCBOR from '@ipld/dag-cbor' import * as raw from 
'multiformats/codecs/raw' -import { base58btc } from 'multiformats/bases/base58' import { base32 } from 'multiformats/bases/base32' import { create as httpClient } from '../src/index.js' import { factory } from './utils/factory.js' @@ -22,25 +21,25 @@ describe('.dag', function () { after(() => f.clean()) - it('should be able to put and get a DAG node with format dag-pb', async () => { + it('should be able to put and get a DAG node with dag-pb codec', async () => { const data = uint8ArrayFromString('some data') const node = { Data: data, Links: [] } - const cid = await ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256', cidVersion: 0 }) + const cid = await ipfs.dag.put(node, { storeCodec: 'dag-pb', hashAlg: 'sha2-256' }) expect(cid.code).to.equal(dagPB.code) - expect(cid.toString(base58btc)).to.equal('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') + expect(cid.toV0().toString()).to.equal('Qmd7xRhW5f29QuBFtqu3oSD27iVy35NRB91XFjmKFhtgMr') const result = await ipfs.dag.get(cid) expect(result.value.Data).to.deep.equal(data) }) - it('should be able to put and get a DAG node with format dag-cbor', async () => { + it('should be able to put and get a DAG node with dag-cbor codec', async () => { const cbor = { foo: 'dag-cbor-bar' } - const cid = await ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(cbor, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) expect(cid.code).to.equal(dagCBOR.code) expect(cid.toString(base32)).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') @@ -50,9 +49,9 @@ describe('.dag', function () { expect(result.value).to.deep.equal(cbor) }) - it('should be able to put and get a DAG node with format raw', async () => { + it('should be able to put and get a DAG node with raw codec', async () => { const node = uint8ArrayFromString('some data') - const cid = await ipfs.dag.put(node, { format: 'raw', hashAlg: 'sha2-256' }) + const cid = await ipfs.dag.put(node, { storeCodec: 'raw', hashAlg: 'sha2-256' }) expect(cid.code).to.equal(raw.code) expect(cid.toString(base32)).to.equal('bafkreiata6mq425fzikf5m26temcvg7mizjrxrkn35swuybmpah2ajan5y') @@ -70,19 +69,28 @@ describe('.dag', function () { await expect(ipfs.dag.get(cid)).to.eventually.be.rejectedWith(/No codec found/) }) - it('should error when putting node with esoteric format', () => { + it('should error when putting node with esoteric codec', () => { const node = uint8ArrayFromString('some data') - return expect(ipfs.dag.put(node, { format: 'git-raw', hashAlg: 'sha2-256' })).to.eventually.be.rejectedWith(/No codec found/) + return expect(ipfs.dag.put(node, { storeCodec: 'git-raw', hashAlg: 'sha2-256' })).to.eventually.be.rejectedWith(/No codec found/) }) - it('should attempt to load an unsupported format', async () => { - let askedToLoadFormat + it('should pass through raw bytes with inputCodec', async () => { + const node = uint8ArrayFromString('blob 9\0some data') + // we don't support git-raw in the HTTP client, but inputCodec and a Uint8Array should make + // the raw data pass through to go-ipfs, which does talk git-raw + const cid = await ipfs.dag.put(node, { inputCodec: 'git-raw', storeCodec: 'git-raw', hashAlg: 'sha1' }) + expect(cid.code).to.equal(0x78) + expect(cid.toString(base32)).to.equal('baf4bcfd4azdl7vj4d4hnix75qfld6mabo4l4uwa') + }) + + it('should attempt to load an unsupported codec', async () => { + let askedToLoadCodec const ipfs2 = httpClient({ url: `http://${ipfs.apiHost}:${ipfs.apiPort}`, ipld: { - loadCodec: (format) => { - 
askedToLoadFormat = format === 'git-raw' + loadCodec: (codec) => { + askedToLoadCodec = codec === 'boop' return { encode: (buf) => buf } @@ -93,9 +101,9 @@ describe('.dag', function () { const node = uint8ArrayFromString('some data') // error is from go-ipfs, this means the client serialized it ok - await expect(ipfs2.dag.put(node, { format: 'git-raw', hashAlg: 'sha2-256' })).to.eventually.be.rejectedWith(/no parser for format "git-raw"/) + await expect(ipfs2.dag.put(node, { storeCodec: 'boop', hashAlg: 'sha2-256' })).to.eventually.be.rejectedWith(/unknown multicodec: "boop"/) - expect(askedToLoadFormat).to.be.true() + expect(askedToLoadCodec).to.be.true() }) it('should allow formats to be specified without overwriting others', async () => { @@ -115,7 +123,7 @@ describe('.dag', function () { hello: 'world' } const cid1 = await ipfs2.dag.put(dagCborNode, { - format: 'dag-cbor', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256' }) @@ -124,7 +132,7 @@ describe('.dag', function () { Links: [] } const cid2 = await ipfs2.dag.put(dagPbNode, { - format: 'dag-pb', + storeCodec: 'dag-pb', hashAlg: 'sha2-256' }) diff --git a/packages/ipfs-http-server/src/api/resources/dag.js b/packages/ipfs-http-server/src/api/resources/dag.js index 062ccb8dd7..e25834effb 100644 --- a/packages/ipfs-http-server/src/api/resources/dag.js +++ b/packages/ipfs-http-server/src/api/resources/dag.js @@ -127,8 +127,6 @@ export const putResource = { throw Boom.badRequest("File argument 'object data' is required") } - const enc = request.query.inputEncoding - if (!request.headers['content-type']) { throw Boom.badRequest("File argument 'object data' is required") } @@ -147,43 +145,8 @@ export const putResource = { throw Boom.badRequest("File argument 'object data' is required") } - let format = request.query.format - - if (format === 'cbor') { - format = 'dag-cbor' - } - - let node - - if (format === 'raw') { - node = data - } else if (enc === 'json') { - try { - node = JSON.parse(data.toString()) - } catch (/** @type {any} */ err) { - throw Boom.badRequest('Failed to parse the JSON: ' + err) - } - } else { - // the node is an uncommon format which the client should have - // serialized so add it to the block store and fetch it deserialized - // before continuing - const cidVersion = format === 'dag-pb' && request.query.hashAlg === 'sha2-256' ? 
request.query.version : 1 - - const cid = await request.server.app.ipfs.block.put(data, { - version: cidVersion, - format, - mhtype: request.query.hash - }) - - const { - value - } = await request.server.app.ipfs.dag.get(cid) - node = value - } - return { - node, - format, + data, hashAlg: request.query.hash } } @@ -194,15 +157,19 @@ export const putResource = { stripUnknown: true }, query: Joi.object().keys({ - format: Joi.string().default('cbor'), - inputEncoding: Joi.string().default('json'), + storeCodec: Joi.string().default('dag-cbor'), + inputCodec: Joi.string().default('dag-json'), pin: Joi.boolean().default(false), hash: Joi.string().default('sha2-256'), cidBase: Joi.string().default('base32'), version: Joi.number().integer().valid(0, 1).default(1), timeout: Joi.timeout() }) - .rename('input-enc', 'inputEncoding', { + .rename('store-codec', 'storeCodec', { + override: true, + ignoreUndefined: true + }) + .rename('input-codec', 'inputCodec', { override: true, ignoreUndefined: true }) @@ -229,25 +196,28 @@ export const putResource = { }, pre: { args: { - node, - format, + data, hashAlg } }, query: { + inputCodec, + storeCodec, pin, cidBase, - timeout, - version + version, + timeout } } = request - const cidVersion = format === 'dag-pb' && hashAlg === 'sha2-256' ? version : 1 + const cidVersion = storeCodec === 'dag-pb' && hashAlg === 'sha2-256' ? version : 1 + let cid try { - cid = await ipfs.dag.put(node, { - format, + cid = await ipfs.dag.put(data, { + inputCodec, + storeCodec, hashAlg, version: cidVersion, pin, diff --git a/packages/ipfs-http-server/src/api/resources/object.js b/packages/ipfs-http-server/src/api/resources/object.js index cd4f5291db..850b9901a7 100644 --- a/packages/ipfs-http-server/src/api/resources/object.js +++ b/packages/ipfs-http-server/src/api/resources/object.js @@ -70,7 +70,7 @@ export const newResource = { }, query: Joi.object().keys({ template: Joi.string().valid('unixfs-dir'), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -121,16 +121,17 @@ export const newResource = { } const base = await ipfs.bases.getBase(cidBase) + const base58 = await ipfs.bases.getBase('base58btc') const answer = { Data: node.Data ? uint8ArrayToString(node.Data, 'base64pad') : '', - Hash: cid.toString(base.encoder), + Hash: cid.toString(cid.version === 1 ? base.encoder : base58.encoder), Size: block.length, Links: node.Links.map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: l.Hash.toString(base.encoder) + Hash: l.Hash.toString(l.Hash.version === 1 ? base.encoder : base58.encoder) } }) } @@ -148,7 +149,7 @@ export const getResource = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), dataEncoding: Joi.string() .valid('ascii', 'base64pad', 'base16', 'utf8') .replace(/text/, 'ascii') @@ -205,16 +206,17 @@ export const getResource = { } const base = await ipfs.bases.getBase(cidBase) + const base58 = await ipfs.bases.getBase('base58btc') return h.response({ Data: node.Data ? uint8ArrayToString(node.Data, dataEncoding) : '', - Hash: cid.toString(base.encoder), + Hash: cid.toString(cid.version === 1 ? base.encoder : base58.encoder), Size: block.length, Links: node.Links.map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: l.Hash.toString(base.encoder) + Hash: l.Hash.toString(l.Hash.version === 1 ? 
base.encoder : base58.encoder) } }) }) @@ -237,7 +239,7 @@ export const putResource = { stripUnknown: true }, query: Joi.object().keys({ - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), dataEncoding: Joi.string() .valid('ascii', 'base64pad', 'base16', 'utf8') .replace(/text/, 'ascii') @@ -325,16 +327,17 @@ export const putResource = { } const base = await ipfs.bases.getBase(cidBase) + const base58 = await ipfs.bases.getBase('base58btc') const answer = { Data: node.Data ? uint8ArrayToString(node.Data, dataEncoding) : '', - Hash: cid.toString(base.encoder), + Hash: cid.toString(cid.version === 1 ? base.encoder : base58.encoder), Size: block.length, Links: node.Links.map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: l.Hash.toString(base.encoder) + Hash: l.Hash.toString(l.Hash.version === 1 ? base.encoder : base58.encoder) } }) } @@ -352,7 +355,7 @@ export const statResource = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -397,10 +400,11 @@ export const statResource = { } const base = await ipfs.bases.getBase(cidBase) + const base58 = await ipfs.bases.getBase('base58btc') return h.response({ ...stats, - Hash: stats.Hash.toString(base.encoder) + Hash: stats.Hash.toString(stats.Hash.version === 1 ? base.encoder : base58.encoder) }) } } @@ -414,7 +418,7 @@ export const dataResource = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -470,7 +474,7 @@ export const linksResource = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -510,14 +514,15 @@ export const linksResource = { }) const base = await ipfs.bases.getBase(cidBase) + const base58 = await ipfs.bases.getBase('base58btc') const response = { - Hash: cid.toString(base.encoder), + Hash: cid.toString(cid.version === 1 ? base.encoder : base58.encoder), Links: (links || []).map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: l.Hash.toString(base.encoder) + Hash: l.Hash.toString(l.Hash.version === 1 ? base.encoder : base58.encoder) } }) } @@ -543,7 +548,7 @@ export const patchAppendDataResource = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), dataEncoding: Joi.string() .valid('ascii', 'base64pad', 'base16', 'utf8') .replace(/text/, 'ascii') @@ -609,16 +614,17 @@ export const patchAppendDataResource = { } const base = await ipfs.bases.getBase(cidBase) + const base58 = await ipfs.bases.getBase('base58btc') const answer = { Data: node.Data ? uint8ArrayToString(node.Data, dataEncoding) : '', - Hash: newCid.toString(base.encoder), + Hash: newCid.toString(newCid.version === 1 ? base.encoder : base58.encoder), Size: block.length, Links: node.Links.map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: l.Hash.toString(base.encoder) + Hash: l.Hash.toString(l.Hash.version === 1 ? 
base.encoder : base58.encoder) } }) } @@ -644,7 +650,7 @@ export const patchSetDataResource = { }, query: Joi.object().keys({ cid: Joi.cid().required(), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), timeout: Joi.timeout() }) .rename('cid-base', 'cidBase', { @@ -697,14 +703,15 @@ export const patchSetDataResource = { } const base = await ipfs.bases.getBase(cidBase) + const base58 = await ipfs.bases.getBase('base58btc') return h.response({ - Hash: newCid.toString(base.encoder), + Hash: newCid.toString(newCid.version === 1 ? base.encoder : base58.encoder), Links: node.Links.map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: l.Hash.toString(base.encoder) + Hash: l.Hash.toString(l.Hash.version === 1 ? base.encoder : base58.encoder) } }) }) @@ -724,7 +731,7 @@ export const patchAddLinkResource = { Joi.string().required(), Joi.cid().required() ).required(), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), dataEncoding: Joi.string() .valid('ascii', 'base64pad', 'base16', 'utf8') .replace(/text/, 'ascii') @@ -790,16 +797,17 @@ export const patchAddLinkResource = { } const base = await ipfs.bases.getBase(cidBase) + const base58 = await ipfs.bases.getBase('base58btc') const answer = { Data: node.Data ? uint8ArrayToString(node.Data, dataEncoding) : '', - Hash: cid.toString(base.encoder), + Hash: cid.toString(cid.version === 1 ? base.encoder : base58.encoder), Size: block.length, Links: node.Links.map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: l.Hash.toString(base.encoder) + Hash: l.Hash.toString(l.Hash.version === 1 ? base.encoder : base58.encoder) } }) } @@ -820,7 +828,7 @@ export const patchRmLinkResource = { Joi.cid().required(), Joi.string().required() ).required(), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string().default('base32'), dataEncoding: Joi.string() .valid('ascii', 'base64pad', 'base16', 'utf8') .replace(/text/, 'ascii') @@ -880,16 +888,17 @@ export const patchRmLinkResource = { } const base = await ipfs.bases.getBase(cidBase) + const base58 = await ipfs.bases.getBase('base58btc') const answer = { Data: node.Data ? uint8ArrayToString(node.Data, dataEncoding) : '', - Hash: cid.toString(base.encoder), + Hash: cid.toString(cid.version === 1 ? base.encoder : base58.encoder), Size: block.length, Links: node.Links.map((l) => { return { Name: l.Name, Size: l.Tsize, - Hash: l.Hash.toString(base.encoder) + Hash: l.Hash.toString(l.Hash.version === 1 ? 
base.encoder : base58.encoder) } }) } diff --git a/packages/ipfs-http-server/test/inject/dag.js b/packages/ipfs-http-server/test/inject/dag.js index 38ee126205..2d9056f519 100644 --- a/packages/ipfs-http-server/test/inject/dag.js +++ b/packages/ipfs-http-server/test/inject/dag.js @@ -221,7 +221,8 @@ describe('/dag', () => { describe('/put', () => { const defaultOptions = { - format: 'dag-cbor', + inputCodec: 'dag-json', + storeCodec: 'dag-cbor', hashAlg: 'sha2-256', version: 1, pin: false, @@ -248,7 +249,8 @@ describe('/dag', () => { const node = { foo: 'bar' } - ipfs.dag.put.withArgs(node, defaultOptions).returns(cid.toV1()) + const encoded = Buffer.from(JSON.stringify(node)) + ipfs.dag.put.withArgs(encoded, defaultOptions).returns(cid.toV1()) const res = await http({ method: 'POST', @@ -266,14 +268,15 @@ describe('/dag', () => { data: [], links: [] } - ipfs.dag.put.withArgs(node, { + const encoded = Buffer.from(JSON.stringify(node)) + ipfs.dag.put.withArgs(encoded, { ...defaultOptions, - format: 'dag-pb' + storeCodec: 'dag-pb' }).returns(cid.toV1()) const res = await http({ method: 'POST', - url: '/api/v0/dag/put?format=dag-pb', + url: '/api/v0/dag/put?storeCodec=dag-pb', ...await toHeadersAndPayload(JSON.stringify(node)) }, { ipfs }) @@ -287,15 +290,17 @@ describe('/dag', () => { data: [], links: [] } - ipfs.dag.put.withArgs(node, { + const encoded = Buffer.from(JSON.stringify(node)) + ipfs.dag.put.withArgs(encoded, { ...defaultOptions, version: 0, - format: 'dag-pb' + inputCodec: 'dag-json', + storeCodec: 'dag-pb' }).returns(cid) const res = await http({ method: 'POST', - url: '/api/v0/dag/put?format=dag-pb&version=0', + url: '/api/v0/dag/put?inputCodec=dag-json&storeCodec=dag-pb&version=0', ...await toHeadersAndPayload(JSON.stringify(node)) }, { ipfs }) @@ -308,12 +313,12 @@ describe('/dag', () => { const node = Buffer.from([0, 1, 2, 3]) ipfs.dag.put.withArgs(node, { ...defaultOptions, - format: 'raw' + storeCodec: 'raw' }).returns(cid.toV1()) const res = await http({ method: 'POST', - url: '/api/v0/dag/put?format=raw', + url: '/api/v0/dag/put?storeCodec=raw', ...await toHeadersAndPayload(node) }, { ipfs }) @@ -326,7 +331,8 @@ describe('/dag', () => { const node = { foo: 'bar' } - ipfs.dag.put.withArgs(node, { + const encoded = Buffer.from(JSON.stringify(node)) + ipfs.dag.put.withArgs(encoded, { ...defaultOptions, pin: true }).returns(cid.toV1()) @@ -347,22 +353,19 @@ describe('/dag', () => { const data = Buffer.from('some data') const codec = 'git-raw' - ipfs.block.put.withArgs(data).returns(cid) - ipfs.dag.get.withArgs(cid).returns({ - value: data - }) ipfs.dag.put.withArgs(data, { ...defaultOptions, - format: codec + inputCodec: codec, + storeCodec: codec }).returns(cid.toV1()) const res = await http({ method: 'POST', - url: '/api/v0/dag/put?format=git-raw&input-enc=raw', + url: `/api/v0/dag/put?storeCodec=${codec}&inputCodec=${codec}`, ...await toHeadersAndPayload(data) }, { ipfs }) - expect(ipfs.block.put.called).to.be.true() + expect(ipfs.dag.put.called).to.be.true() expect(res).to.have.property('statusCode', 200) expect(res).to.have.deep.nested.property('result.Cid', { '/': cid.toV1().toString() }) }) @@ -372,7 +375,8 @@ describe('/dag', () => { const node = { foo: 'bar' } - ipfs.dag.put.withArgs(node, { + const encoded = Buffer.from(JSON.stringify(node)) + ipfs.dag.put.withArgs(encoded, { ...defaultOptions, timeout: 1000 }).returns(cid.toV1()) diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json index e13a42627f..66acc1a6bb 100644 --- 
a/packages/ipfs/package.json +++ b/packages/ipfs/package.json @@ -84,7 +84,7 @@ "copyfiles": "^2.4.1", "cross-env": "^7.0.0", "electron-webrtc": "^0.3.0", - "go-ipfs": "0.9.1", + "go-ipfs": "0.10.0", "interface-ipfs-core": "^0.152.2", "ipfs-client": "^0.7.4", "ipfs-core-types": "^0.8.4", diff --git a/packages/ipfs/test/interface-http-go.js b/packages/ipfs/test/interface-http-go.js index d12e0a5f75..45c38564f0 100644 --- a/packages/ipfs/test/interface-http-go.js +++ b/packages/ipfs/test/interface-http-go.js @@ -118,21 +118,13 @@ describe('interface-ipfs-core over ipfs-http-client tests against go-ipfs', () = tests.dag(commonFactory, { skip: [ // dag.get: - { - name: 'should get a dag-pb node local value', - reason: 'FIXME vmx 2018-02-22: Currently not supported in go-ipfs, it might be possible once https://github.com/ipfs/go-ipfs/issues/4728 is done' - }, - { - name: 'should get dag-pb value via dag-cbor node', - reason: 'FIXME vmx 2018-02-22: Currently not supported in go-ipfs, it might be possible once https://github.com/ipfs/go-ipfs/issues/4728 is done' - }, - { - name: 'should get by CID with path option', - reason: 'FIXME vmx 2018-02-22: Currently not supported in go-ipfs, it might be possible once https://github.com/ipfs/go-ipfs/issues/4728 is done' - }, { name: 'should get only a CID, due to resolving locally only', reason: 'FIXME: go-ipfs does not support localResolve option' + }, + { + name: 'should get a node added as CIDv0 with a CIDv1', + reason: 'go-ipfs doesn\'t use CIDv0 for DAG API anymore' } ] })
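
The hunks above rename the `dag.put` option `format` to `storeCodec`, add `inputCodec` for supplying pre-encoded bytes that the node should decode before re-encoding and storing, and make CIDv1 the default CID version. A minimal usage sketch of the resulting API, assuming a node is reachable at the HTTP client's default API address (this is an illustration, not code from the patch):

```JavaScript
import { create } from 'ipfs-http-client'
import * as dagCBOR from '@ipld/dag-cbor'

const ipfs = create() // assumes a local daemon on the default API endpoint

// Encode-on-put: a plain object is serialized with `storeCodec` before storing.
const cid1 = await ipfs.dag.put({ hello: 'world' }, { storeCodec: 'dag-cbor', hashAlg: 'sha2-256' })

// Pass-through: when `inputCodec` is given, the Uint8Array is assumed to already be
// encoded with that codec and is forwarded as-is for the server to decode and re-encode.
const bytes = dagCBOR.encode({ hello: 'world' })
const cid2 = await ipfs.dag.put(bytes, { inputCodec: 'dag-cbor', storeCodec: 'dag-cbor' })

console.log(cid1.toString()) // a CIDv1, rendered in base32 by default
console.log(cid2.toString())
```

Without `inputCodec`, a `Uint8Array` argument is treated as a value to be encoded with `storeCodec` (e.g. `storeCodec: 'raw'`), which is why the git-raw pass-through test above supplies both `inputCodec` and `storeCodec`.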