
feat: update to latest ipld-resolver (#137)
* feat: update to latest ipld-resolver
daviddias authored Feb 8, 2017
1 parent 1f0d760 commit 211dfb6
Showing 9 changed files with 42 additions and 45 deletions.
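
The recurring change on the write side is the shape of ipldResolver.put: the newer ipld-resolver takes the DAG node as the first argument and an options object carrying the CID as the second, where the older release took a single { node, cid } wrapper. A minimal before/after sketch of the call shape as it appears in this diff (not a full program; `ipldResolver` is an ipld-resolver instance and `node` a DAGNode from ipld-dag-pb):

    // ipld-resolver ^0.6.0 (before): node and cid wrapped in one object
    ipldResolver.put({
      node: node,
      cid: new CID(node.multihash)
    }, (err) => { if (err) throw err })

    // ipld-resolver ^0.8.0 (after): node first, options with the cid second
    ipldResolver.put(node, {
      cid: new CID(node.multihash)
    }, (err) => { if (err) throw err })

The same reshuffle shows up in src/builder/builder.js, src/builder/reduce.js and src/importer/flush-tree.js below.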
6 changes: 3 additions & 3 deletions package.json
@@ -39,7 +39,7 @@
   },
   "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme",
   "devDependencies": {
-    "aegir": "^9.4.0",
+    "aegir": "^10.0.0",
     "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
     "fs-pull-blob-store": "^0.4.1",
@@ -58,7 +58,7 @@
     "deep-extend": "^0.4.1",
     "ipfs-unixfs": "^0.1.9",
     "ipld-dag-pb": "^0.9.4",
-    "ipld-resolver": "^0.6.0",
+    "ipld-resolver": "^0.8.0",
     "is-ipfs": "^0.3.0",
     "lodash": "^4.17.4",
     "multihashes": "^0.3.2",
@@ -85,4 +85,4 @@
     "jbenet <juan@benet.ai>",
     "nginnever <ginneversource@gmail.com>"
   ]
-}
+}
12 changes: 4 additions & 8 deletions src/builder/builder.js
@@ -62,8 +62,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
       waterfall([
         (cb) => DAGNode.create(d.marshal(), cb),
         (node, cb) => {
-          ipldResolver.put({
-            node: node,
+          ipldResolver.put(node, {
             cid: new CID(node.multihash)
           }, (err) => cb(err, node))
         }
@@ -104,12 +103,9 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
           })
         }),
         pull.asyncMap((leaf, callback) => {
-          ipldResolver.put(
-            {
-              node: leaf.DAGNode,
-              cid: new CID(leaf.DAGNode.multihash)
-            },
-            err => callback(err, leaf)
+          ipldResolver.put(leaf.DAGNode, {
+            cid: new CID(leaf.DAGNode.multihash)
+          }, (err) => callback(err, leaf)
           )
         }),
         pull.map((leaf) => {
3 changes: 1 addition & 2 deletions src/builder/reduce.js
@@ -34,8 +34,7 @@ module.exports = function (file, ipldResolver, options) {
   waterfall([
     (cb) => DAGNode.create(f.marshal(), links, cb),
     (node, cb) => {
-      ipldResolver.put({
-        node: node,
+      ipldResolver.put(node, {
         cid: new CID(node.multihash)
       }, (err) => cb(err, node))
     }
10 changes: 6 additions & 4 deletions src/exporter/dir.js
@@ -30,7 +30,7 @@ function dirExporter (node, name, ipldResolver) {
       path: path.join(name, link.name),
       hash: link.multihash
     })),
-    paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
+    paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, result) => {
       if (err) {
         return cb(err)
       }
@@ -40,10 +40,12 @@
         size: item.size
       }
 
+      const node = result.value
+
       cb(null, switchType(
-        n,
-        () => cat([pull.values([dir]), dirExporter(n, item.path, ipldResolver)]),
-        () => fileExporter(n, item.path, ipldResolver)
+        node,
+        () => cat([pull.values([dir]), dirExporter(node, item.path, ipldResolver)]),
+        () => fileExporter(node, item.path, ipldResolver)
       ))
     })),
     pull.flatten()
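
The exporter changes are the read-side counterpart: get no longer hands the callback a raw DAG node but a result object, with the node available as result.value. A minimal sketch of the unwrapping used in dir.js, assuming `cid` refers to a block already in the store and `cb` is the surrounding callback:

    // Sketch inferred from this diff; only result.value is relied on here.
    ipldResolver.get(cid, (err, result) => {
      if (err) { return cb(err) }
      const node = result.value // the DAGNode itself
      // node.links / node.data can then be passed to switchType as before
      cb(null, node)
    })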
3 changes: 2 additions & 1 deletion src/exporter/file.js
@@ -20,7 +20,8 @@ module.exports = (node, name, ipldResolver) => {
   function visitor (node) {
     return pull(
       pull.values(node.links),
-      paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb))
+      paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb)),
+      pull.map((result) => result.value)
     )
   }
 
2 changes: 2 additions & 0 deletions src/exporter/index.js
@@ -34,6 +34,7 @@ module.exports = (hash, ipldResolver, options) => {
     }
     return pull(
       ipldResolver.getStream(new CID(item.hash)),
+      pull.map((result) => result.value),
      pull.map((node) => switchType(
         node,
         () => dirExporter(node, item.path, ipldResolver),
@@ -46,6 +47,7 @@ module.exports = (hash, ipldResolver, options) => {
   // Traverse the DAG
   return pull(
     ipldResolver.getStream(new CID(hash)),
+    pull.map((result) => result.value),
     pull.map((node) => switchType(
       node,
       () => traverse.widthFirst({path: hash, hash}, visitor),
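
getStream appears to follow the same convention and emits result objects rather than nodes, so each pull pipeline gains a pull.map step to unwrap them before switchType runs. A rough usage sketch, assuming pull-stream and cids are available and `hash` is a valid multihash:

    const pull = require('pull-stream')
    const CID = require('cids')

    pull(
      ipldResolver.getStream(new CID(hash)),
      pull.map((result) => result.value), // unwrap to the underlying DAG node
      pull.drain((node) => {
        console.log('links:', node.links.length)
      })
    )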
3 changes: 1 addition & 2 deletions src/importer/flush-tree.js
@@ -135,8 +135,7 @@ function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
     (node, cb) => {
       sizeIndex[mh.toB58String(node.multihash)] = node.size
 
-      ipldResolver.put({
-        node: node,
+      ipldResolver.put(node, {
         cid: new CID(node.multihash)
       }, (err) => cb(err, node))
     }
9 changes: 5 additions & 4 deletions test/browser.js
@@ -1,15 +1,16 @@
 /* eslint-env mocha */
+/* global self */
 'use strict'
 
 const Store = require('idb-pull-blob-store')
 const IPFSRepo = require('ipfs-repo')
 const repoContext = require.context('buffer!./repo-example', true)
 const pull = require('pull-stream')
 
-const idb = window.indexedDB ||
-  window.mozIndexedDB ||
-  window.webkitIndexedDB ||
-  window.msIndexedDB
+const idb = self.indexedDB ||
+  self.mozIndexedDB ||
+  self.webkitIndexedDB ||
+  self.msIndexedDB
 
 idb.deleteDatabase('ipfs')
 idb.deleteDatabase('ipfs/blocks')
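
The browser test now reads IndexedDB off self instead of window, with a matching /* global self */ hint for the linter. Presumably this lets the bundled test run wherever it ends up, since self points at the global scope both on the main thread and inside a web worker, while window only exists on the main thread. A rough sketch of the idea (the database name is made up for illustration):

    /* global self */
    // On the main thread `self` is the window; in a web worker it is the
    // worker's global scope, so this lookup works in both environments.
    const idb = self.indexedDB ||
      self.mozIndexedDB ||
      self.webkitIndexedDB ||
      self.msIndexedDB

    if (idb) {
      idb.deleteDatabase('example-db') // hypothetical database name
    }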
39 changes: 18 additions & 21 deletions test/test-exporter.js
@@ -28,29 +28,26 @@ module.exports = (repo) => {
     it('ensure hash inputs are sanitized', (done) => {
       const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
       const mhBuf = new Buffer(bs58.decode(hash))
+      const cid = new CID(hash)
 
-      pull(
-        ipldResolver.getStream(new CID(hash)),
-        pull.map((node) => UnixFS.unmarshal(node.data)),
-        pull.collect((err, nodes) => {
-          expect(err).to.not.exist
-
-          const unmarsh = nodes[0]
+      ipldResolver.get(cid, (err, result) => {
+        expect(err).to.not.exist
+        const node = result.value
+        const unmarsh = UnixFS.unmarshal(node.data)
 
-          pull(
-            exporter(mhBuf, ipldResolver),
-            pull.collect(onFiles)
-          )
+        pull(
+          exporter(mhBuf, ipldResolver),
+          pull.collect(onFiles)
+        )
 
-          function onFiles (err, files) {
-            expect(err).to.not.exist
-            expect(files).to.have.length(1)
-            expect(files[0]).to.have.property('path', hash)
+        function onFiles (err, files) {
+          expect(err).to.not.exist
+          expect(files).to.have.length(1)
+          expect(files[0]).to.have.property('path', hash)
 
-            fileEql(files[0], unmarsh.data, done)
-          }
-        })
-      )
+          fileEql(files[0], unmarsh.data, done)
+        }
+      })
     })
 
     it('export a file with no links', (done) => {
@@ -59,7 +56,7 @@
       pull(
         zip(
           pull(
-            ipldResolver.getStream(new CID(hash)),
+            ipldResolver._getStream(new CID(hash)),
             pull.map((node) => UnixFS.unmarshal(node.data))
           ),
           exporter(hash, ipldResolver)
@@ -176,7 +173,7 @@ function fileEql (f1, f2, done) {
 
   try {
     if (f2) {
-      expect(Buffer.concat(data)).to.be.eql(f2)
+      expect(Buffer.concat(data)).to.eql(f2)
     } else {
       expect(data).to.exist
     }
