diff --git a/src/exporters/file.js b/src/exporters/file.js
index c2993fcb..7dfb4bfb 100644
--- a/src/exporters/file.js
+++ b/src/exporters/file.js
@@ -3,28 +3,33 @@
 const UnixFS = require('ipfs-unixfs')
 const pull = require('pull-stream')
 
-function extractContent (node) {
-  return UnixFS.unmarshal(node.data).data
-}
-
 // Logic to export a single (possibly chunked) unixfs file.
 module.exports = (node, name, ds) => {
+  const file = UnixFS.unmarshal(node.data)
   let content
 
   if (node.links.length === 0) {
-    const c = extractContent(node)
-    content = pull.values([c])
+    content = pull.values([file.data])
   } else {
     content = pull(
       pull.values(node.links),
       pull.map((link) => ds.getStream(link.hash)),
       pull.flatten(),
-      pull.map(extractContent)
+      pull.map((node) => {
+        try {
+          const ex = UnixFS.unmarshal(node.data)
+          return ex.data
+        } catch (err) {
+          console.error(node)
+          throw new Error('Failed to unmarshal node')
+        }
+      })
     )
   }
 
   return pull.values([{
     content: content,
-    path: name
+    path: name,
+    size: file.fileSize()
   }])
 }
diff --git a/test/test-exporter.js b/test/test-exporter.js
index baa7f174..9f3a8cad 100644
--- a/test/test-exporter.js
+++ b/test/test-exporter.js
@@ -24,6 +24,27 @@ module.exports = (repo) => {
     ds = new DAGService(bs)
   })
 
+  it('import and export', (done) => {
+    pull(
+      pull.values([{
+        path: '1.2MiB.txt',
+        content: pull.values([bigFile, Buffer('hello world')])
+      }]),
+      unixFSEngine.importer(ds),
+      pull.map((file) => {
+        expect(file.path).to.be.eql('1.2MiB.txt')
+
+        return exporter(file.multihash, ds)
+      }),
+      pull.flatten(),
+      pull.collect((err, files) => {
+        expect(err).to.not.exist
+        expect(files[0].size).to.be.eql(bigFile.length + 11)
+        fileEql(files[0], Buffer.concat([bigFile, Buffer('hello world')]), done)
+      })
+    )
+  })
+
   it('ensure hash inputs are sanitized', (done) => {
     const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
     const mhBuf = new Buffer(bs58.decode(hash))
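
For reviewers, a minimal usage sketch of the patched exporter's output (illustrative only, not part of the diff). It assumes `exporter`, `ds`, and `multihash` are wired up as in the test above, and shows where the new `size` field surfaces alongside `path` and `content`:

const pull = require('pull-stream')

// Hypothetical consumer; `exporter` and `ds` are assumed to be set up as in
// test/test-exporter.js above.
pull(
  exporter(multihash, ds),
  pull.collect((err, files) => {
    if (err) throw err
    console.log(files[0].path) // e.g. '1.2MiB.txt'
    console.log(files[0].size) // file.fileSize(), added by this diff
    pull(
      files[0].content, // pull-stream of Buffers, one per chunk
      pull.collect((err, chunks) => {
        if (err) throw err
        console.log(Buffer.concat(chunks).length) // should equal files[0].size
      })
    )
  })
)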