diff --git a/package.json b/package.json
index d684499640..92346aa177 100644
--- a/package.json
+++ b/package.json
@@ -96,6 +96,7 @@
     "promisify-es6": "^1.0.1",
     "pull-file": "^1.0.0",
     "pull-paramap": "^1.1.6",
+    "pull-pushable": "^2.0.1",
     "pull-sort": "^1.0.0",
     "pull-stream": "^3.4.5",
     "pull-stream-to-stream": "^1.3.3",
diff --git a/src/core/ipfs/files.js b/src/core/ipfs/files.js
index 5070398b17..5525a66d62 100644
--- a/src/core/ipfs/files.js
+++ b/src/core/ipfs/files.js
@@ -50,23 +50,26 @@ module.exports = function files (self) {
         return callback(new Error('You must supply a multihash'))
       }

-      pull(
-        pull.values([hash]),
-        pull.asyncMap(self._dagS.get.bind(self._dagS)),
-        pull.map((node) => {
-          const data = UnixFS.unmarshal(node.data)
-          if (data.type === 'directory') {
-            return pull.error(new Error('This dag node is a directory'))
-          }
-
-          return exporter(hash, self._dagS)
-        }),
-        pull.flatten(),
-        pull.collect((err, files) => {
-          if (err) return callback(err)
-          callback(null, toStream.source(files[0].content))
-        })
-      )
+      self._dagS.get(hash, (err, node) => {
+        if (err) {
+          return callback(err)
+        }
+
+        const data = UnixFS.unmarshal(node.data)
+        if (data.type === 'directory') {
+          return callback(
+            new Error('This dag node is a directory')
+          )
+        }
+
+        pull(
+          exporter(hash, self._dagS),
+          pull.collect((err, files) => {
+            if (err) return callback(err)
+            callback(null, toStream.source(files[0].content))
+          })
+        )
+      })
     }),
@@ -81,6 +84,10 @@ module.exports = function files (self) {
           return file
         })
       )))
+    }),
+
+    getPull: promisify((hash, callback) => {
+      callback(null, exporter(hash, self._dagS))
     })
   }
 }
diff --git a/src/core/ipfs/object.js b/src/core/ipfs/object.js
index 85eac9033d..8e987b4d07 100644
--- a/src/core/ipfs/object.js
+++ b/src/core/ipfs/object.js
@@ -155,7 +155,6 @@ module.exports = function object (self) {
         if (err) {
           return cb(err)
         }
-
         cb(null, node.data)
       })
     }),
diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js
index f4b8f14dd2..3c3cbd1e5e 100644
--- a/src/http-api/resources/files.js
+++ b/src/http-api/resources/files.js
@@ -1,9 +1,16 @@
 'use strict'

 const bs58 = require('bs58')
+const multipart = require('ipfs-multipart')
 const debug = require('debug')
+const tar = require('tar-stream')
 const log = debug('http-api:files')
 log.error = debug('http-api:files:error')
+const pull = require('pull-stream')
+const toStream = require('pull-stream-to-stream')
+const toPull = require('stream-to-pull-stream')
+const pushable = require('pull-pushable')
+const EOL = require('os').EOL

 exports = module.exports

@@ -33,8 +40,9 @@ exports.cat = {
   // main route handler which is called after the above `parseArgs`, but only if the args were valid
   handler: (request, reply) => {
     const key = request.pre.args.key
+    const ipfs = request.server.app.ipfs

-    request.server.app.ipfs.files.cat(key, (err, stream) => {
+    ipfs.files.cat(key, (err, stream) => {
       if (err) {
         log.error(err)
         return reply({
@@ -42,9 +50,142 @@ exports.cat = {
           Code: 0
         }).code(500)
       }
-      stream.on('data', (data) => {
-        return reply(data)
+
+      // hapi is not very clever and throws if no
+      // - _read method
+      // - _readableState object
+      // are there :(
+      stream._read = () => {}
+      stream._readableState = {}
+      return reply(stream).header('X-Stream-Output', '1')
+    })
+  }
+}
+
+exports.get = {
+  // uses common parseKey method that returns a `key`
+  parseArgs: exports.parseKey,
+
+  // main route handler which is called after the above `parseArgs`, but only if the args were valid
+  handler: (request, reply) => {
+    const key = request.pre.args.key
+    const ipfs = request.server.app.ipfs
+    const pack = tar.pack()
+
+    ipfs.files.getPull(key, (err, stream) => {
+      if (err) {
+        log.error(err)
+
+        reply({
+          Message: 'Failed to get file: ' + err,
+          Code: 0
+        }).code(500)
+        return
+      }
+
+      pull(
+        stream,
+        pull.asyncMap((file, cb) => {
+          const header = {name: file.path}
+
+          if (!file.content) {
+            header.type = 'directory'
+            pack.entry(header)
+            cb()
+          } else {
+            header.size = file.size
+            toStream.source(file.content)
+              .pipe(pack.entry(header, cb))
+          }
+        }),
+        pull.onEnd((err) => {
+          if (err) {
+            log.error(err)
+
+            reply({
+              Message: 'Failed to get file: ' + err,
+              Code: 0
+            }).code(500)
+            return
+          }
+
+          pack.finalize()
+        })
+      )
+
+      // the reply must read the tar stream,
+      // to pull values through
+      reply(pack).header('X-Stream-Output', '1')
+    })
+  }
+}
+
+exports.add = {
+  handler: (request, reply) => {
+    if (!request.payload) {
+      return reply('Array, Buffer, or String is required.').code(400).takeover()
+    }
+
+    const ipfs = request.server.app.ipfs
+    // TODO: make pull-multipart
+    const parser = multipart.reqParser(request.payload)
+    let filesParsed = false
+
+    const fileAdder = pushable()
+
+    parser.on('file', (fileName, fileStream) => {
+      const filePair = {
+        path: fileName,
+        content: toPull(fileStream)
+      }
+      filesParsed = true
+      fileAdder.push(filePair)
+    })
+
+    parser.on('directory', (directory) => {
+      fileAdder.push({
+        path: directory,
+        content: ''
       })
     })
+
+    parser.on('end', () => {
+      if (!filesParsed) {
+        return reply("File argument 'data' is required.")
+          .code(400).takeover()
+      }
+      fileAdder.end()
+    })
+
+    pull(
+      fileAdder,
+      ipfs.files.createAddPullStream(),
+      pull.map((file) => {
+        return {
+          Name: file.path ? file.path : file.hash,
+          Hash: file.hash
+        }
+      }),
+      pull.map((file) => JSON.stringify(file) + EOL),
+      pull.collect((err, files) => {
+        if (err) {
+          return reply({
+            Message: err,
+            Code: 0
+          }).code(500)
+        }
+
+        if (files.length === 0 && filesParsed) {
+          return reply({
+            Message: 'Failed to add files.',
+            Code: 0
+          }).code(500)
+        }
+
+        reply(files.join(''))
+          .header('x-chunked-output', '1')
+          .header('content-type', 'application/json')
+      })
+    )
   }
 }
diff --git a/src/http-api/routes/files.js b/src/http-api/routes/files.js
index 99c47741e3..da57b3f2f1 100644
--- a/src/http-api/routes/files.js
+++ b/src/http-api/routes/files.js
@@ -6,6 +6,7 @@ module.exports = (server) => {
   const api = server.select('API')

   api.route({
+    // TODO fix method
     method: '*',
     path: '/api/v0/cat',
     config: {
@@ -15,4 +16,29 @@ module.exports = (server) => {
       handler: resources.files.cat.handler
     }
   })
+
+  api.route({
+    // TODO fix method
+    method: '*',
+    path: '/api/v0/get',
+    config: {
+      pre: [
+        { method: resources.files.get.parseArgs, assign: 'args' }
+      ],
+      handler: resources.files.get.handler
+    }
+  })
+
+  api.route({
+    // TODO fix method
+    method: '*',
+    path: '/api/v0/add',
+    config: {
+      payload: {
+        parse: false,
+        output: 'stream'
+      },
+      handler: resources.files.add.handler
+    }
+  })
 }
diff --git a/test/core/both/test-init.js b/test/core/both/test-init.js
index fb566b670d..c7671b5183 100644
--- a/test/core/both/test-init.js
+++ b/test/core/both/test-init.js
@@ -13,7 +13,7 @@ describe('init', function () {
     const repo = createTempRepo()
     const ipfs = new IPFS(repo)

-    ipfs.init({ emptyRepo: true }, (err) => {
+    ipfs.init({ emptyRepo: true, bits: 128 }, (err) => {
       expect(err).to.not.exist

       repo.exists((err, res) => {
diff --git a/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js b/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js
index 4a322b948e..4762a26f76 100644
--- a/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js
+++ b/test/http-api/interface-ipfs-core-over-ipfs-api/test-files.js
@@ -2,7 +2,6 @@
 'use strict'

-/*
 const test = require('interface-ipfs-core')
 const FactoryClient = require('./../../utils/factory-http')

@@ -17,8 +16,5 @@ const common = {
     fc.dismantle(callback)
   }
 }

-*/
-// TODO
-// needs: https://github.com/ipfs/js-ipfs/pull/323
-// test.files(common)
+test.files(common)
diff --git a/test/http-api/ipfs-api/test-files.js b/test/http-api/ipfs-api/test-files.js
deleted file mode 100644
index 95312de30b..0000000000
--- a/test/http-api/ipfs-api/test-files.js
+++ /dev/null
@@ -1,38 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const expect = require('chai').expect
-
-module.exports = (ctl) => {
-  describe('.files', () => {
-    describe('.add', () => {}) // TODO
-
-    describe('.cat', () => {
-      it('returns error for request without argument', (done) => {
-        ctl.cat(null, (err, result) => {
-          expect(err).to.exist
-          done()
-        })
-      })
-
-      it('returns error for request with invalid argument', (done) => {
-        ctl.cat('invalid', (err, result) => {
-          expect(err).to.exist
-          done()
-        })
-      })
-
-      it('returns a buffer', (done) => {
-        ctl.cat('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o', (err, result) => {
-          expect(err).to.not.exist
-          expect(result).to.deep.equal(new Buffer('hello world' + '\n'))
-          done()
-        })
-      })
-    })
-
-    describe('.get', () => {}) // TODO
-
-    describe('.ls', () => {}) // TODO
-  })
-}
diff --git a/test/utils/factory-core/index.js b/test/utils/factory-core/index.js
index 03b95d89ad..f334536795 100644
--- a/test/utils/factory-core/index.js
+++ b/test/utils/factory-core/index.js
@@ -36,7 +36,7 @@ function Factory () {
     if (!config) {
       config = JSON.parse(JSON.stringify(defaultConfig))

-      const pId = PeerId.create({ bits: 32 }).toJSON()
+      const pId = PeerId.create({ bits: 512 }).toJSON()
       config.Identity.PeerID = pId.id
       config.Identity.PrivKey = pId.privKey
     }
@@ -69,7 +69,7 @@ function Factory () {
     // create the IPFS node
     const ipfs = new IPFS(repo)

-    ipfs.init({ emptyRepo: true }, (err) => {
+    ipfs.init({ emptyRepo: true, bits: 512 }, (err) => {
       if (err) {
         return callback(err)
       }
diff --git a/test/utils/factory-http/index.js b/test/utils/factory-http/index.js
index bcafd3b2a2..6582184d25 100644
--- a/test/utils/factory-http/index.js
+++ b/test/utils/factory-http/index.js
@@ -37,7 +37,7 @@ function Factory () {
     if (!config) {
       config = JSON.parse(JSON.stringify(defaultConfig))

-      const pId = PeerId.create({ bits: 32 }).toJSON()
+      const pId = PeerId.create({ bits: 512 }).toJSON()
       config.Identity.PeerID = pId.id
       config.Identity.PrivKey = pId.privKey
     }
@@ -53,7 +53,7 @@ function Factory () {
     // create the IPFS node
     const ipfs = new IPFS(repo)

-    ipfs.init({ emptyRepo: true }, (err) => {
+    ipfs.init({ emptyRepo: true, bits: 512 }, (err) => {
       if (err) {
         return callback(err)
       }
diff --git a/test/utils/temp-node.js b/test/utils/temp-node.js
index adabcc0155..7375d9e60b 100644
--- a/test/utils/temp-node.js
+++ b/test/utils/temp-node.js
@@ -31,7 +31,7 @@ function createTempNode (num, callback) {
   num = leftPad(num, 3, 0)

   series([
-    (cb) => ipfs.init({ emptyRepo: true }, cb),
+    (cb) => ipfs.init({ emptyRepo: true, bits: 512 }, cb),
     (cb) => setAddresses(repo, num, cb),
     (cb) => ipfs.load(cb)
   ], (err) => {
diff --git a/test/utils/temp-repo.js b/test/utils/temp-repo.js
index 47d39de6e0..878ca2812d 100644
--- a/test/utils/temp-repo.js
+++ b/test/utils/temp-repo.js
@@ -31,7 +31,7 @@ function createTempRepo () {
   }

   var repo = new IPFSRepo(repoPath, {
-    bits: 64,
+    bits: 512,
     stores: store
   })
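
A few usage sketches for the new pieces. With the core rewrite above, `files.cat` resolves the DAG node via `self._dagS.get` before running the exporter, and the new `files.getPull` hands back the exporter's pull-stream source directly. A minimal sketch, assuming `ipfs` is an already-initialised node and `hash` is a placeholder multihash (both assumptions, not part of the patch):

    const pull = require('pull-stream')

    // cat: the callback receives a Node.js readable stream of the file's bytes
    ipfs.files.cat(hash, (err, stream) => {
      if (err) throw err
      stream.pipe(process.stdout)
    })

    // getPull: the callback receives the raw exporter pull-stream source,
    // emitting { path, size, content } entries (content is itself a pull-stream)
    ipfs.files.getPull(hash, (err, source) => {
      if (err) throw err
      pull(
        source,
        pull.collect((err, entries) => {
          if (err) throw err
          entries.forEach((entry) => console.log(entry.path))
        })
      )
    })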
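
The `/api/v0/get` handler replies with a tar archive packed by `tar-stream`, so a client has to unpack it. A sketch of consuming the endpoint with Node's `http` module and `tar-stream`; the API address and the `?arg=` query parameter follow the usual HTTP API convention and are assumptions here, since `parseKey` itself is not part of this diff:

    const http = require('http')
    const tar = require('tar-stream')

    const apiAddr = 'http://localhost:5002' // adjust to the daemon's API address
    const hash = 'QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o'

    const extract = tar.extract()

    extract.on('entry', (header, stream, next) => {
      // header.name is the path set by the handler; directories carry type 'directory'
      console.log(header.type, header.name)
      stream.on('end', next)
      stream.resume() // drain this entry so the next one can be emitted
    })

    http.get(apiAddr + '/api/v0/get?arg=' + hash, (res) => {
      res.pipe(extract)
    })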
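
The `add` handler bridges the event-driven multipart parser into a pull-stream pipeline with `pull-pushable`: every parsed file is `push`ed into the pushable source, and `end()` is called once the parser finishes. The same bridging pattern in isolation, using only the `pull-stream` and `pull-pushable` packages added above (the file contents here are made up for illustration):

    const pull = require('pull-stream')
    const pushable = require('pull-pushable')

    const source = pushable()

    // values can be pushed at any time, e.g. from event-emitter or parser callbacks
    source.push({ path: 'hello.txt', content: new Buffer('hello world\n') })
    source.push({ path: 'empty-dir', content: '' })
    source.end() // signal that no more values will arrive

    pull(
      source,
      pull.map((item) => item.path),
      pull.collect((err, paths) => {
        if (err) throw err
        console.log(paths) // => [ 'hello.txt', 'empty-dir' ]
      })
    )

The pushable source buffers whatever the parser emits until the downstream importer asks for it, which is what lets an event-based parser feed a demand-driven pull pipeline.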