diff --git a/package.json b/package.json index 05b0886b14..2633008ed2 100644 --- a/package.json +++ b/package.json @@ -122,4 +122,4 @@ "kumavis ", "nginnever " ] -} \ No newline at end of file +} diff --git a/src/cli/commands/files/add.js b/src/cli/commands/files/add.js index f69a8b700f..cd2b69aa92 100644 --- a/src/cli/commands/files/add.js +++ b/src/cli/commands/files/add.js @@ -8,6 +8,7 @@ log.error = debug('cli:version:error') const bs58 = require('bs58') const fs = require('fs') const parallelLimit = require('run-parallel-limit') +const async = require('async') const path = require('path') const glob = require('glob') @@ -35,6 +36,50 @@ function checkPath (inPath, recursive) { return inPath } +function daemonOn (res, inPath, ipfs) { + const files = [] + if (res.length !== 0) { + const index = inPath.lastIndexOf('/') + async.eachLimit(res, 10, (element, callback) => { + if (fs.statSync(element).isDirectory()) { + callback() + } else { + const filePair = { + path: element.substring(index + 1, element.length), + content: fs.createReadStream(element) + } + files.push(filePair) + callback() + } + }, (err) => { + if (err) { + throw err + } + ipfs.add(files, (err, res) => { + if (err) { + throw err + } + res.forEach((goRes) => { + console.log('added', goRes.Hash, goRes.Name) + }) + }) + }) + } else { + const filePair = { + path: inPath.substring(inPath.lastIndexOf('/') + 1, inPath.length), + content: fs.createReadStream(inPath) + } + files.push(filePair) + ipfs.add(files, (err, res) => { + if (err) { + throw err + } + console.log('added', res[0].Hash, res[0].Name) + }) + } + return +} + module.exports = Command.extend({ desc: 'Add a file to IPFS using the UnixFS data format', @@ -59,36 +104,71 @@ module.exports = Command.extend({ if (err) { throw err } - const i = ipfs.files.add() - var filePair - i.on('data', (file) => { - console.log('added', bs58.encode(file.multihash).toString(), file.path) - }) - i.once('end', () => { - return - }) - if (res.length !== 0) { - 
const index = inPath.lastIndexOf('/') - parallelLimit(res.map((element) => (callback) => { - if (!fs.statSync(element).isDirectory()) { - i.write({ - path: element.substring(index + 1, element.length), - stream: fs.createReadStream(element) - }) - } - callback() - }), 10, (err) => { + if (utils.isDaemonOn()) { + daemonOn(res, inPath, ipfs) + } else { + ipfs.files.add((err, i) => { if (err) { throw err } - i.end() + var filePair + i.on('data', (file) => { + console.log('added', bs58.encode(file.multihash).toString(), file.path) + }) + i.once('end', () => { + return + }) + if (res.length !== 0) { + const index = inPath.lastIndexOf('/') + parallelLimit(res.map((element) => (callback) => { + if (!fs.statSync(element).isDirectory()) { + element.substring(index + 1, element.length) + i.write({ + path: element.substring(index + 1, element.length), + stream: fs.createReadStream(element) + }) + } else { + fs.readdir(element, (err, files) => { + if (err) { + throw err + } + if (files.length === 0) { + i.write({ + path: element.substring(index + 1, element.length), + stream: null + }) + } + }) + } + callback() + }), 10, (err) => { + if (err) { + throw err + } + i.end() + }) + } else { + if (!fs.statSync(inPath).isDirectory()) { + rs = fs.createReadStream(inPath) + inPath = inPath.substring(inPath.lastIndexOf('/') + 1, inPath.length) + filePair = {path: inPath, stream: rs} + i.write(filePair) + i.end() + } else { + fs.readdir(inPath, (err, files) => { + if (err) { + throw err + } + if (files.length === 0) { + i.write({ + path: inPath, + stream: null + }) + } + }) + } + } }) - } else { - rs = fs.createReadStream(inPath) - inPath = inPath.substring(inPath.lastIndexOf('/') + 1, inPath.length) - filePair = {path: inPath, stream: rs} - i.write(filePair) - i.end() } }) }) diff --git a/src/cli/commands/files/cat.js b/src/cli/commands/files/cat.js index c517b8ad3e..ff87a82a69 100644 --- a/src/cli/commands/files/cat.js +++ b/src/cli/commands/files/cat.js @@ -31,13 +31,11 @@ 
module.exports = Command.extend({ }) return } - ipfs.files.cat(path, (err, res) => { + ipfs.files.cat(path, (err, file) => { if (err) { throw (err) } - res.on('data', (data) => { - data.stream.pipe(process.stdout) - }) + file.pipe(process.stdout) }) }) } diff --git a/src/core/index.js b/src/core/index.js index e6d1d3be32..90ce7b5e05 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -57,5 +57,7 @@ function IPFS (repoInstance) { this.object = object(this) this.libp2p = libp2p(this) this.files = files(this) + this.cat = files(this).cat // Alias for js-ipfs-api cat + this.add = files(this).add // Alias for js-ipfs-api add this.bitswap = bitswap(this) } diff --git a/src/core/ipfs/files.js b/src/core/ipfs/files.js index 4b4dcf8c34..92b65173fa 100644 --- a/src/core/ipfs/files.js +++ b/src/core/ipfs/files.js @@ -3,10 +3,11 @@ const Importer = require('ipfs-unixfs-engine').importer const Exporter = require('ipfs-unixfs-engine').exporter const UnixFS = require('ipfs-unixfs') +const promisify = require('promisify-es6') module.exports = function files (self) { return { - add: (arr, callback) => { + add: promisify((arr, callback) => { if (typeof arr === 'function') { callback = arr arr = undefined @@ -15,7 +16,7 @@ module.exports = function files (self) { callback = function noop () {} } if (arr === undefined) { - return new Importer(self._dagS) + callback(null, new Importer(self._dagS)) } const i = new Importer(self._dagS) @@ -34,8 +35,13 @@ module.exports = function files (self) { }) i.end() - }, - cat: (hash, callback) => { + }), + + cat: promisify((hash, callback) => { + if (typeof hash === 'function') { + callback = hash + return callback('You must supply a multihash', null) + } self._dagS.get(hash, (err, fetchedNode) => { if (err) { return callback(err, null) @@ -45,13 +51,16 @@ module.exports = function files (self) { callback('This dag node is a directory', null) } else { const exportStream = Exporter(hash, self._dagS) - callback(null, exportStream) + 
exportStream.once('data', (object) => { + callback(null, object.stream) + }) } }) - }, - get: (hash, callback) => { + }), + + get: promisify((hash, callback) => { var exportFile = Exporter(hash, self._dagS) callback(null, exportFile) - } + }) } } diff --git a/src/http-api/resources/files.js b/src/http-api/resources/files.js index 8647a8e314..ad8b6ca773 100644 --- a/src/http-api/resources/files.js +++ b/src/http-api/resources/files.js @@ -1,6 +1,9 @@ 'use strict' const bs58 = require('bs58') +const multihash = require('multihashes') +const ndjson = require('ndjson') +const multipart = require('ipfs-multipart') const debug = require('debug') const log = debug('http-api:files') log.error = debug('http-api:files:error') @@ -42,8 +45,70 @@ exports.cat = { Code: 0 }).code(500) } - stream.on('data', (data) => { - return reply(data.stream) + return reply(stream) + }) + } +} + +exports.add = { + handler: (request, reply) => { + if (!request.payload) { + return reply('Array, Buffer, or String is required.').code(400).takeover() + } + + const parser = multipart.reqParser(request.payload) + + var filesParsed = false + var filesAdded = 0 + + var serialize = ndjson.serialize() + // hapi doesn't permit object streams: http://hapijs.com/api#replyerr-result + serialize._readableState.objectMode = false + + request.server.app.ipfs.files.add((err, fileAdder) => { + if (err) { + return reply({ + Message: err, + Code: 0 + }).code(500) + } + + fileAdder.on('data', (file) => { + serialize.write({ + Name: file.path, + Hash: multihash.toB58String(file.multihash) + }) + filesAdded++ + }) + + fileAdder.on('end', () => { + if (filesAdded === 0 && filesParsed) { + return reply({ + Message: 'Failed to add files.', + Code: 0 + }).code(500) + } else { + serialize.end() + return reply(serialize) + .header('x-chunked-output', '1') + .header('content-type', 'application/json') + } + }) + + parser.on('file', (fileName, fileStream) => { + var filePair = { + path: fileName, + stream: fileStream + } + 
filesParsed = true + fileAdder.write(filePair) + }) + + parser.on('end', () => { + if (!filesParsed) { + return reply("File argument 'data' is required.").code(400).takeover() + } + fileAdder.end() }) }) } diff --git a/src/http-api/routes/files.js b/src/http-api/routes/files.js index 99c47741e3..ddf722f850 100644 --- a/src/http-api/routes/files.js +++ b/src/http-api/routes/files.js @@ -15,4 +15,16 @@ module.exports = (server) => { handler: resources.files.cat.handler } }) + + api.route({ + method: '*', + path: '/api/v0/add', + config: { + payload: { + parse: false, + output: 'stream' + }, + handler: resources.files.add.handler + } + }) } diff --git a/src/http-api/routes/index.js b/src/http-api/routes/index.js index 587f25de77..6f73019f1e 100644 --- a/src/http-api/routes/index.js +++ b/src/http-api/routes/index.js @@ -8,7 +8,7 @@ module.exports = (server) => { require('./object')(server) // require('./repo')(server) require('./config')(server) + require('./files')(server) require('./swarm')(server) require('./bitswap')(server) - require('./files')(server) } diff --git a/test/cli-tests/test-bitswap.js b/test/cli-tests/test-bitswap.js index baec2e6356..bde99fe902 100644 --- a/test/cli-tests/test-bitswap.js +++ b/test/cli-tests/test-bitswap.js @@ -12,7 +12,7 @@ const createTempNode = require('../utils/temp-node') const repoPath = require('./index').repoPath describe('bitswap', function () { - this.timeout(20000) + this.timeout(80000) const env = _.clone(process.env) env.IPFS_PATH = repoPath diff --git a/test/cli-tests/test-files.js b/test/cli-tests/test-files.js index 512b088c9e..bc8fa62d87 100644 --- a/test/cli-tests/test-files.js +++ b/test/cli-tests/test-files.js @@ -12,6 +12,17 @@ describe('files', () => { env.IPFS_PATH = repoPath describe('api offline', () => { + it('add', (done) => { + nexpect.spawn('node', [process.cwd() + '/src/cli/bin.js', 'files', 'add', process.cwd() + '/test/test-data/node.json'], {env}) + .run((err, stdout, exitcode) => { + 
expect(err).to.not.exist + expect(exitcode).to.equal(0) + expect(stdout[0]) + .to.equal('added QmRRdjTN2PjyEPrW73GBxJNAZrstH5tCZzwHYFJpSTKkhe node.json') + done() + }) + }) + it('cat', (done) => { nexpect.spawn('node', [process.cwd() + '/src/cli/bin.js', 'files', 'cat', 'QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o'], {env}) .run((err, stdout, exitcode) => { @@ -40,6 +51,17 @@ describe('files', () => { }) }) + it('add', (done) => { + nexpect.spawn('node', [process.cwd() + '/src/cli/bin.js', 'files', 'add', process.cwd() + '/test/test-data/node.json'], {env}) + .run((err, stdout, exitcode) => { + expect(err).to.not.exist + expect(exitcode).to.equal(0) + expect(stdout[0]) + .to.equal('added QmRRdjTN2PjyEPrW73GBxJNAZrstH5tCZzwHYFJpSTKkhe node.json') + done() + }) + }) + it('cat', (done) => { nexpect.spawn('node', [process.cwd() + '/src/cli/bin.js', 'files', 'cat', 'QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o'], {env}) .run((err, stdout, exitcode) => { diff --git a/test/cli-tests/test-swarm.js b/test/cli-tests/test-swarm.js index 629cc381af..055bc6b83a 100644 --- a/test/cli-tests/test-swarm.js +++ b/test/cli-tests/test-swarm.js @@ -9,7 +9,7 @@ const repoPath = require('./index').repoPath const _ = require('lodash') describe('swarm', function () { - this.timeout(20000) + this.timeout(80000) const env = _.clone(process.env) env.IPFS_PATH = repoPath diff --git a/test/core-tests/test-files.js b/test/core-tests/test-files.js index 3be2be1530..02e84b888e 100644 --- a/test/core-tests/test-files.js +++ b/test/core-tests/test-files.js @@ -1,64 +1,20 @@ /* eslint-env mocha */ 'use strict' -const bl = require('bl') -const expect = require('chai').expect -const Readable = require('stream').Readable -const bs58 = require('bs58') +const test = require('interface-ipfs-core') const IPFS = require('../../src/core') -describe('files', () => { - let ipfs - - before((done) => { - ipfs = new IPFS(require('./repo-path')) - ipfs.load(done) - }) - - it('add', (done) => { - const buffered 
= new Buffer('some data') - const rs = new Readable() - rs.push(buffered) - rs.push(null) - const arr = [] - const filePair = {path: 'data.txt', stream: rs} - arr.push(filePair) - ipfs.files.add(arr, (err, res) => { - expect(err).to.not.exist - expect(res[0].path).to.equal('data.txt') - expect(res[0].size).to.equal(17) - expect(bs58.encode(res[0].multihash).toString()).to.equal('QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS') - done() +const common = { + setup: function (cb) { + const ipfs = new IPFS(require('./repo-path')) + ipfs.load(() => { + cb(null, ipfs) }) - }) + }, + teardown: function (cb) { + cb() + } +} - it('cat', (done) => { - const hash = 'QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o' - ipfs.files.cat(hash, (err, res) => { - expect(err).to.not.exist - res.on('data', (data) => { - data.stream.pipe(bl((err, bldata) => { - expect(err).to.not.exist - expect(bldata.toString()).to.equal('hello world\n') - done() - })) - }) - }) - }) - - it('get', (done) => { - // TODO create non-trival get test - const hash = 'QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o' - ipfs.files.get(hash, (err, res) => { - expect(err).to.not.exist - res.on('data', (data) => { - data.stream.pipe(bl((err, bldata) => { - expect(err).to.not.exist - expect(bldata.toString()).to.equal('hello world\n') - done() - })) - }) - }) - }) -}) +test.files(common) diff --git a/test/core-tests/test-swarm-node.js b/test/core-tests/test-swarm-node.js index 56ee09c800..384e41a318 100644 --- a/test/core-tests/test-swarm-node.js +++ b/test/core-tests/test-swarm-node.js @@ -6,7 +6,7 @@ const expect = require('chai').expect const createTempNode = require('../utils/temp-node') describe('swarm', function () { - this.timeout(20000) + this.timeout(80000) var ipfsA var ipfsB diff --git a/test/http-api-tests/test-bitswap.js b/test/http-api-tests/test-bitswap.js index 60785afbb8..765d0ac333 100644 --- a/test/http-api-tests/test-bitswap.js +++ b/test/http-api-tests/test-bitswap.js @@ -5,7 +5,7 @@ const expect = 
require('chai').expect module.exports = (httpAPI) => { describe('bitswap', function () { - this.timeout(20000) + this.timeout(80000) describe('commands', () => { let api before(() => { diff --git a/test/http-api-tests/test-files.js b/test/http-api-tests/test-files.js index c41dda2862..d0d6ba5928 100644 --- a/test/http-api-tests/test-files.js +++ b/test/http-api-tests/test-files.js @@ -4,6 +4,18 @@ const expect = require('chai').expect const APIctl = require('ipfs-api') +const fs = require('fs') +const FormData = require('form-data') +const streamToPromise = require('stream-to-promise') +const Readable = require('stream').Readable +const http = require('http') + +function singleFileServer (filename) { + return http.createServer(function (req, res) { + fs.createReadStream(filename).pipe(res) + }) +} + module.exports = (httpAPI) => { describe('files', () => { describe('api', () => { @@ -83,4 +95,155 @@ module.exports = (httpAPI) => { }) }) }) + + describe('files', () => { + describe('api', () => { + let api + + it('api', () => { + api = httpAPI.server.select('API') + }) + + describe('/files/add', () => { + it('returns 400 if no tuple is provided', (done) => { + const form = new FormData() + const headers = form.getHeaders() + + streamToPromise(form).then((payload) => { + api.inject({ + method: 'POST', + url: '/api/v0/add', + headers: headers, + payload: payload + }, (res) => { + expect(res.statusCode).to.equal(400) + done() + }) + }) + }) + + it('adds a file', (done) => { + const form = new FormData() + const filePath = 'test/test-data/node.json' + form.append('file', fs.createReadStream(filePath)) + const headers = form.getHeaders() + + streamToPromise(form).then((payload) => { + api.inject({ + method: 'POST', + url: '/api/v0/add', + headers: headers, + payload: payload + }, (res) => { + expect(res.statusCode).to.equal(200) + var result = JSON.parse(res.result) + expect(result.Name).to.equal('node.json') + 
expect(result.Hash).to.equal('QmRRdjTN2PjyEPrW73GBxJNAZrstH5tCZzwHYFJpSTKkhe') + done() + }) + }) + }) + + it('adds multiple files', (done) => { + const form = new FormData() + const filePath = 'test/test-data/hello' + const filePath2 = 'test/test-data/otherconfig' + form.append('file', fs.createReadStream(filePath)) + form.append('file', fs.createReadStream(filePath2)) + const headers = form.getHeaders() + + streamToPromise(form).then((payload) => { + api.inject({ + method: 'POST', + url: '/api/v0/add', + headers: headers, + payload: payload + }, (res) => { + expect(res.statusCode).to.equal(200) + var results = res.result.split('\n').slice(0, -1).map(JSON.parse) + expect(results[0].Name).to.equal('hello') + expect(results[0].Hash).to.equal('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + expect(results[1].Name).to.equal('otherconfig') + expect(results[1].Hash).to.equal('QmayedZNznnEbHtyfjeQvvt29opSLjYjLtLqwfwSWq28ds') + done() + }) + }) + }) + }) + }) + + describe('using js-ipfs-api', () => { + var ctl + + it('start IPFS API ctl', (done) => { + ctl = APIctl('/ip4/127.0.0.1/tcp/6001') + done() + }) + + describe('ipfs.add', () => { + it('adds two files under a chunk Size', (done) => { + const rs = new Readable() + const rs2 = new Readable() + var files = [] + const buffered = fs.readFileSync('test/test-data/hello') + const buffered2 = fs.readFileSync('test/test-data/otherconfig') + rs.push(buffered) + rs.push(null) + rs2.push(buffered2) + rs2.push(null) + const filePair = {path: 'hello', content: rs} + const filePair2 = {path: 'otherconfig', content: rs2} + files.push(filePair) + files.push(filePair2) + + ctl.add(files, (err, res) => { + expect(err).to.not.exist + expect(res[0].Name).to.equal('hello') + expect(res[0].Hash).to.equal('QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o') + expect(res[1].Name).to.equal('otherconfig') + expect(res[1].Hash).to.equal('QmayedZNznnEbHtyfjeQvvt29opSLjYjLtLqwfwSWq28ds') + done() + }) + }) + + it('adds a large file > a chunk', 
(done) => { + const rs = new Readable() + var files = [] + const buffered = fs.readFileSync('test/test-data/1.2MiB.txt') + rs.push(buffered) + rs.push(null) + const filePair = {path: '1.2MiB.txt', content: rs} + files.push(filePair) + + ctl.add(files, (err, res) => { + expect(err).to.not.exist + expect(res[0].Name).to.equal('1.2MiB.txt') + expect(res[0].Hash).to.equal('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') + done() + }) + }) + + it('adds a buffer', (done) => { + const buffer = new Buffer('hello world') + ctl.add(buffer, (err, res) => { + expect(err).to.not.exist + expect(res[0].Hash).to.equal('Qmf412jQZiuVUtdgnB36FXFX7xg5V6KEbSJ4dpQuhkLyfD') + done() + }) + }) + + it('adds a url', (done) => { + var server = singleFileServer('test/test-data/1.2MiB.txt') + server.listen(2913, function () { + ctl.add('http://localhost:2913/', (err, res) => { + expect(err).to.not.exist + const added = res[0] != null ? res[0] : res + expect(added).to.have.a.property('Hash', 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') + done() + }) + }) + }) + }) + }) + }) } diff --git a/test/http-api-tests/test-swarm.js b/test/http-api-tests/test-swarm.js index d3d3cd281f..26f11a5171 100644 --- a/test/http-api-tests/test-swarm.js +++ b/test/http-api-tests/test-swarm.js @@ -7,7 +7,7 @@ const createTempNode = require('../utils/temp-node') module.exports = (httpAPI) => { describe('swarm', function () { - this.timeout(20000) + this.timeout(80000) describe('api', () => { var api diff --git a/test/test-data/1.2MiB.txt b/test/test-data/1.2MiB.txt new file mode 100644 index 0000000000..6e306c55ab Binary files /dev/null and b/test/test-data/1.2MiB.txt differ