diff --git a/package.json b/package.json
index 9875580401..ecf20b76de 100644
--- a/package.json
+++ b/package.json
@@ -9,6 +9,7 @@
   "main": "src/core/index.js",
   "browser": {
     "./src/core/components/init-assets.js": false,
+    "./src/core/runtime/add-from-fs-nodejs.js": "./src/core/runtime/add-from-fs-browser.js",
     "./src/core/runtime/config-nodejs.js": "./src/core/runtime/config-browser.js",
     "./src/core/runtime/dns-nodejs.js": "./src/core/runtime/dns-browser.js",
     "./src/core/runtime/fetch-nodejs.js": "./src/core/runtime/fetch-browser.js",
@@ -93,7 +94,6 @@
     "datastore-core": "~0.6.0",
     "datastore-pubsub": "~0.1.1",
     "debug": "^4.1.0",
-    "deep-extend": "~0.6.0",
     "err-code": "^1.1.2",
     "file-type": "^10.2.0",
     "fnv1a": "^1.0.1",
@@ -159,16 +159,14 @@
     "promisify-es6": "^1.0.3",
     "protons": "^1.0.1",
     "pull-abortable": "^4.1.1",
-    "pull-catch": "^1.0.0",
+    "pull-cat": "^1.1.11",
     "pull-defer": "~0.2.3",
     "pull-file": "^1.1.0",
     "pull-ndjson": "~0.1.1",
-    "pull-paramap": "^1.2.2",
     "pull-pushable": "^2.2.0",
     "pull-sort": "^1.0.1",
     "pull-stream": "^3.6.9",
     "pull-stream-to-stream": "^1.3.4",
-    "pull-zip": "^2.0.1",
     "pump": "^3.0.0",
     "read-pkg-up": "^4.0.0",
     "readable-stream": "3.0.6",
diff --git a/src/cli/commands/add.js b/src/cli/commands/add.js
index 1d29e46853..dd1af23e90 100644
--- a/src/cli/commands/add.js
+++ b/src/cli/commands/add.js
@@ -1,86 +1,41 @@
 'use strict'

-const fs = require('fs')
-const path = require('path')
-const glob = require('glob')
 const sortBy = require('lodash/sortBy')
 const pull = require('pull-stream')
-const paramap = require('pull-paramap')
-const zip = require('pull-zip')
 const getFolderSize = require('get-folder-size')
 const byteman = require('byteman')
-const waterfall = require('async/waterfall')
+const reduce = require('async/reduce')
 const mh = require('multihashes')
 const multibase = require('multibase')
 const { print, isDaemonOn, createProgressBar } = require('../utils')
 const { cidToString } = require('../../utils/cid')
+const globSource = require('../../utils/files/glob-source')

-function checkPath (inPath, recursive) {
-  // This function is to check for the following possible inputs
-  // 1) "." add the cwd but throw error for no recursion flag
-  // 2) "." -r return the cwd
-  // 3) "/some/path" but throw error for no recursion
-  // 4) "/some/path" -r
-  // 5) No path, throw err
-  // 6) filename.type return the cwd + filename
-
-  if (!inPath) {
-    throw new Error('Error: Argument \'path\' is required')
-  }
-
-  if (inPath === '.') {
-    inPath = process.cwd()
-  }
-
-  // Convert to POSIX format
-  inPath = inPath
-    .split(path.sep)
-    .join('/')
-
-  // Strips trailing slash from path.
-  inPath = inPath.replace(/\/$/, '')
-
-  if (fs.statSync(inPath).isDirectory() && recursive === false) {
-    throw new Error(`Error: ${inPath} is a directory, use the '-r' flag to specify directories`)
-  }
-
-  return inPath
-}
-
-function getTotalBytes (path, recursive, cb) {
-  if (recursive) {
-    getFolderSize(path, cb)
-  } else {
-    fs.stat(path, (err, stat) => cb(err, stat.size))
-  }
+function getTotalBytes (paths, cb) {
+  reduce(paths, 0, (total, path, cb) => {
+    getFolderSize(path, (err, size) => {
+      if (err) return cb(err)
+      cb(null, total + size)
+    })
+  }, cb)
 }

-function addPipeline (index, addStream, list, argv) {
+function addPipeline (paths, addStream, options) {
   const {
+    recursive,
     quiet,
     quieter,
     silent
-  } = argv
+  } = options

   pull(
-    zip(
-      pull.values(list),
-      pull(
-        pull.values(list),
-        paramap(fs.stat.bind(fs), 50)
-      )
-    ),
-    pull.map((pair) => ({
-      path: pair[0],
-      isDirectory: pair[1].isDirectory()
-    })),
-    pull.filter((file) => !file.isDirectory),
-    pull.map((file) => ({
-      path: file.path.substring(index, file.path.length),
-      content: fs.createReadStream(file.path)
-    })),
+    globSource(...paths, { recursive }),
     addStream,
     pull.collect((err, added) => {
       if (err) {
+        // Tweak the error message and add more relevant info for the CLI
+        if (err.code === 'ERR_DIR_NON_RECURSIVE') {
+          err.message = `'${err.path}' is a directory, use the '-r' flag to specify directories`
+        }
         throw err
       }

@@ -90,10 +45,8 @@ function addPipeline (index, addStream, list, argv) {
       sortBy(added, 'path')
         .reverse()
         .map((file) => {
-          const log = [ 'added', cidToString(file.hash, { base: argv.cidBase }) ]
-
+          const log = [ 'added', cidToString(file.hash, { base: options.cidBase }) ]
           if (!quiet && file.path.length > 0) log.push(file.path)
-
           return log.join(' ')
         })
         .forEach((msg) => print(msg))
@@ -102,7 +55,7 @@ function addPipeline (index, addStream, list, argv) {
 }

 module.exports = {
-  command: 'add <file>',
+  command: 'add <file...>',

   describe: 'Add a file to IPFS using the UnixFS data format',

@@ -191,8 +144,7 @@ module.exports = {
   },

   handler (argv) {
-    const inPath = checkPath(argv.file, argv.recursive)
-    const index = inPath.lastIndexOf('/') + 1
+    const { ipfs } = argv
     const options = {
       strategy: argv.trickle ? 'trickle' : 'balanced',
       shardSplitThreshold: argv.enableShardingExperiment
@@ -210,38 +162,21 @@ module.exports = {
     if (options.enableShardingExperiment && isDaemonOn()) {
       throw new Error('Error: Enabling the sharding experiment should be done on the daemon')
     }
-    const ipfs = argv.ipfs
-    let list
-    waterfall([
-      (next) => {
-        if (fs.statSync(inPath).isDirectory()) {
-          return glob('**/*', { cwd: inPath }, next)
-        }
-        next(null, [])
-      },
-      (globResult, next) => {
-        if (globResult.length === 0) {
-          list = [inPath]
-        } else {
-          list = globResult.map((f) => inPath + '/' + f)
-        }
-        getTotalBytes(inPath, argv.recursive, next)
-      },
-      (totalBytes, next) => {
-        if (argv.progress) {
-          const bar = createProgressBar(totalBytes)
-          options.progress = function (byteLength) {
-            bar.update(byteLength / totalBytes, { progress: byteman(byteLength, 2, 'MB') })
-          }
-        }
+    if (!argv.progress) {
+      return addPipeline(argv.file, ipfs.addPullStream(options), argv)
+    }

-        next(null, ipfs.addPullStream(options))
-      }
-    ], (err, addStream) => {
+    getTotalBytes(argv.file, (err, totalBytes) => {
       if (err) throw err
-      addPipeline(index, addStream, list, argv)
+      const bar = createProgressBar(totalBytes)
+
+      options.progress = byteLength => {
+        bar.update(byteLength / totalBytes, { progress: byteman(byteLength, 2, 'MB') })
+      }
+
+      addPipeline(argv.file, ipfs.addPullStream(options), argv)
     })
   }
 }
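
Note: the reworked handler above feeds the cumulative byte count into `options.progress` before calling `ipfs.addPullStream(options)`. The same hook is available to programmatic callers; the snippet below is only an illustrative sketch and not part of this change (the initialised `ipfs` instance, the sample entry and the assumed `totalBytes` are placeholders).

// Sketch: reporting add progress outside the CLI, assuming an initialised `ipfs` instance.
const pull = require('pull-stream')

const totalBytes = 12 // assumed size of the content below
pull(
  pull.values([{ path: 'example.txt', content: Buffer.from('hello world\n') }]),
  ipfs.addPullStream({
    progress: (byteLength) => console.log(`${byteLength} of ${totalBytes} bytes added`)
  }),
  pull.collect((err, added) => {
    if (err) throw err
    added.forEach((file) => console.log(file.hash, file.path))
  })
)
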
diff --git a/src/core/components/files-regular/add-from-fs.js b/src/core/components/files-regular/add-from-fs.js
new file mode 100644
index 0000000000..409c4e8d7e
--- /dev/null
+++ b/src/core/components/files-regular/add-from-fs.js
@@ -0,0 +1,3 @@
+'use strict'
+
+module.exports = (self) => require('../../runtime/add-from-fs-nodejs')(self)
diff --git a/src/core/components/files-regular/index.js b/src/core/components/files-regular/index.js
index ac3522758b..058d07d618 100644
--- a/src/core/components/files-regular/index.js
+++ b/src/core/components/files-regular/index.js
@@ -2,6 +2,7 @@

 module.exports = self => ({
   add: require('./add')(self),
+  addFromFs: require('./add-from-fs')(self),
   addFromStream: require('./add-from-stream')(self),
   addFromURL: require('./add-from-url')(self),
   addPullStream: require('./add-pull-stream')(self),
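
With `addFromFs` registered on the files-regular API above, a Node.js caller could use it roughly as follows. This is a sketch rather than part of the change; the path and options shown are assumptions, and the result entries mirror the `{ path, hash }` objects logged by the CLI code above.

// Callback style (promisify-es6 also returns a promise when the callback is omitted).
ipfs.addFromFs('/path/to/a/directory', { recursive: true, ignore: ['**/*.log'] }, (err, result) => {
  if (err) throw err
  result.forEach((file) => console.log(file.hash, file.path))
})
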
diff --git a/src/core/runtime/add-from-fs-browser.js b/src/core/runtime/add-from-fs-browser.js
new file mode 100644
index 0000000000..52e56d1ea2
--- /dev/null
+++ b/src/core/runtime/add-from-fs-browser.js
@@ -0,0 +1,10 @@
+'use strict'
+
+const promisify = require('promisify-es6')
+
+module.exports = self => {
+  return promisify((...args) => {
+    const callback = args.pop()
+    callback(new Error('not available in the browser'))
+  })
+}
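
This stub pairs with the "browser" field entry added to package.json above: bundlers that honour that field (e.g. webpack, browserify) resolve add-from-fs-nodejs.js to this file, so browser bundles avoid pulling in fs/glob and the call fails fast. A minimal sketch of the resulting behaviour in a browser bundle (illustrative only):

// In the browser the promisified stub simply errors back.
ipfs.addFromFs('/anything', (err) => {
  console.log(err.message) // 'not available in the browser'
})
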
diff --git a/src/core/runtime/add-from-fs-nodejs.js b/src/core/runtime/add-from-fs-nodejs.js
new file mode 100644
index 0000000000..cff5375fbd
--- /dev/null
+++ b/src/core/runtime/add-from-fs-nodejs.js
@@ -0,0 +1,19 @@
+'use strict'
+
+const promisify = require('promisify-es6')
+const pull = require('pull-stream')
+const globSource = require('../../utils/files/glob-source')
+
+module.exports = self => {
+  return promisify((...args) => {
+    const callback = args.pop()
+    const options = typeof args[args.length - 1] === 'string' ? {} : args.pop()
+    const paths = args
+
+    pull(
+      globSource(...paths, options),
+      self.addPullStream(options),
+      pull.collect(callback)
+    )
+  })
+}
diff --git a/src/utils/files/glob-source.js b/src/utils/files/glob-source.js
new file mode 100644
index 0000000000..8c6368f043
--- /dev/null
+++ b/src/utils/files/glob-source.js
@@ -0,0 +1,109 @@
+'use strict'
+
+const fs = require('fs')
+const Path = require('path')
+const pull = require('pull-stream')
+const glob = require('glob')
+const cat = require('pull-cat')
+const defer = require('pull-defer')
+const pushable = require('pull-pushable')
+const map = require('async/map')
+const errCode = require('err-code')
+
+/**
+* Create a pull stream source that can be piped to ipfs.addPullStream for the
+* provided file paths.
+*
+* @param {String} ...paths File system path(s) to glob from
+* @param {Object} [options] Optional options
+* @param {Boolean} [options.recursive] Recursively glob all paths in directories
+* @param {Boolean} [options.hidden] Include .dot files in matched paths
+* @param {Array} [options.ignore] Glob paths to ignore
+* @param {Boolean} [options.followSymlinks] follow symlinks
+* @returns {Function} pull stream source
+*/
+module.exports = (...args) => {
+  const options = typeof args[args.length - 1] === 'string' ? {} : args.pop()
+  const paths = args
+  const deferred = defer.source()
+
+  const globSourceOptions = {
+    recursive: options.recursive,
+    glob: {
+      dot: Boolean(options.hidden),
+      ignore: Array.isArray(options.ignore) ? options.ignore : [],
+      follow: options.followSymlinks != null ? options.followSymlinks : true
+    }
+  }
+
+  // Check the input paths comply with options.recursive and convert to glob sources
+  map(paths, pathAndType, (err, results) => {
+    if (err) return deferred.abort(err)
+
+    try {
+      const sources = results.map(res => toGlobSource(res, globSourceOptions))
+      deferred.resolve(cat(sources))
+    } catch (err) {
+      deferred.abort(err)
+    }
+  })
+
+  return pull(
+    deferred,
+    pull.map(({ path, contentPath }) => ({
+      path,
+      content: fs.createReadStream(contentPath)
+    }))
+  )
+}
+
+function toGlobSource ({ path, type }, options) {
+  options = options || {}
+
+  const baseName = Path.basename(path)
+
+  if (type === 'file') {
+    return pull.values([{ path: baseName, contentPath: path }])
+  }
+
+  if (type === 'dir' && !options.recursive) {
+    throw errCode(
+      new Error(`'${path}' is a directory and recursive option not set`),
+      'ERR_DIR_NON_RECURSIVE',
+      { path }
+    )
+  }
+
+  const globOptions = Object.assign({}, options.glob, {
+    cwd: path,
+    nodir: true,
+    realpath: false,
+    absolute: false
+  })
+
+  // TODO: want to use pull-glob but it doesn't have the features...
+  const pusher = pushable()
+
+  glob('**/*', globOptions)
+    .on('match', m => pusher.push(m))
+    .on('end', () => pusher.end())
+    .on('abort', () => pusher.end())
+    .on('error', err => pusher.end(err))
+
+  return pull(
+    pusher,
+    pull.map(p => ({
+      path: `${baseName}/${toPosix(p)}`,
+      contentPath: Path.join(path, p)
+    }))
+  )
+}
+
+function pathAndType (path, cb) {
+  fs.stat(path, (err, stat) => {
+    if (err) return cb(err)
+    cb(null, { path, type: stat.isDirectory() ? 'dir' : 'file' })
+  })
+}
+
+const toPosix = path => path.replace(/\\/g, '/')
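
The JSDoc above describes how this utility is meant to be wired into add. A minimal usage sketch, assuming an initialised `ipfs` instance and illustrative paths/options (none of this is part of the change):

const pull = require('pull-stream')
const globSource = require('./src/utils/files/glob-source')

pull(
  globSource('/path/to/dir', '/path/to/file.txt', { recursive: true, hidden: false }),
  ipfs.addPullStream(),
  pull.collect((err, added) => {
    if (err) throw err
    added.forEach((file) => console.log(file.hash, file.path))
  })
)
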
diff --git a/test/cli/files.js b/test/cli/files.js
index 71f0a8a886..d9990ebd29 100644
--- a/test/cli/files.js
+++ b/test/cli/files.js
@@ -143,6 +143,18 @@ describe('files', () => runOnAndOff((thing) => {
       })
   })

+  it('add multiple', function () {
+    this.timeout(30 * 1000)
+
+    return ipfs('add', 'src/init-files/init-docs/readme', 'test/fixtures/odd-name-[v0]/odd name [v1]/hello', '--wrap-with-directory')
+      .then((out) => {
+        expect(out)
+          .to.include('added QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB readme\n')
+        expect(out)
+          .to.include('added QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o hello\n')
+      })
+  })
+
   it('add alias', function () {
     this.timeout(30 * 1000)

@@ -278,7 +290,7 @@ describe('files', () => runOnAndOff((thing) => {
   it('add --quieter', function () {
     this.timeout(30 * 1000)

-    return ipfs('add -Q -w test/fixtures/test-data/hello test/test-data/node.json')
+    return ipfs('add -Q -w test/fixtures/test-data/hello')
       .then((out) => {
         expect(out)
           .to.eql('QmYRMUVULBfj7WrdPESnwnyZmtayN6Sdrwh1nKcQ9QgQeZ\n')
diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js
index 6a31a90958..05284161b7 100644
--- a/test/core/interface.spec.js
+++ b/test/core/interface.spec.js
@@ -51,10 +51,7 @@ describe('interface-ipfs-core tests', () => {
   })

   tests.filesRegular(defaultCommonFactory, {
-    skip: isNode ? [{
-      name: 'addFromFs',
-      reason: 'TODO: not implemented yet'
-    }] : [{
+    skip: isNode ? null : [{
       name: 'addFromStream',
       reason: 'Not designed to run in the browser'
     }, {
diff --git a/test/http-api/inject/files.js b/test/http-api/inject/files.js
index 66c4e8a1f6..677c4c43fa 100644
--- a/test/http-api/inject/files.js
+++ b/test/http-api/inject/files.js
@@ -106,16 +106,34 @@ module.exports = (http) => {
       })
     })

-    it('valid hash', function (done) {
+    it('should cat a valid hash', function (done) {
       this.timeout(30 * 1000)
-      api.inject({
-        method: 'GET',
-        url: '/api/v0/cat?arg=QmT78zSuBmuS4z925WZfrqQ1qHaJ56DQaTfyMUF7F8ff5o'
-      }, (res) => {
-        expect(res.statusCode).to.equal(200)
-        expect(res.rawPayload).to.deep.equal(Buffer.from('hello world' + '\n'))
-        expect(res.payload).to.equal('hello world' + '\n')
-        done()
+
+      const data = Buffer.from('TEST' + Date.now())
+      const form = new FormData()
+      form.append('data', data)
+      const headers = form.getHeaders()
+
+      streamToPromise(form).then((payload) => {
+        api.inject({
+          method: 'POST',
+          url: '/api/v0/add',
+          headers: headers,
+          payload: payload
+        }, (res) => {
+          expect(res.statusCode).to.equal(200)
+          const cid = JSON.parse(res.result).Hash
+
+          api.inject({
+            method: 'GET',
+            url: '/api/v0/cat?arg=' + cid
+          }, (res) => {
+            expect(res.statusCode).to.equal(200)
+            expect(res.rawPayload).to.deep.equal(data)
+            expect(res.payload).to.equal(data.toString())
+            done()
+          })
+        })
       })
     })
   })