From f75eb35d850c68424bebda7a23fd98efd98bde55 Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Fri, 16 Mar 2018 16:46:14 -0400
Subject: [PATCH] feat: jsipfs pin improvements (#1249)

* initial sweep through to understand how pin works; made some changes, but mostly minor ones
* refactor: move the pb schema to its own file
* fix: don't pin files during files.add if opts.pin === false
* feat: add some HTTP query-string parsing, clean up the HTTP pin routes/resources, and clean up core/utils.parseIpfsPath
* feat: expand the pin tests. First draft; still needs some further work.
* feat: add logging for entry/exit of pin add/rm/flush/load; clean up some documentation
* feat: add --pin to files.add; fix improper pin option parsing in core
* feat: use ipfs.files.add to add the init-docs instead of using the unix-fs importer directly
* feat(tests): add tests for the CLI --pin option. These are really integration tests and should live in /core; talk with Victor about how to test the different layers.
* feat: use isIPFS to validate a multihash
---
 package.json                       |   4 +-
 src/cli/commands/files/add.js      |   8 +-
 src/cli/commands/pin/add.js        |   4 +-
 src/cli/commands/pin/ls.js         |  17 ++--
 src/cli/commands/pin/rm.js         |   4 +-
 src/core/components/files.js       |   6 +-
 src/core/components/init-assets.js |  21 +++--
 src/core/components/init.js        |  13 +--
 src/core/components/pin-set.js     |  38 +++------
 src/core/components/pin.js         |  42 +++++-----
 src/core/components/pin.proto.js   |  17 ++++
 src/core/utils.js                  | 126 ++++++++++++++++-------------
 src/http/api/resources/files.js    |   6 +-
 src/http/api/resources/pin.js      | 114 +++++++++++++++-----------
 src/http/api/routes/pin.js         |  11 ++-
 test/cli/files.js                  |  25 ++++++
 test/cli/pin.js                    |  12 ++-
 test/core/utils.spec.js            |   7 +-
 test/http-api/spec/pin.js          |   2 +-
 19 files changed, 277 insertions(+), 200 deletions(-)
 create mode 100644 src/core/components/pin.proto.js

diff --git a/package.json b/package.json
index ba5bc0eb67..4e9ebfaeac 100644
--- a/package.json
+++ b/package.json
@@ -107,10 +107,8 @@
    "hapi": "^16.6.2",
    "hapi-set-header": "^1.0.2",
    "hoek": "^5.0.3",
-    "interface-datastore": "^0.4.1",
-    "ipfs-api": "^18.0.0",
-    "ipfs-bitswap": "~0.19.0",
    "human-to-milliseconds": "^1.0.0",
+    "interface-datastore": "^0.4.1",
    "ipfs-api": "^18.1.1",
    "ipfs-bitswap": "~0.19.0",
    "ipfs-block": "~0.6.1",
diff --git a/src/cli/commands/files/add.js b/src/cli/commands/files/add.js
index 5886ac2365..a5c62087ae 100644
--- a/src/cli/commands/files/add.js
+++ b/src/cli/commands/files/add.js
@@ -172,6 +172,11 @@ module.exports = {
      type: 'boolean',
      default: false,
      describe: 'Write no output'
+    },
+    pin: {
+      type: 'boolean',
+      default: true,
+      describe: 'Pin this object when adding'
    }
  },

@@ -182,7 +187,8 @@ module.exports = {
      strategy: argv.trickle ? 'trickle' : 'balanced',
      shardSplitThreshold: argv.enableShardingExperiment ?
argv.shardSplitThreshold : Infinity, 'cid-version': argv['cid-version'], - 'raw-leaves': argv['raw-leaves'] + 'raw-leaves': argv['raw-leaves'], + pin: argv.pin } // Temporary restriction on raw-leaves: diff --git a/src/cli/commands/pin/add.js b/src/cli/commands/pin/add.js index 1c6ab15bcc..7b1a220aa5 100644 --- a/src/cli/commands/pin/add.js +++ b/src/cli/commands/pin/add.js @@ -1,5 +1,7 @@ 'use strict' +const print = require('../../utils').print + module.exports = { command: 'add ', @@ -21,7 +23,7 @@ module.exports = { argv.ipfs.pin.add(paths[0], { recursive: recursive }, (err, results) => { if (err) { throw err } results.forEach((res) => { - console.log(`pinned ${res.hash} ${type}ly`) + print(`pinned ${res.hash} ${type}ly`) }) }) } diff --git a/src/cli/commands/pin/ls.js b/src/cli/commands/pin/ls.js index 0702eba1b5..869d684f73 100644 --- a/src/cli/commands/pin/ls.js +++ b/src/cli/commands/pin/ls.js @@ -1,21 +1,20 @@ 'use strict' +const print = require('../../utils').print + module.exports = { - command: 'ls', + // bracket syntax with '...' tells yargs to optionally accept a list + command: 'ls [ipfs-path...]', describe: 'List objects pinned to local storage.', builder: { - path: { - type: 'string', - describe: 'List pinned state of specific .' - }, type: { type: 'string', alias: 't', default: 'all', - describe: ('The type of pinned keys to list. ' + - 'Can be "direct", "indirect", "recursive", or "all".') + choices: ['direct', 'indirect', 'recursive', 'all'], + describe: 'The type of pinned keys to list.' }, quiet: { type: 'boolean', @@ -26,7 +25,7 @@ module.exports = { }, handler: (argv) => { - const paths = argv.path && argv.path.split(' ') + const paths = argv.ipfsPath || '' const type = argv.type const quiet = argv.quiet argv.ipfs.pin.ls(paths, { type: type }, (err, results) => { @@ -36,7 +35,7 @@ module.exports = { if (!quiet) { line += ` ${res.type}` } - console.log(line) + print(line) }) }) } diff --git a/src/cli/commands/pin/rm.js b/src/cli/commands/pin/rm.js index bef493aae3..f3d1e7cf7f 100644 --- a/src/cli/commands/pin/rm.js +++ b/src/cli/commands/pin/rm.js @@ -1,5 +1,7 @@ 'use strict' +const print = require('../../utils').print + module.exports = { command: 'rm ', @@ -20,7 +22,7 @@ module.exports = { argv.ipfs.pin.rm(paths, { recursive: recursive }, (err, results) => { if (err) { throw err } results.forEach((res) => { - console.log(`unpinned ${res.hash}`) + print(`unpinned ${res.hash}`) }) }) } diff --git a/src/core/components/files.js b/src/core/components/files.js index 674f3c5335..7b9a9b4ebc 100644 --- a/src/core/components/files.js +++ b/src/core/components/files.js @@ -18,6 +18,7 @@ const CID = require('cids') const toB58String = require('multihashes').toB58String function noop () {} +function identity (x) { return x } function prepareFile (self, opts, file, callback) { opts = opts || {} @@ -125,7 +126,8 @@ module.exports = function files (self) { }, options) let total = 0 - let prog = opts.progress || (() => {}) + const shouldPin = 'pin' in opts ? opts.pin : true + const prog = opts.progress || noop const progress = (bytes) => { total += bytes prog(total) @@ -137,7 +139,7 @@ module.exports = function files (self) { pull.flatten(), importer(self._ipld, opts), pull.asyncMap(prepareFile.bind(null, self, opts)), - pull.asyncMap(pinFile.bind(null, self)) + shouldPin ? 
pull.asyncMap(pinFile.bind(null, self)) : identity ) } diff --git a/src/core/components/init-assets.js b/src/core/components/init-assets.js index 00c37120a8..79e96a9fdf 100644 --- a/src/core/components/init-assets.js +++ b/src/core/components/init-assets.js @@ -15,23 +15,20 @@ module.exports = function addDefaultAssets (self, log, callback) { pull( pull.values([initDocsPath]), - pull.asyncMap((val, cb) => glob(path.join(val, '/**/*'), cb)), + pull.asyncMap((val, cb) => + glob(path.join(val, '/**/*'), { nodir: true }, cb) + ), pull.flatten(), - pull.map((element) => { + pull.map(element => { const addPath = element.substring(index + 1) - - if (fs.statSync(element).isDirectory()) { return } - return { path: addPath, content: file(element) } }), - // Filter out directories, which are undefined from above - pull.filter(Boolean), - importer(self._ipld), - pull.through((el) => { - if (el.path === 'init-docs') { - const cid = new CID(el.multihash) + self.files.addPullStream(), + pull.through(file => { + if (file.path === 'init-docs') { + const cid = new CID(file.hash) log('to get started, enter:\n') - log(`\t jsipfs files cat /ipfs/${cid.toBaseEncodedString()}/readme\n`) + log(`\tjsipfs files cat /ipfs/${cid.toBaseEncodedString()}/readme\n`) } }), pull.collect((err) => { diff --git a/src/core/components/init.js b/src/core/components/init.js index 21504c9128..3ef13ff2b7 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -85,16 +85,11 @@ module.exports = function init (self) { } self.log('adding assets') - const tasks = [ + parallel([ // add empty unixfs dir object (go-ipfs assumes this exists) - (cb) => self.object.new('unixfs-dir', cb) - ] - - if (typeof addDefaultAssets === 'function') { - tasks.push((cb) => addDefaultAssets(self, opts.log, cb)) - } - - parallel(tasks, (err) => { + (cb) => self.object.new('unixfs-dir', cb), + (cb) => addDefaultAssets(self, opts.log, cb) + ], (err) => { if (err) { cb(err) } else { diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js index 4a3319dc94..a0ea499051 100644 --- a/src/core/components/pin-set.js +++ b/src/core/components/pin-set.js @@ -11,28 +11,14 @@ const DAGLink = dagPB.DAGLink const varint = require('varint') const once = require('once') +const pbSchema = require('./pin.proto') + const emptyKeyHash = 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n' const emptyKey = multihashes.fromB58String(emptyKeyHash) const defaultFanout = 256 const maxItems = 8192 - -// Protobuf interface -// from go-ipfs/pin/internal/pb/header.proto -const pbSchema = ` - syntax = "proto2"; - - package ipfs.pin; - - option go_package = "pb"; - - message Set { - optional uint32 version = 1; - optional uint32 fanout = 2; - optional fixed32 seed = 3; - } -` - const pb = protobuf(pbSchema) + function readHeader (rootNode) { // rootNode.data should be a buffer of the format: // < varint(headerLength) | header | itemData... > @@ -63,7 +49,6 @@ exports = module.exports = function (dag) { const pinSet = { // should this be part of `object` API? 
hasChild: (root, childhash, callback, _links, _checked, _seen) => { - // callback (err, has) callback = once(callback) if (typeof childhash === 'object') { childhash = toB58String(childhash) @@ -81,11 +66,13 @@ exports = module.exports = function (dag) { if (bs58link === childhash) { return callback(null, true) } + + // don't check the same links twice + if (bs58link in _seen) { return } + _seen[bs58link] = true + dag.get(new CID(link.multihash), (err, res) => { if (err) { return callback(err) } - // don't check the same links twice - if (bs58link in _seen) { return } - _seen[bs58link] = true _checked++ _links += res.value.links.length @@ -95,7 +82,6 @@ exports = module.exports = function (dag) { }, storeSet: (keys, logInternalKey, callback) => { - // callback (err, rootNode) callback = once(callback) const items = keys.map((key) => { return { @@ -115,10 +101,8 @@ exports = module.exports = function (dag) { }, storeItems: (items, logInternalKey, callback, _depth, _subcalls, _done) => { - // callback (err, rootNode) callback = once(callback) - // const seed = crypto.randomBytes(4).readUInt32LE(0, true) // old nondeterministic behavior - const seed = _depth // new deterministic behavior + const seed = _depth const pbHeader = pb.Set.encode({ version: 1, fanout: defaultFanout, @@ -210,9 +194,8 @@ exports = module.exports = function (dag) { }, loadSet: (rootNode, name, logInternalKey, callback) => { - // callback (err, keys) callback = once(callback) - const link = rootNode.links.filter(l => l.name === name).pop() + const link = rootNode.links.find(l => l.name === name) if (!link) { return callback(new Error('No link found with name ' + name)) } logInternalKey(link.multihash) dag.get(new CID(link.multihash), (err, res) => { @@ -229,7 +212,6 @@ exports = module.exports = function (dag) { }, walkItems: (node, walkerFn, logInternalKey, callback) => { - // callback (err) callback = once(callback) const h = readHeader(node) if (h.err) { return callback(h.err) } diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 7bb8b2173f..00959d24f0 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -8,21 +8,22 @@ const pinSet = require('./pin-set') const normalizeHashes = require('../utils').normalizeHashes const promisify = require('promisify-es6') const multihashes = require('multihashes') -const Key = require('interface-datastore').Key const each = require('async/each') const series = require('async/series') const waterfall = require('async/waterfall') const until = require('async/until') const once = require('once') -const toB58String = multihashes.toB58String +function toB58String (hash) { + return new CID(hash).toBaseEncodedString() +} module.exports = function pin (self) { let directPins = new Set() let recursivePins = new Set() let internalPins = new Set() - const pinDataStoreKey = new Key('/local/pins') + const pinDataStoreKey = '/local/pins' const repo = self._repo const dag = self.dag @@ -50,7 +51,7 @@ module.exports = function pin (self) { options = null } callback = once(callback) - const recursive = !options || options.recursive !== false + const recursive = options ? 
options.recursive : true normalizeHashes(self, hashes, (err, mhs) => { if (err) { return callback(err) } // verify that each hash can be pinned @@ -71,9 +72,7 @@ module.exports = function pin (self) { } else { if (recursivePins.has(key)) { // recursive supersedes direct, can't have both - return cb( - new Error(`${key} already pinned recursively`) - ) + return cb(new Error(`${key} already pinned recursively`)) } if (directPins.has(key)) { // already directly pinned @@ -101,6 +100,7 @@ module.exports = function pin (self) { // persist updated pin sets to datastore pin.flush((err, root) => { if (err) { return callback(err) } + self.log(`Added pins: ${results}`) return callback(null, results.map(key => ({hash: key}))) }) }) @@ -122,16 +122,16 @@ module.exports = function pin (self) { pin.isPinnedWithType(multihash, pin.types.all, (err, res) => { if (err) { return cb(err) } const { pinned, reason } = res - if (!pinned) { return cb(new Error('not pinned')) } const key = toB58String(multihash) + if (!pinned) { + return cb(new Error(`${key} is not pinned`)) + } switch (reason) { case (pin.types.recursive): if (recursive) { return cb(null, key) } else { - return cb(new Error( - `${key} is pinned recursively` - )) + return cb(new Error(`${key} is pinned recursively`)) } case (pin.types.direct): return cb(null, key) @@ -149,6 +149,7 @@ module.exports = function pin (self) { // persist updated pin sets to datastore pin.flush((err, root) => { if (err) { return callback(err) } + self.log(`Removed pins: ${results}`) return callback(null, results.map(key => ({hash: key}))) }) }) @@ -173,7 +174,7 @@ module.exports = function pin (self) { type = options.type.toLowerCase() } callback = once(callback) - if (Object.keys(pin.types).indexOf(type) < 0) { + if (!pin.types[type]) { return callback(new Error( `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` )) @@ -206,10 +207,7 @@ module.exports = function pin (self) { }) } }) - }), (err, results) => { - if (err) { return callback(err) } - return callback(null, results) - }) + }), callback) }) } else { // show all pinned items of type @@ -269,6 +267,7 @@ module.exports = function pin (self) { if ((pinType === pin.types.direct)) { return callback(null, {pinned: false}) } + // internal if ((pinType === pin.types.internal || pinType === pin.types.all) && internalPins.has(key)) { return callback(null, {pinned: true, reason: pin.types.internal}) @@ -318,9 +317,6 @@ module.exports = function pin (self) { getIndirectKeys: (callback) => { const indirectKeys = new Set() const rKeys = pin.recursiveKeys() - if (!rKeys.length) { - return callback(null, []) - } each(rKeys, (multihash, cb) => { dag._getRecursive(multihash, (err, nodes) => { if (err) { return cb(err) } @@ -374,6 +370,7 @@ module.exports = function pin (self) { (_, cb) => repo.datastore.put(pinDataStoreKey, handle.root.multihash, cb) ], (err, result) => { if (err) { return callback(err) } + self.log(`Flushed ${handle.root} to the datastore.`) internalPins = newInternalPins return callback(null, handle.root) }) @@ -391,7 +388,7 @@ module.exports = function pin (self) { waterfall([ (cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), // hack for CLI tests (_, cb) => repo.datastore.has(pinDataStoreKey, cb), - (has, cb) => has ? cb() : callback(), + (has, cb) => has ? 
          cb() : cb('No pins to load'),
      (cb) => repo.datastore.get(pinDataStoreKey, cb),
      (mh, cb) => dag.get(new CID(mh), cb),
      (root, cb) => handle.put('root', root.value, cb),
      (cb) => pin.set.loadSet(handle.root, pin.types.recursive, logInternalKey, cb),
      (rKeys, cb) => handle.put('rKeys', rKeys, cb),
      (cb) => pin.set.loadSet(handle.root, pin.types.direct, logInternalKey, cb)
    ], (err, dKeys) => {
-      if (err && err !== 'break') { return callback(err) }
+      if (err && err !== 'No pins to load') {
+        return callback(err)
+      }
      if (dKeys) {
        directPins = new Set(dKeys.map(mh => toB58String(mh)))
        recursivePins = new Set(handle.rKeys.map(mh => toB58String(mh)))
        logInternalKey(handle.root.multihash)
        internalPins = newInternalPins
      }
+      self.log('Loaded pins from the datastore')
      return callback()
    })
  })
diff --git a/src/core/components/pin.proto.js b/src/core/components/pin.proto.js
new file mode 100644
index 0000000000..db2391c91f
--- /dev/null
+++ b/src/core/components/pin.proto.js
@@ -0,0 +1,17 @@
+/**
+ * Protobuf interface
+ * from go-ipfs/pin/internal/pb/header.proto
+ */
+module.exports = `
+  syntax = "proto2";
+
+  package ipfs.pin;
+
+  option go_package = "pb";
+
+  message Set {
+    optional uint32 version = 1;
+    optional uint32 fanout = 2;
+    optional fixed32 seed = 3;
+  }
+`
diff --git a/src/core/utils.js b/src/core/utils.js
index e8a1f9371b..be101273bb 100644
--- a/src/core/utils.js
+++ b/src/core/utils.js
@@ -3,80 +3,94 @@
 const multihashes = require('multihashes')
 const mapSeries = require('async/mapSeries')
 const CID = require('cids')
+const isIPFS = require('is-ipfs')

 exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.'

-const parseIpfsPath = exports.parseIpfsPath = function (pathString) {
-  // example: '/ipfs/b58Hash/links/by/name'
-  // -> { root: 'b58Hash', links: ['links', 'by', 'name'] }
-  const matched = pathString.match(/^(?:\/ipfs\/)?([^/]+(?:\/[^/]+)*)\/?$/)
-  const errorResult = () => ({
+/**
+ * Break an ipfs-path down into its root hash and an array of links.
+ *
+ * examples:
+ *   b58Hash -> { root: 'b58Hash', links: [] }
+ *   b58Hash/mercury/venus -> { root: 'b58Hash', links: ['mercury', 'venus'] }
+ *   /ipfs/b58Hash/links/by/name -> { root: 'b58Hash', links: ['links', 'by', 'name'] }
+ *
+ * @param  {String} ipfsPath An ipfs-path
+ * @return {Object}          { root: base58 string, links: [string], ?error: Error }
+ */
+exports.parseIpfsPath = function parseIpfsPath (ipfsPath) {
+  const matched = ipfsPath.match(/^(?:\/ipfs\/)?([^/]+(?:\/[^/]+)*)\/?$/)
+  const errorResult = {
     error: new Error('invalid ipfs ref path')
-  })
+  }
   if (!matched) {
-    return errorResult()
+    return errorResult
   }
-  const split = matched[1].split('/')
-  const root = split[0]
-  try {
-    if (CID.isCID(new CID(root))) {
-      return {
-        root: root,
-        links: split.slice(1, split.length)
-      }
-    } else {
-      return errorResult()
+
+  const [root, ...links] = matched[1].split('/')
+
+  if (isIPFS.multihash(root)) {
+    return {
+      root: root,
+      links: links
     }
-  } catch (err) {
-    return errorResult()
+  } else {
+    return errorResult
   }
 }

-exports.normalizeHashes = function (ipfs, hashes, callback) {
-  // try to accept a variety of hash options including
-  // multihash Buffers, base58 strings, and ipfs path
-  // strings, either individually or as an array
-  if (!Array.isArray(hashes)) {
-    hashes = [hashes]
+/**
+ * Resolve various styles of an ipfs-path to the hash of the target node.
+ * Follows links in the path.
+ *
+ * Handles formats:
+ *  - <b58 hash>
+ *  - <b58 hash>/link/to/another/planet
+ *  - /ipfs/<b58 hash>
+ *  - Buffers of any of the above
+ *  - multihash Buffer
+ *
+ * @param  {IPFS}     ipfs      the IPFS node
+ * @param  {Described above}    ipfsPaths A single or collection of ipfs-paths
+ * @param  {Function} callback  Node-style callback. res is Array<multihash>
+ * @return {void}
+ */
+exports.normalizeHashes = function normalizeHashes (ipfs, ipfsPaths, callback) {
+  if (!Array.isArray(ipfsPaths)) {
+    ipfsPaths = [ipfsPaths]
   }
-  mapSeries(hashes, (hash, cb) => {
+  mapSeries(ipfsPaths, (path, cb) => {
    const validate = (mh) => {
      try {
        multihashes.validate(mh)
        cb(null, mh)
      } catch (err) { cb(err) }
    }
-    if (typeof hash === 'string') {
-      const {error, root, links} = parseIpfsPath(hash)
-      const rootHash = multihashes.fromB58String(root)
-      if (error) return cb(error)
+    if (typeof path !== 'string') {
+      return validate(path)
+    }
+    const {error, root, links} = exports.parseIpfsPath(path)
+    const rootHash = multihashes.fromB58String(root)
+    if (error) return cb(error)
+    if (!links.length) {
+      return validate(rootHash)
+    }
+    // recursively follow named links to the target node
+    const pathFn = (err, obj) => {
+      if (err) { return cb(err) }
      if (!links.length) {
-        return validate(rootHash)
-      } else {
-        // recursively follow named links to the target
-        const pathFn = (err, obj) => {
-          if (err) { return cb(err) }
-          if (links.length) {
-            const linkName = links.shift()
-            const nextLink = obj.links.filter(link => link.name === linkName)
-            if (!nextLink.length) {
-              return cb(new Error(
-                `no link named ${linkName} under ${obj.toJSON().Hash}`
-              ))
-            }
-            const nextHash = nextLink[0].multihash
-            ipfs.object.get(nextHash, pathFn)
-          } else {
-            validate(obj.multihash)
-          }
-        }
-        ipfs.object.get(rootHash, pathFn)
+        // done tracing, we have the target node
+        return validate(obj.multihash)
+      }
+      const linkName = links.shift()
+      const nextLink = obj.links.find(link => link.name === linkName)
+      if (!nextLink) {
+        return cb(new Error(
+          `no link named ${linkName} under ${obj.toJSON().Hash}`
+        ))
      }
-    } else {
-      validate(hash)
+      ipfs.object.get(nextLink.multihash, pathFn)
    }
-  }, (err, results) => {
-    if (err) { return callback(err) }
-    return callback(null, results)
-  })
+    ipfs.object.get(rootHash, pathFn)
+  }, callback)
 }
diff --git a/src/http/api/resources/files.js b/src/http/api/resources/files.js
index 55c406d86c..142bdbbee7 100644
--- a/src/http/api/resources/files.js
+++ b/src/http/api/resources/files.js
@@ -147,7 +147,8 @@ exports.add = {
        is: 1,
        then: Joi.boolean().valid(false).required(),
        otherwise: Joi.boolean().valid(false)
-      })
+      }),
+      pin: Joi.boolean().default(true)
    })
      // TODO: Necessary until validate "recursive", "stream-channels" etc.
      .options({ allowUnknown: true })
@@ -205,7 +206,8 @@ exports.add = {
    const options = {
      'cid-version': request.query['cid-version'],
      'raw-leaves': request.query['raw-leaves'],
-      progress: request.query['progress'] ?
progressHandler : null, + pin: request.query.pin } const aborter = abortable() diff --git a/src/http/api/resources/pin.js b/src/http/api/resources/pin.js index c942cc1e11..a2c89ad29b 100644 --- a/src/http/api/resources/pin.js +++ b/src/http/api/resources/pin.js @@ -7,48 +7,70 @@ log.error = debug('jsipfs:http-api:pin:error') exports = module.exports -exports.ls = (request, reply) => { - const ipfs = request.server.app.ipfs - const types = ipfs.pin.types - const path = request.query.arg - const type = request.query.type || types.all - ipfs.pin.ls(path, { type }, (err, result) => { - if (err) { - log.error(err) - return reply({ - Message: `Failed to list pins: ${err.message}`, - Code: 0 - }).code(500) - } +function parseArgs (request, reply) { + if (!request.query.arg) { + return reply({ + Message: "Argument 'arg' is required", + Code: 0 + }).code(400).takeover() + } + + const recursive = request.query.recursive !== 'false' + + return reply({ + path: request.query.arg, + recursive: recursive, + }) +} + +exports.ls = { + parseArgs: (request, reply) => { + const ipfs = request.server.app.ipfs + const type = request.query.type || ipfs.pin.types.all return reply({ - Keys: _.mapValues( - _.keyBy(result, obj => obj.hash), - obj => ({Type: obj.type}) - ) + path: request.query.arg, + type: type }) - }) + }, + + handler: (request, reply) => { + const { path, type } = request.pre.args + const ipfs = request.server.app.ipfs + ipfs.pin.ls(path, { type }, (err, result) => { + if (err) { + log.error(err) + return reply({ + Message: `Failed to list pins: ${err.message}`, + Code: 0 + }).code(500) + } + + return reply({ + Keys: _.mapValues( + _.keyBy(result, obj => obj.hash), + obj => ({Type: obj.type}) + ) + }) + }) + } } exports.add = { - // main route handler which is called after `parseArgs`, - // but only if the args were valid + parseArgs: parseArgs, + handler: (request, reply) => { const ipfs = request.server.app.ipfs - const path = request.query.arg - const recursive = request.query.recursive !== 'false' - const onError = (err, code) => { - log.error(err) - return reply({ - Message: `Failed to add pin: ${err.message}`, - Code: 0 - }).code(code) - } - if (!path) { - return onError(new Error("Argument 'ipfs-path' is required"), 400) - } + const { path, recursive } = request.pre.args ipfs.pin.add(path, { recursive }, (err, result) => { - if (err) { return onError(err, 500) } + if (err) { + log.error(err) + return reply({ + Message: `Failed to add pin: ${err.message}`, + Code: 0 + }).code(500) + } + return reply({ Pins: result.map(obj => obj.hash) }) @@ -57,24 +79,20 @@ exports.add = { } exports.rm = { - // main route handler which is called after `parseArgs`, - // but only if the args were valid + parseArgs: parseArgs, + handler: (request, reply) => { const ipfs = request.server.app.ipfs - const path = request.query.arg - const recursive = request.query.recursive !== 'false' - const onError = (err, code) => { - log.error(err) - return reply({ - Message: `Failed to remove pin: ${err.message}`, - Code: 0 - }).code(code) - } - if (!path) { - return onError(new Error("Argument 'ipfs-path' is required"), 400) - } + const { path, recursive } = request.pre.args ipfs.pin.rm(path, { recursive }, (err, result) => { - if (err) { return onError(err, 500) } + if (err) { + log.error(err) + return reply({ + Message: `Failed to remove pin: ${err.message}`, + Code: 0 + }).code(500) + } + return reply({ Pins: result.map(obj => obj.hash) }) diff --git a/src/http/api/routes/pin.js b/src/http/api/routes/pin.js index 
2e3cf185b6..657bb375ac 100644 --- a/src/http/api/routes/pin.js +++ b/src/http/api/routes/pin.js @@ -9,6 +9,9 @@ module.exports = (server) => { method: '*', path: '/api/v0/pin/add', config: { + pre: [ + { method: resources.pin.add.parseArgs, assign: 'args' } + ], handler: resources.pin.add.handler } }) @@ -17,6 +20,9 @@ module.exports = (server) => { method: '*', path: '/api/v0/pin/rm', config: { + pre: [ + { method: resources.pin.rm.parseArgs, assign: 'args' } + ], handler: resources.pin.rm.handler } }) @@ -25,7 +31,10 @@ module.exports = (server) => { method: '*', path: '/api/v0/pin/ls', config: { - handler: resources.pin.ls + pre: [ + { method: resources.pin.ls.parseArgs, assign: 'args' } + ], + handler: resources.pin.ls.handler } }) } diff --git a/test/cli/files.js b/test/cli/files.js index b2d317abd3..95d2b5b31e 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -270,6 +270,31 @@ describe('files', () => runOnAndOff((thing) => { }) }) + it('add pins by default', function () { + const filePath = path.join(os.tmpdir(), hat()) + const content = String(Math.random()) + const file = fs.writeFileSync(filePath, content) + + return ipfs(`files add -Q ${filePath}`) + .then(out => { + const hash = out.trim() + return ipfs(`pin ls ${hash}`) + .then(ls => expect(ls).to.include(hash)) + }) + .then(() => fs.unlinkSync(filePath)) + }) + + it('add does not pin with --pin=false', function () { + const filePath = path.join(os.tmpdir(), hat()) + const content = String(Math.random()) + const file = fs.writeFileSync(filePath, content) + + return ipfs(`files add -Q --pin=false ${filePath}`) + .then(out => ipfs(`pin ls ${out.trim()}`)) + .then(ls => expect(ls.trim()).to.eql('')) + .then(() => fs.unlinkSync(filePath)) + }) + it('cat', function () { this.timeout(30 * 1000) diff --git a/test/cli/pin.js b/test/cli/pin.js index 5506ea58b5..9271ffe410 100644 --- a/test/cli/pin.js +++ b/test/cli/pin.js @@ -48,23 +48,29 @@ describe('pin', () => runOnAndOff((thing) => { }) it('ls (recursive)', () => { - return ipfs(`pin ls --path ${keys.root}`).then((out) => { + return ipfs(`pin ls ${keys.root}`).then((out) => { expect(out).to.eql(`${keys.root} recursive\n`) }) }) it('ls (direct)', () => { - return ipfs(`pin ls --path ${keys.readme}`).then((out) => { + return ipfs(`pin ls ${keys.readme}`).then((out) => { expect(out).to.eql(`${keys.readme} direct\n`) }) }) it('ls (indirect)', () => { - return ipfs(`pin ls --path ${keys.index}`).then((out) => { + return ipfs(`pin ls ${keys.index}`).then((out) => { expect(out).to.eql(`${keys.index} indirect through ${keys.root}\n`) }) }) + it('ls with multiple keys', () => { + return ipfs(`pin ls ${keys.root} ${keys.readme}`).then((out) => { + expect(out).to.eql(`${keys.root} recursive\n${keys.readme} direct\n`) + }) + }) + it('ls (all)', () => { return ipfs('pin ls').then((out) => { expect(out.split('\n').length).to.eql(12) diff --git a/test/core/utils.spec.js b/test/core/utils.spec.js index df2141adfe..638aebf837 100644 --- a/test/core/utils.spec.js +++ b/test/core/utils.spec.js @@ -7,12 +7,11 @@ const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) const multihashes = require('multihashes') -const utils = require('../../src/core/utils') // This gets replaced by `create-repo-browser.js` in the browser const createTempRepo = require('../utils/create-repo-nodejs.js') - const IPFS = require('../../src/core') +const utils = require('../../src/core/utils') describe('utils', () => { const rootHashString = 
'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU' @@ -30,6 +29,7 @@ describe('utils', () => { links: [] }) }) + it('parses path with links', function () { expect(utils.parseIpfsPath(`${rootHashString}/docs/index`)) .to.deep.equal({ @@ -37,6 +37,7 @@ describe('utils', () => { links: ['docs', 'index'] }) }) + it('parses path with /ipfs/ prefix', function () { expect(utils.parseIpfsPath(`/ipfs/${rootHashString}/about`)) .to.deep.equal({ @@ -44,11 +45,13 @@ describe('utils', () => { links: ['about'] }) }) + it('returns error for malformed path', function () { const result = utils.parseIpfsPath(`${rootHashString}//about`) expect(result.error).to.be.instanceof(Error) .and.have.property('message', 'invalid ipfs ref path') }) + it('returns error if root is not a valid CID', function () { const result = utils.parseIpfsPath('invalid/ipfs/path') expect(result.error).to.be.instanceof(Error) diff --git a/test/http-api/spec/pin.js b/test/http-api/spec/pin.js index 7016ce1110..f58ff3a10f 100644 --- a/test/http-api/spec/pin.js +++ b/test/http-api/spec/pin.js @@ -94,7 +94,7 @@ module.exports = (http) => { it('finds all pinned objects', (done) => { api.inject({ method: 'GET', - url: ('/api/v0/pin/ls') + url: '/api/v0/pin/ls' }, (res) => { expect(res.statusCode).to.equal(200) expect(res.result.Keys[keys.root].Type).to.equal('recursive')
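
Editor's note (not part of the patch): a minimal usage sketch of the user-facing behaviour this PR adds -- the pin option on files.add and pin.ls over one or more ipfs-paths. It is illustrative only, assuming a js-ipfs node constructed as below; result shapes follow the diff above but are not guaranteed by it.

  const IPFS = require('ipfs')
  const ipfs = new IPFS()

  ipfs.once('ready', () => {
    // files.add pins by default; pass pin: false to skip pinning
    ipfs.files.add(Buffer.from('hello world'), { pin: true }, (err, files) => {
      if (err) { throw err }
      // pin.ls accepts one or more ipfs-paths plus a type filter:
      // 'direct', 'indirect', 'recursive' or 'all'
      ipfs.pin.ls(files[0].hash, { type: 'all' }, (err2, pins) => {
        if (err2) { throw err2 }
        pins.forEach((p) => console.log(`${p.hash} ${p.type}`))
      })
    })
  })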