From ada027341cbb207c4fa17cf431575a1f3621ad5c Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Fri, 16 Mar 2018 16:46:14 -0400
Subject: [PATCH] feat: jsipfs pin improvements (#1249)

* initial sweep through to understand how pin works. Did make some changes, but mostly minor.
* refactor pb schema to its own file
* fix: don't pin files during files.add if opts.pin === false
* feat: add some http qs parsing, http route/resources cleanup, cleanup core/utils.parseIpfsPath
* feat: expand pin tests. First draft. Still needs some further work.
* feat: Add logging for entry/exit of pins: add/rm/flush/load. Clean some documentation.
* feat: add --pin to files.add, fix: improper pin option parsing in core.
* feat: Use ipfs.files.add to add init-docs instead of directly using the unix-fs importer.
* feat(tests): Add tests for cli --pin option. I know this should be more of an integration test. Should be written in /core. Maybe talk with Victor about testing different layers.
* feat: use isIPFS to validate a multihash.
---
 src/cli/commands/files/add.js      |   5 +
 src/cli/commands/pin/add.js        |  30 +++
 src/cli/commands/pin/ls.js         |  42 +++
 src/cli/commands/pin/rm.js         |  29 ++
 src/core/components/files.js       |   7 +-
 src/core/components/init-assets.js |  21 +-
 src/core/components/init.js        |  13 +-
 src/core/components/pin-set.js     | 255 ++++++++++++++++++
 src/core/components/pin.js         | 414 +++++++++++++++++++++++++++++
 src/core/components/pin.proto.js   |  17 ++
 src/core/utils.js                  |  93 +++++++
 src/http/api/resources/files.js    |   1 +
 src/http/api/resources/pin.js      | 101 +++++++
 test/cli/files.js                  |  51 +++-
 test/cli/pin.js                    |  90 +++++++
 test/core/utils.spec.js            | 136 ++++++++++
 test/http-api/spec/pin.js          | 147 ++++++++++
 17 files changed, 1416 insertions(+), 36 deletions(-)
 create mode 100644 src/cli/commands/pin/add.js
 create mode 100644 src/cli/commands/pin/ls.js
 create mode 100644 src/cli/commands/pin/rm.js
 create mode 100644 src/core/components/pin-set.js
 create mode 100644 src/core/components/pin.js
 create mode 100644 src/core/components/pin.proto.js
 create mode 100644 src/http/api/resources/pin.js
 create mode 100644 test/cli/pin.js
 create mode 100644 test/core/utils.spec.js
 create mode 100644 test/http-api/spec/pin.js

diff --git a/src/cli/commands/files/add.js b/src/cli/commands/files/add.js
index 1abedb5122..cd0106e86d 100644
--- a/src/cli/commands/files/add.js
+++ b/src/cli/commands/files/add.js
@@ -173,6 +173,11 @@ module.exports = {
       type: 'boolean',
       default: false,
       describe: 'Write no output'
+    },
+    pin: {
+      type: 'boolean',
+      default: true,
+      describe: 'Pin this object when adding'
     }
   },
 
diff --git a/src/cli/commands/pin/add.js b/src/cli/commands/pin/add.js
new file mode 100644
index 0000000000..7b1a220aa5
--- /dev/null
+++ b/src/cli/commands/pin/add.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const print = require('../../utils').print
+
+module.exports = {
+  command: 'add <ipfs-path>',
+
+  describe: 'Pins objects to local storage.',
+
+  builder: {
+    recursive: {
+      type: 'boolean',
+      alias: 'r',
+      default: true,
+      describe: 'Recursively pin the object linked to by the specified object(s).'
+    }
+  },
+
+  handler (argv) {
+    const paths = argv['ipfs-path'].split(' ')
+    const recursive = argv.recursive
+    const type = recursive ? 'recursive' : 'direct'
+    argv.ipfs.pin.add(paths, { recursive: recursive }, (err, results) => {
+      if (err) { throw err }
+      results.forEach((res) => {
+        print(`pinned ${res.hash} ${type}ly`)
+      })
+    })
+  }
+}
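The command above is intended to behave like go-ipfs' `ipfs pin add`. A usage sketch, not part of the patch (the hash is the init-docs readme exercised by the CLI tests later in this patch; output follows the handler's print format, and the direct case assumes the object is not already pinned recursively):

    $ jsipfs pin add QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB
    pinned QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB recursively

    $ jsipfs pin add --recursive false QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB
    pinned QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB directly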
diff --git a/src/cli/commands/pin/ls.js b/src/cli/commands/pin/ls.js
new file mode 100644
index 0000000000..869d684f73
--- /dev/null
+++ b/src/cli/commands/pin/ls.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const print = require('../../utils').print
+
+module.exports = {
+  // bracket syntax with '...' tells yargs to optionally accept a list
+  command: 'ls [ipfs-path...]',
+
+  describe: 'List objects pinned to local storage.',
+
+  builder: {
+    type: {
+      type: 'string',
+      alias: 't',
+      default: 'all',
+      choices: ['direct', 'indirect', 'recursive', 'all'],
+      describe: 'The type of pinned keys to list.'
+    },
+    quiet: {
+      type: 'boolean',
+      alias: 'q',
+      default: false,
+      describe: 'Write just hashes of objects.'
+    }
+  },
+
+  handler: (argv) => {
+    const paths = argv.ipfsPath || ''
+    const type = argv.type
+    const quiet = argv.quiet
+    argv.ipfs.pin.ls(paths, { type: type }, (err, results) => {
+      if (err) { throw err }
+      results.forEach((res) => {
+        let line = res.hash
+        if (!quiet) {
+          line += ` ${res.type}`
+        }
+        print(line)
+      })
+    })
+  }
+}
diff --git a/src/cli/commands/pin/rm.js b/src/cli/commands/pin/rm.js
new file mode 100644
index 0000000000..f3d1e7cf7f
--- /dev/null
+++ b/src/cli/commands/pin/rm.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const print = require('../../utils').print
+
+module.exports = {
+  command: 'rm <ipfs-path>',
+
+  describe: 'Removes the pinned object from local storage.',
+
+  builder: {
+    recursive: {
+      type: 'boolean',
+      alias: 'r',
+      default: true,
+      describe: 'Recursively unpin the objects linked to by the specified object(s).'
+    }
+  },
+
+  handler: (argv) => {
+    const paths = argv['ipfs-path'].split(' ')
+    const recursive = argv.recursive
+    argv.ipfs.pin.rm(paths, { recursive: recursive }, (err, results) => {
+      if (err) { throw err }
+      results.forEach((res) => {
+        print(`unpinned ${res.hash}`)
+      })
+    })
+  }
+}
diff --git a/src/core/components/files.js b/src/core/components/files.js
index e223e26d13..b7faf7e9d9 100644
--- a/src/core/components/files.js
+++ b/src/core/components/files.js
@@ -21,6 +21,7 @@ const toB58String = require('multihashes').toB58String
 const WRAPPER = 'wrapper/'
 
 function noop () {}
+function identity (x) { return x }
 
 function prepareFile (self, opts, file, callback) {
   opts = opts || {}
@@ -130,7 +131,8 @@ module.exports = function files (self) {
     }
 
     let total = 0
-    let prog = opts.progress || (() => {})
+    const shouldPin = 'pin' in opts ? opts.pin : true
+    const prog = opts.progress || noop
     const progress = (bytes) => {
       total += bytes
       prog(total)
@@ -141,7 +143,8 @@ module.exports = function files (self) {
       pull.map(normalizeContent.bind(null, opts)),
       pull.flatten(),
       importer(self._ipld, opts),
-      pull.asyncMap(prepareFile.bind(null, self, opts))
+      pull.asyncMap(prepareFile.bind(null, self, opts)),
+      shouldPin ? pull.asyncMap(pinFile.bind(null, self)) : identity
     )
   }
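For consumers of the core API, the `pin` option added above looks like this. A minimal sketch, assuming a ready IPFS instance named `node` (results carry a `hash` field, as in prepareFile):

    // content is added to the repo but deliberately left unpinned,
    // so it stays eligible for garbage collection
    node.files.add(Buffer.from('skip the pin'), { pin: false }, (err, res) => {
      if (err) { throw err }
      console.log(res[0].hash) // base58 hash of the added file
    })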
diff --git a/src/core/components/init-assets.js b/src/core/components/init-assets.js
index 00c37120a8..79e96a9fdf 100644
--- a/src/core/components/init-assets.js
+++ b/src/core/components/init-assets.js
@@ -15,23 +15,20 @@ module.exports = function addDefaultAssets (self, log, callback) {
 
   pull(
     pull.values([initDocsPath]),
-    pull.asyncMap((val, cb) => glob(path.join(val, '/**/*'), cb)),
+    pull.asyncMap((val, cb) =>
+      glob(path.join(val, '/**/*'), { nodir: true }, cb)
+    ),
     pull.flatten(),
-    pull.map((element) => {
+    pull.map(element => {
       const addPath = element.substring(index + 1)
-
-      if (fs.statSync(element).isDirectory()) { return }
-
       return { path: addPath, content: file(element) }
     }),
-    // Filter out directories, which are undefined from above
-    pull.filter(Boolean),
-    importer(self._ipld),
-    pull.through((el) => {
-      if (el.path === 'init-docs') {
-        const cid = new CID(el.multihash)
+    self.files.addPullStream(),
+    pull.through(file => {
+      if (file.path === 'init-docs') {
+        const cid = new CID(file.hash)
         log('to get started, enter:\n')
-        log(`\t jsipfs files cat /ipfs/${cid.toBaseEncodedString()}/readme\n`)
+        log(`\tjsipfs files cat /ipfs/${cid.toBaseEncodedString()}/readme\n`)
       }
     }),
     pull.collect((err) => {
diff --git a/src/core/components/init.js b/src/core/components/init.js
index 59555f383a..2db6377c66 100644
--- a/src/core/components/init.js
+++ b/src/core/components/init.js
@@ -86,16 +86,11 @@ module.exports = function init (self) {
     }
 
     self.log('adding assets')
-    const tasks = [
+    parallel([
       // add empty unixfs dir object (go-ipfs assumes this exists)
-      (cb) => self.object.new('unixfs-dir', cb)
-    ]
-
-    if (typeof addDefaultAssets === 'function') {
-      tasks.push((cb) => addDefaultAssets(self, opts.log, cb))
-    }
-
-    parallel(tasks, (err) => {
+      (cb) => self.object.new('unixfs-dir', cb),
+      (cb) => addDefaultAssets(self, opts.log, cb)
+    ], (err) => {
       if (err) {
         cb(err)
       } else {
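The pin-set implementation below persists pin sets as a DAG, mirroring go-ipfs' pin/set.go: each set root stores a protobuf header (schema in pin.proto.js, later in this patch), length-prefixed with a varint, in its data field; the first `fanout` (256) links are hash-bucket links and any links after that are the pinned keys themselves. An illustrative sketch of the header layout, not part of the patch, using the same protons and varint packages (schema trimmed to the Set message):

    const protons = require('protons')
    const varint = require('varint')

    const pb = protons(`
      syntax = "proto2";
      message Set {
        optional uint32 version = 1;
        optional uint32 fanout = 2;
        optional fixed32 seed = 3;
      }
    `)

    const header = pb.Set.encode({ version: 1, fanout: 256, seed: 0 })
    // a set root's data = varint(header length) | header | item data...
    const data = Buffer.concat([Buffer.from(varint.encode(header.length)), header])
    console.log(varint.decode(data)) // => header.length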
diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js
new file mode 100644
index 0000000000..a0ea499051
--- /dev/null
+++ b/src/core/components/pin-set.js
@@ -0,0 +1,255 @@
+'use strict'
+
+const multihashes = require('multihashes')
+const toB58String = multihashes.toB58String
+const CID = require('cids')
+const protobuf = require('protons')
+const fnv1a = require('fnv1a')
+const dagPB = require('ipld-dag-pb')
+const DAGNode = dagPB.DAGNode
+const DAGLink = dagPB.DAGLink
+const varint = require('varint')
+const once = require('once')
+
+const pbSchema = require('./pin.proto')
+
+const emptyKeyHash = 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'
+const emptyKey = multihashes.fromB58String(emptyKeyHash)
+const defaultFanout = 256
+const maxItems = 8192
+const pb = protobuf(pbSchema)
+
+function readHeader (rootNode) {
+  // rootNode.data should be a buffer of the format:
+  // < varint(headerLength) | header | itemData... >
+  const rootData = rootNode.data
+  const hdrLength = varint.decode(rootData)
+  const vBytes = varint.decode.bytes
+  if (vBytes <= 0) {
+    return { err: 'Invalid Set header length' }
+  }
+  if (vBytes + hdrLength > rootData.length) {
+    return { err: 'Impossibly large set header length' }
+  }
+  const hdrSlice = rootData.slice(vBytes, hdrLength + vBytes)
+  const header = pb.Set.decode(hdrSlice)
+  if (header.version !== 1) {
+    return { err: 'Unsupported Set version: ' + header.version }
+  }
+  if (header.fanout > rootNode.links.length) {
+    return { err: 'Impossibly large fanout' }
+  }
+  return {
+    header: header,
+    data: rootData.slice(hdrLength + vBytes)
+  }
+}
+
+exports = module.exports = function (dag) {
+  const pinSet = {
+    // should this be part of `object` API?
+    hasChild: (root, childhash, callback, _links, _checked, _seen) => {
+      callback = once(callback)
+      if (typeof childhash === 'object') {
+        childhash = toB58String(childhash)
+      }
+      _links = _links || root.links.length
+      _checked = _checked || 0
+      _seen = _seen || {}
+
+      if (!root.links.length && _links === _checked) {
+        // all nodes have been checked
+        return callback(null, false)
+      }
+      root.links.forEach((link) => {
+        const bs58link = toB58String(link.multihash)
+        if (bs58link === childhash) {
+          return callback(null, true)
+        }
+
+        // don't check the same links twice
+        if (bs58link in _seen) { return }
+        _seen[bs58link] = true
+
+        dag.get(new CID(link.multihash), (err, res) => {
+          if (err) { return callback(err) }
+
+          _checked++
+          _links += res.value.links.length
+          pinSet.hasChild(res.value, childhash, callback, _links, _checked, _seen)
+        })
+      })
+    },
+
+    storeSet: (keys, logInternalKey, callback) => {
+      callback = once(callback)
+      const items = keys.map((key) => {
+        return {
+          key: key,
+          data: null
+        }
+      })
+      pinSet.storeItems(items, logInternalKey, (err, rootNode) => {
+        if (err) { return callback(err) }
+        const opts = { cid: new CID(rootNode.multihash) }
+        dag.put(rootNode, opts, (err, cid) => {
+          if (err) { return callback(err) }
+          logInternalKey(rootNode.multihash)
+          callback(null, rootNode)
+        })
+      })
+    },
+
+    storeItems: (items, logInternalKey, callback, _depth, _subcalls, _done) => {
+      callback = once(callback)
+      const seed = _depth
+      const pbHeader = pb.Set.encode({
+        version: 1,
+        fanout: defaultFanout,
+        seed: seed
+      })
+      let rootData = Buffer.concat([
+        Buffer.from(varint.encode(pbHeader.length)), pbHeader
+      ])
+      let rootLinks = []
+      for (let i = 0; i < defaultFanout; i++) {
+        rootLinks.push(new DAGLink('', 1, emptyKey))
+      }
+      logInternalKey(emptyKey)
+
+      if (items.length <= maxItems) {
+        // the items will fit in a single root node
+        const itemLinks = []
+        const itemData = []
+        const indices = []
+        for (let i = 0; i < items.length; i++) {
+          itemLinks.push(new DAGLink('', 1, items[i].key))
+          itemData.push(items[i].data || Buffer.alloc(0))
+          indices.push(i)
+        }
+        indices.sort((a, b) => {
+          const x = Buffer.compare(itemLinks[a].multihash, itemLinks[b].multihash)
+          if (x) { return x }
+          return (a < b ? -1 : 1)
+        })
+        const sortedLinks = indices.map((i) => { return itemLinks[i] })
+        const sortedData = indices.map((i) => { return itemData[i] })
+        rootLinks = rootLinks.concat(sortedLinks)
+        rootData = Buffer.concat([rootData].concat(sortedData))
+        DAGNode.create(rootData, rootLinks, (err, rootNode) => {
+          if (err) { return callback(err) }
+          return callback(null, rootNode)
+        })
+      } else {
+        // need to split up the items into multiple root nodes
+        // (using go-ipfs "wasteful but simple" approach for consistency)
+        _subcalls = _subcalls || 0
+        _done = _done || 0
+        const hashed = {}
+        const hashFn = (seed, key) => {
+          const buf = Buffer.alloc(4)
+          buf.writeUInt32LE(seed, 0)
+          const data = Buffer.concat([
+            buf, Buffer.from(toB58String(key))
+          ])
+          return fnv1a(data.toString('binary'))
+        }
+        // items will be distributed among `defaultFanout` bins
+        for (let i = 0; i < items.length; i++) {
+          let h = hashFn(seed, items[i].key) % defaultFanout
+          hashed[h] = hashed[h] || []
+          hashed[h].push(items[i])
+        }
+        // note: a closure over the bin index rather than .bind({h}),
+        // since arrow functions ignore a bound `this`
+        const storeItemsCb = (h) => (err, child) => {
+          if (err) { return callback(err) }
+          dag.put(child, (err) => {
+            if (err) { return callback(err) }
+            logInternalKey(child.multihash)
+            rootLinks[h] = new DAGLink(
+              '', child.size, child.multihash
+            )
+            _done++
+            if (_done === _subcalls) {
+              // all finished
+              DAGNode.create(rootData, rootLinks, (err, rootNode) => {
+                if (err) { return callback(err) }
+                return callback(null, rootNode)
+              })
+            }
+          })
+        }
+        const hashedKeys = Object.keys(hashed)
+        _subcalls += hashedKeys.length
+        hashedKeys.forEach(h => {
+          pinSet.storeItems(
+            hashed[h],
+            logInternalKey,
+            storeItemsCb(h),
+            _depth + 1,
+            _subcalls,
+            _done
+          )
+        })
+      }
+    },
+
+    loadSet: (rootNode, name, logInternalKey, callback) => {
+      callback = once(callback)
+      const link = rootNode.links.find(l => l.name === name)
+      if (!link) { return callback(new Error('No link found with name ' + name)) }
+      logInternalKey(link.multihash)
+      dag.get(new CID(link.multihash), (err, res) => {
+        if (err) { return callback(err) }
+        const keys = []
+        const walkerFn = (link) => {
+          keys.push(link.multihash)
+        }
+        pinSet.walkItems(res.value, walkerFn, logInternalKey, (err) => {
+          if (err) { return callback(err) }
+          return callback(null, keys)
+        })
+      })
+    },
+
+    walkItems: (node, walkerFn, logInternalKey, callback) => {
+      callback = once(callback)
+      const h = readHeader(node)
+      if (h.err) { return callback(h.err) }
+      const fanout = h.header.fanout
+      let subwalkCount = 0
+      let finishedCount = 0
+
+      const walkCb = (err) => {
+        if (err) { return callback(err) }
+        finishedCount++
+        if (subwalkCount === finishedCount) {
+          return callback()
+        }
+      }
+
+      for (let i = 0; i < node.links.length; i++) {
+        const link = node.links[i]
+        if (i >= fanout) {
+          // item link
+          walkerFn(link, i, h.data)
+        } else {
+          // fanout link
+          logInternalKey(link.multihash)
+          if (!emptyKey.equals(link.multihash)) {
+            subwalkCount++
+            dag.get(new CID(link.multihash), (err, res) => {
+              if (err) { return callback(err) }
+              pinSet.walkItems(
+                res.value, walkerFn, logInternalKey, walkCb
+              )
+            })
+          }
+        }
+      }
+      if (!subwalkCount) {
+        return callback()
+      }
+    }
+  }
+  return pinSet
+}
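Next, the core pin component that ties these sets to the public API. A brief usage sketch of the calls it exposes, not part of the patch (callback style; `node` is a ready IPFS instance and the hash is illustrative):

    node.pin.add('QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU', (err, results) => {
      if (err) { throw err }
      console.log(results) // [{ hash: 'QmUhUu...' }]
      node.pin.ls({ type: 'recursive' }, (err, pins) => {
        if (err) { throw err }
        console.log(pins) // [{ hash: 'QmUhUu...', type: 'recursive' }, ...]
      })
    })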
diff --git a/src/core/components/pin.js b/src/core/components/pin.js
new file mode 100644
index 0000000000..00959d24f0
--- /dev/null
+++ b/src/core/components/pin.js
@@ -0,0 +1,414 @@
+'use strict'
+
+const dagPB = require('ipld-dag-pb')
+const DAGNode = dagPB.DAGNode
+const DAGLink = dagPB.DAGLink
+const CID = require('cids')
+const pinSet = require('./pin-set')
+const normalizeHashes = require('../utils').normalizeHashes
+const promisify = require('promisify-es6')
+const multihashes = require('multihashes')
+const each = require('async/each')
+const series = require('async/series')
+const waterfall = require('async/waterfall')
+const until = require('async/until')
+const once = require('once')
+
+function toB58String (hash) {
+  return new CID(hash).toBaseEncodedString()
+}
+
+module.exports = function pin (self) {
+  let directPins = new Set()
+  let recursivePins = new Set()
+  let internalPins = new Set()
+
+  const pinDataStoreKey = '/local/pins'
+
+  const repo = self._repo
+  const dag = self.dag
+
+  const pin = {
+    types: {
+      direct: 'direct',
+      recursive: 'recursive',
+      indirect: 'indirect',
+      internal: 'internal',
+      all: 'all'
+    },
+
+    clear: () => {
+      directPins.clear()
+      recursivePins.clear()
+      internalPins.clear()
+    },
+
+    set: pinSet(dag),
+
+    add: promisify((hashes, options, callback) => {
+      if (typeof options === 'function') {
+        callback = options
+        options = null
+      }
+      callback = once(callback)
+      const recursive = options ? options.recursive : true
+      normalizeHashes(self, hashes, (err, mhs) => {
+        if (err) { return callback(err) }
+        // verify that each hash can be pinned
+        series(mhs.map(multihash => cb => {
+          const key = toB58String(multihash)
+          if (recursive) {
+            if (recursivePins.has(key)) {
+              // it's already pinned recursively
+              return cb(null, key)
+            }
+            // entire graph of nested links should be pinned,
+            // so make sure we have all the objects
+            dag._getRecursive(multihash, (err) => {
+              if (err) { return cb(err) }
+              // found all objects, we can add the pin
+              return cb(null, key)
+            })
+          } else {
+            if (recursivePins.has(key)) {
+              // recursive supersedes direct, can't have both
+              return cb(new Error(`${key} already pinned recursively`))
+            }
+            if (directPins.has(key)) {
+              // already directly pinned
+              return cb(null, key)
+            }
+            // make sure we have the object
+            dag.get(new CID(multihash), (err) => {
+              if (err) { return cb(err) }
+              // found the object, we can add the pin
+              return cb(null, key)
+            })
+          }
+        }), (err, results) => {
+          if (err) { return callback(err) }
+          // update the pin sets in memory
+          if (recursive) {
+            results.forEach(key => {
+              // recursive pin should replace direct pin
+              directPins.delete(key)
+              recursivePins.add(key)
+            })
+          } else {
+            results.forEach(key => directPins.add(key))
+          }
+          // persist updated pin sets to datastore
+          pin.flush((err, root) => {
+            if (err) { return callback(err) }
+            self.log(`Added pins: ${results}`)
+            return callback(null, results.map(key => ({hash: key})))
+          })
+        })
+      })
+    }),
+
+    rm: promisify((hashes, options, callback) => {
+      let recursive = true
+      if (typeof options === 'function') {
+        callback = options
+      } else if (options && options.recursive === false) {
+        recursive = false
+      }
+      callback = once(callback)
+      normalizeHashes(self, hashes, (err, mhs) => {
+        if (err) { return callback(err) }
+        // verify that each hash can be unpinned
+        series(mhs.map(multihash => cb => {
+          pin.isPinnedWithType(multihash, pin.types.all, (err, res) => {
+            if (err) { return cb(err) }
+            const { pinned, reason } = res
+            const key = toB58String(multihash)
+            if (!pinned) {
+              return cb(new Error(`${key} is not pinned`))
+            }
+            switch (reason) {
+              case (pin.types.recursive):
+                if (recursive) {
+                  return cb(null, key)
+                } else {
+                  return cb(new Error(`${key} is pinned recursively`))
+                }
+              case (pin.types.direct):
+                return cb(null, key)
+              default:
+                return cb(new Error(
+                  `${key} is pinned indirectly under ${reason}`
+                ))
+            }
+          })
+        }), (err, results) => {
+          if (err) { return callback(err) }
+          // update the pin sets in memory
+          const pins = recursive ? recursivePins : directPins
+          results.forEach(key => pins.delete(key))
+          // persist updated pin sets to datastore
+          pin.flush((err, root) => {
+            if (err) { return callback(err) }
+            self.log(`Removed pins: ${results}`)
+            return callback(null, results.map(key => ({hash: key})))
+          })
+        })
+      })
+    }),
+
+    ls: promisify((hashes, options, callback) => {
+      let type = pin.types.all
+      if (typeof hashes === 'function') {
+        callback = hashes
+        options = null
+        hashes = null
+      }
+      if (typeof options === 'function') {
+        callback = options
+      }
+      if (hashes && hashes.type) {
+        options = hashes
+        hashes = null
+      }
+      if (options && options.type) {
+        type = options.type.toLowerCase()
+      }
+      callback = once(callback)
+      if (!pin.types[type]) {
+        return callback(new Error(
+          `Invalid type '${type}', must be one of {direct, indirect, recursive, all}`
+        ))
+      }
+      if (hashes) {
+        // check the pinned state of specific hashes
+        normalizeHashes(self, hashes, (err, mhs) => {
+          if (err) { return callback(err) }
+          series(mhs.map(multihash => cb => {
+            pin.isPinnedWithType(multihash, pin.types.all, (err, res) => {
+              if (err) { return cb(err) }
+              const { pinned, reason } = res
+              const key = toB58String(multihash)
+              if (!pinned) {
+                return cb(new Error(
+                  `Path ${key} is not pinned`
+                ))
+              }
+              switch (reason) {
+                case pin.types.direct:
+                case pin.types.recursive:
+                  return cb(null, {
+                    hash: key,
+                    type: reason
+                  })
+                default:
+                  return cb(null, {
+                    hash: key,
+                    type: `${pin.types.indirect} through ${reason}`
+                  })
+              }
+            })
+          }), callback)
+        })
+      } else {
+        // show all pinned items of type
+        const result = []
+        if (type === pin.types.direct || type === pin.types.all) {
+          pin.directKeyStrings().forEach((hash) => {
+            result.push({
+              type: pin.types.direct,
+              hash: hash
+            })
+          })
+        }
+        if (type === pin.types.recursive || type === pin.types.all) {
+          pin.recursiveKeyStrings().forEach((hash) => {
+            result.push({
+              type: pin.types.recursive,
+              hash: hash
+            })
+          })
+        }
+        if (type === pin.types.indirect || type === pin.types.all) {
+          pin.getIndirectKeys((err, hashes) => {
+            if (err) { return callback(err) }
+            hashes.forEach((hash) => {
+              result.push({
+                type: pin.types.indirect,
+                hash: hash
+              })
+            })
+            return callback(null, result)
+          })
+        } else {
+          return callback(null, result)
+        }
+      }
+    }),
+
+    isPinned: (multihash, callback) => {
+      pin.isPinnedWithType(multihash, pin.types.all, callback)
+    },
+
+    isPinnedWithType: (multihash, pinType, callback) => {
+      const key = toB58String(multihash)
+      // recursive
+      if ((pinType === pin.types.recursive || pinType === pin.types.all) &&
+          recursivePins.has(key)) {
+        return callback(null, {pinned: true, reason: pin.types.recursive})
+      }
+      if ((pinType === pin.types.recursive)) {
+        return callback(null, {pinned: false})
+      }
+      // direct
+      if ((pinType === pin.types.direct || pinType === pin.types.all) &&
+          directPins.has(key)) {
+        return callback(null, {pinned: true, reason: pin.types.direct})
+      }
+      if ((pinType === pin.types.direct)) {
+        return callback(null, {pinned: false})
+      }
+      // internal
+      if ((pinType === pin.types.internal || pinType === pin.types.all) &&
+          internalPins.has(key)) {
+        return callback(null, {pinned: true, reason: pin.types.internal})
+      }
+      if ((pinType === pin.types.internal)) {
+        return callback(null, {pinned: false})
+      }
+
+      // indirect (default)
+      // check each recursive key to see if multihash is under it
+      const rKeys = pin.recursiveKeys()
+      let found = false
+      until(
+        // search until multihash was found or no more keys to check
+        () => (found || !rKeys.length),
+        (cb) => {
+          const key = rKeys.pop()
+          dag.get(new CID(key), (err, res) => {
+            if (err) { return cb(err) }
+            pin.set.hasChild(res.value, multihash, (err, has) => {
+              if (err) { return cb(err) }
+              found = has
+              // if found, return the hash of the parent recursive pin
+              cb(null, found ? toB58String(res.value.multihash) : null)
+            })
+          })
+        },
+        (err, result) => {
+          if (err) { return callback(err) }
+          return callback(null, {pinned: found, reason: result})
+        }
+      )
+    },
+
+    directKeyStrings: () => Array.from(directPins),
+
+    recursiveKeyStrings: () => Array.from(recursivePins),
+
+    internalKeyStrings: () => Array.from(internalPins),
+
+    directKeys: () => pin.directKeyStrings().map(key => multihashes.fromB58String(key)),
+
+    recursiveKeys: () => pin.recursiveKeyStrings().map(key => multihashes.fromB58String(key)),
+
+    internalKeys: () => pin.internalKeyStrings().map(key => multihashes.fromB58String(key)),
+
+    getIndirectKeys: (callback) => {
+      const indirectKeys = new Set()
+      const rKeys = pin.recursiveKeys()
+      each(rKeys, (multihash, cb) => {
+        dag._getRecursive(multihash, (err, nodes) => {
+          if (err) { return cb(err) }
+          nodes.forEach((node) => {
+            const key = toB58String(node.multihash)
+            if (!directPins.has(key) && !recursivePins.has(key)) {
+              // not already pinned recursively or directly
+              indirectKeys.add(key)
+            }
+          })
+          cb()
+        })
+      }, (err) => {
+        if (err) { return callback(err) }
+        callback(null, Array.from(indirectKeys))
+      })
+    },
+
+    // encodes and writes pin key sets to the datastore
+    // each key set will be stored as a DAG node, and a root node will link to both
+    flush: promisify((callback) => {
+      const newInternalPins = new Set()
+      const logInternalKey = (mh) => newInternalPins.add(toB58String(mh))
+      const handle = {
+        put: (k, v, cb) => {
+          handle[k] = v
+          cb()
+        }
+      }
+      waterfall([
+        // create link to direct keys node
+        (cb) => pin.set.storeSet(pin.directKeys(), logInternalKey, cb),
+        (dRoot, cb) => DAGLink.create(pin.types.direct, dRoot.size, dRoot.multihash, cb),
+        (dLink, cb) => handle.put('dLink', dLink, cb),
+        // create link to recursive keys node
+        (cb) => pin.set.storeSet(pin.recursiveKeys(), logInternalKey, cb),
+        (rRoot, cb) => DAGLink.create(pin.types.recursive, rRoot.size, rRoot.multihash, cb),
+        (rLink, cb) => handle.put('rLink', rLink, cb),
+        // the pin-set nodes link to an empty node, so make sure it's added to dag
+        (cb) => DAGNode.create(Buffer.alloc(0), cb),
+        (empty, cb) => dag.put(empty, {cid: new CID(empty.multihash)}, cb),
+        // create root node with links to direct and recursive nodes
+        (cid, cb) => DAGNode.create(Buffer.alloc(0), [handle.dLink, handle.rLink], cb),
+        (root, cb) => handle.put('root', root, cb),
+        // add the root node to dag
+        (cb) => dag.put(handle.root, {cid: new CID(handle.root.multihash)}, cb),
+        // update the internal pin set
+        (cid, cb) => cb(null, logInternalKey(handle.root.multihash)),
+        // save serialized root to datastore under a consistent key
+        (_, cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), // hack for CLI tests
+        (_, cb) => repo.datastore.put(pinDataStoreKey, handle.root.multihash, cb)
+      ], (err, result) => {
+        if (err) { return callback(err) }
+        self.log(`Flushed ${handle.root} to the datastore.`)
+        internalPins = newInternalPins
+        return callback(null, handle.root)
+      })
+    }),
+
+    load: promisify((callback) => {
+      const newInternalPins = new Set()
+      const logInternalKey = (mh) => newInternalPins.add(toB58String(mh))
+      const handle = {
+        put: (k, v, cb) => {
+          handle[k] = v
+          cb()
+        }
+      }
+      waterfall([
+        (cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), // hack for CLI tests
+        (_, cb) => repo.datastore.has(pinDataStoreKey, cb),
+        (has, cb) => has ? cb() : cb('No pins to load'),
+        (cb) => repo.datastore.get(pinDataStoreKey, cb),
+        (mh, cb) => dag.get(new CID(mh), cb),
+        (root, cb) => handle.put('root', root.value, cb),
+        (cb) => pin.set.loadSet(handle.root, pin.types.recursive, logInternalKey, cb),
+        (rKeys, cb) => handle.put('rKeys', rKeys, cb),
+        (cb) => pin.set.loadSet(handle.root, pin.types.direct, logInternalKey, cb)
+      ], (err, dKeys) => {
+        if (err && err !== 'No pins to load') {
+          return callback(err)
+        }
+        if (dKeys) {
+          directPins = new Set(dKeys.map(mh => toB58String(mh)))
+          recursivePins = new Set(handle.rKeys.map(mh => toB58String(mh)))
+          logInternalKey(handle.root.multihash)
+          internalPins = newInternalPins
+        }
+        self.log('Loaded pins from the datastore')
+        return callback()
+      })
+    })
+  }
+  return pin
+}
diff --git a/src/core/components/pin.proto.js b/src/core/components/pin.proto.js
new file mode 100644
index 0000000000..db2391c91f
--- /dev/null
+++ b/src/core/components/pin.proto.js
@@ -0,0 +1,17 @@
+/**
+ * Protobuf interface
+ * from go-ipfs/pin/internal/pb/header.proto
+ */
+module.exports = `
+  syntax = "proto2";
+
+  package ipfs.pin;
+
+  option go_package = "pb";
+
+  message Set {
+    optional uint32 version = 1;
+    optional uint32 fanout = 2;
+    optional fixed32 seed = 3;
+  }
+`
diff --git a/src/core/utils.js b/src/core/utils.js
index a492250383..be101273bb 100644
--- a/src/core/utils.js
+++ b/src/core/utils.js
@@ -1,3 +1,96 @@
 'use strict'
 
+const multihashes = require('multihashes')
+const mapSeries = require('async/mapSeries')
+const CID = require('cids')
+const isIPFS = require('is-ipfs')
+
 exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.'
+
+/**
+ * Break an ipfs-path down into its root hash and an array of links.
+ *
+ * examples:
+ *   b58Hash -> { root: 'b58Hash', links: [] }
+ *   b58Hash/mercury/venus -> { root: 'b58Hash', links: ['mercury', 'venus'] }
+ *   /ipfs/b58Hash/links/by/name -> { root: 'b58Hash', links: ['links', 'by', 'name'] }
+ *
+ * @param  {String} ipfsPath An ipfs-path
+ * @return {Object} { root: base58 string, links: [string], ?error: Error }
+ */
+exports.parseIpfsPath = function parseIpfsPath (ipfsPath) {
+  const matched = ipfsPath.match(/^(?:\/ipfs\/)?([^/]+(?:\/[^/]+)*)\/?$/)
+  const errorResult = {
+    error: new Error('invalid ipfs ref path')
+  }
+  if (!matched) {
+    return errorResult
+  }
+
+  const [root, ...links] = matched[1].split('/')
+
+  if (isIPFS.multihash(root)) {
+    return {
+      root: root,
+      links: links
+    }
+  } else {
+    return errorResult
+  }
+}
+
+/**
+ * Resolve various styles of an ipfs-path to the hash of the target node.
+ * Follows links in the path.
+ *
+ * Handles formats:
+ *  - <base58 hash>
+ *  - <base58 hash>/link/to/another/planet
+ *  - /ipfs/<base58 hash>
+ *  - Buffers of any of the above
+ *  - multihash Buffer
+ *
+ * @param  {IPFS} ipfs the IPFS node
+ * @param  {String|Buffer|Array} ipfsPaths A single or collection of ipfs-paths
+ * @param  {Function} callback Node-style callback. res is an Array of multihash Buffers
+ * @return {void}
+ */
+exports.normalizeHashes = function normalizeHashes (ipfs, ipfsPaths, callback) {
+  if (!Array.isArray(ipfsPaths)) {
+    ipfsPaths = [ipfsPaths]
+  }
+  mapSeries(ipfsPaths, (path, cb) => {
+    const validate = (mh) => {
+      try {
+        multihashes.validate(mh)
+        cb(null, mh)
+      } catch (err) { cb(err) }
+    }
+    if (typeof path !== 'string') {
+      return validate(path)
+    }
+    const {error, root, links} = exports.parseIpfsPath(path)
+    if (error) return cb(error)
+    const rootHash = multihashes.fromB58String(root)
+    if (!links.length) {
+      return validate(rootHash)
+    }
+    // recursively follow named links to the target node
+    const pathFn = (err, obj) => {
+      if (err) { return cb(err) }
+      if (!links.length) {
+        // done tracing, we have the target node
+        return validate(obj.multihash)
+      }
+      const linkName = links.shift()
+      const nextLink = obj.links.find(link => link.name === linkName)
+      if (!nextLink) {
+        return cb(new Error(
+          `no link named ${linkName} under ${obj.toJSON().Hash}`
+        ))
+      }
+      ipfs.object.get(nextLink.multihash, pathFn)
+    }
+    ipfs.object.get(rootHash, pathFn)
+  }, callback)
+}
diff --git a/src/http/api/resources/files.js b/src/http/api/resources/files.js
index b62fb67aa1..6f889926ef 100644
--- a/src/http/api/resources/files.js
+++ b/src/http/api/resources/files.js
@@ -166,6 +166,7 @@ exports.add = {
       otherwise: Joi.boolean().valid(false)
     }),
     'only-hash': Joi.boolean(),
+    pin: Joi.boolean().default(true),
     'wrap-with-directory': Joi.boolean()
   })
   // TODO: Necessary until validate "recursive", "stream-channels" etc.
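The new pin resource below mirrors the go-ipfs HTTP API. A summary of the request and response shapes it implements, collected here for reviewers (hash and path placeholders are illustrative):

    // POST /api/v0/pin/add?arg=<hash>[&recursive=true]  -> { Pins: ['<hash>'] }
    // POST /api/v0/pin/rm?arg=<hash>[&recursive=true]   -> { Pins: ['<hash>'] }
    // GET  /api/v0/pin/ls[?arg=<path>][&type=all]       -> { Keys: { '<hash>': { Type: '<type>' } } }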
diff --git a/src/http/api/resources/pin.js b/src/http/api/resources/pin.js
new file mode 100644
index 0000000000..a2c89ad29b
--- /dev/null
+++ b/src/http/api/resources/pin.js
@@ -0,0 +1,101 @@
+'use strict'
+
+const _ = require('lodash')
+const debug = require('debug')
+const log = debug('jsipfs:http-api:pin')
+log.error = debug('jsipfs:http-api:pin:error')
+
+exports = module.exports
+
+function parseArgs (request, reply) {
+  if (!request.query.arg) {
+    return reply({
+      Message: "Argument 'arg' is required",
+      Code: 0
+    }).code(400).takeover()
+  }
+
+  const recursive = request.query.recursive !== 'false'
+
+  return reply({
+    path: request.query.arg,
+    recursive: recursive
+  })
+}
+
+exports.ls = {
+  parseArgs: (request, reply) => {
+    const ipfs = request.server.app.ipfs
+    const type = request.query.type || ipfs.pin.types.all
+
+    return reply({
+      path: request.query.arg,
+      type: type
+    })
+  },
+
+  handler: (request, reply) => {
+    const { path, type } = request.pre.args
+    const ipfs = request.server.app.ipfs
+    ipfs.pin.ls(path, { type }, (err, result) => {
+      if (err) {
+        log.error(err)
+        return reply({
+          Message: `Failed to list pins: ${err.message}`,
+          Code: 0
+        }).code(500)
+      }
+
+      return reply({
+        Keys: _.mapValues(
+          _.keyBy(result, obj => obj.hash),
+          obj => ({Type: obj.type})
+        )
+      })
+    })
+  }
+}
+
+exports.add = {
+  parseArgs: parseArgs,
+
+  handler: (request, reply) => {
+    const ipfs = request.server.app.ipfs
+    const { path, recursive } = request.pre.args
+    ipfs.pin.add(path, { recursive }, (err, result) => {
+      if (err) {
+        log.error(err)
+        return reply({
+          Message: `Failed to add pin: ${err.message}`,
+          Code: 0
+        }).code(500)
+      }
+
+      return reply({
+        Pins: result.map(obj => obj.hash)
+      })
+    })
+  }
+}
+
+exports.rm = {
+  parseArgs: parseArgs,
+
+  handler: (request, reply) => {
+    const ipfs = request.server.app.ipfs
+    const { path, recursive } = request.pre.args
+    ipfs.pin.rm(path, { recursive }, (err, result) => {
+      if (err) {
+        log.error(err)
+        return reply({
+          Message: `Failed to remove pin: ${err.message}`,
+          Code: 0
+        }).code(500)
+      }
+
+      return reply({
+        Pins: result.map(obj => obj.hash)
+      })
+    })
+  }
+}
diff --git a/test/cli/files.js b/test/cli/files.js
index 07b5421aa7..8d0c667fea 100644
--- a/test/cli/files.js
+++ b/test/cli/files.js
@@ -287,10 +287,10 @@ describe('files', () => runOnAndOff((thing) => {
 
   it('add --only-hash outputs correct hash', function () {
     return ipfs('files add --only-hash src/init-files/init-docs/readme')
-    .then(out =>
-      expect(out)
-        .to.eql('added QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB readme\n')
-    )
+      .then(out =>
+        expect(out)
+          .to.eql('added QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB readme\n')
+      )
   })
 
   it('add --only-hash does not add a file to the datastore', function () {
@@ -301,17 +301,42 @@ describe('files', () => runOnAndOff((thing) => {
     fs.writeFileSync(filepath, content)
 
     return ipfs(`files add --only-hash ${filepath}`)
+      .then(out => {
+        const hash = out.split(' ')[1]
+
+        // 'jsipfs object get <hash>' should timeout with the daemon on
+        // and should fail fast with the daemon off
+        return Promise.race([
+          ipfs.fail(`object get ${hash}`),
+          new Promise((resolve, reject) => setTimeout(resolve, 4000))
+        ])
+          .then(() => fs.unlinkSync(filepath))
+      })
+  })
+
+  it('add pins by default', function () {
+    const filePath = path.join(os.tmpdir(), hat())
+    const content = String(Math.random())
+    fs.writeFileSync(filePath, content)
+
+    return ipfs(`files add -Q ${filePath}`)
       .then(out => {
-        const hash = out.split(' ')[1]
-
-        // 'jsipfs object get <hash>' should timeout with the daemon on
-        // and should fail fast with the daemon off
-        return Promise.race([
-          ipfs.fail(`object get ${hash}`),
-          new Promise((resolve, reject) => setTimeout(resolve, 4000))
-        ])
-        .then(() => fs.unlinkSync(filepath))
+        const hash = out.trim()
+        return ipfs(`pin ls ${hash}`)
+          .then(ls => expect(ls).to.include(hash))
       })
+      .then(() => fs.unlinkSync(filePath))
+  })
+
+  it('add does not pin with --pin=false', function () {
+    const filePath = path.join(os.tmpdir(), hat())
+    const content = String(Math.random())
+    fs.writeFileSync(filePath, content)
+
+    return ipfs(`files add -Q --pin=false ${filePath}`)
+      .then(out => ipfs(`pin ls ${out.trim()}`))
+      .then(ls => expect(ls.trim()).to.eql(''))
+      .then(() => fs.unlinkSync(filePath))
   })
 
   HASH_ALGS.forEach((name) => {
diff --git a/test/cli/pin.js b/test/cli/pin.js
new file mode 100644
index 0000000000..9271ffe410
--- /dev/null
+++ b/test/cli/pin.js
@@ -0,0 +1,90 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('chai').expect
+const runOnAndOff = require('../utils/on-and-off')
+
+// file structure for recursive tests:
+// root (init-docs)
+//  |`readme
+//  `docs
+//    `index
+
+const keys = {
+  root: 'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU',
+  readme: 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB',
+  docs: 'QmegvLXxpVKiZ4b57Xs1syfBVRd8CbucVHAp7KpLQdGieC',
+  index: 'QmQN88TEidd3RY2u3dpib49fERTDfKtDpvxnvczATNsfKT'
+}
+
+describe('pin', () => runOnAndOff((thing) => {
+  const filesDir = 'test/fixtures/test-data/recursive-get-dir/init-docs'
+
+  let ipfs
+
+  before(() => {
+    ipfs = thing.ipfs
+    return ipfs(`files add -r ${filesDir}`)
+  })
+
+  // rm first because `files add` should pin recursively by default
+  it('rm (recursively by default)', () => {
+    return ipfs(`pin rm ${keys.root}`)
+      .then((out) => expect(out).to.equal(`unpinned ${keys.root}\n`))
+      .then(() => ipfs('pin ls'))
+      .then((out) => expect(out).to.equal(''))
+  })
+
+  it('add (recursively by default)', () => {
+    return ipfs(`pin add ${keys.root}`).then((out) => {
+      expect(out).to.eql(`pinned ${keys.root} recursively\n`)
+    })
+  })
+
+  it('add (direct)', () => {
+    return ipfs(`pin add ${keys.readme} --recursive false`).then((out) => {
+      expect(out).to.eql(`pinned ${keys.readme} directly\n`)
+    })
+  })
+
+  it('ls (recursive)', () => {
+    return ipfs(`pin ls ${keys.root}`).then((out) => {
+      expect(out).to.eql(`${keys.root} recursive\n`)
+    })
+  })
+
+  it('ls (direct)', () => {
+    return ipfs(`pin ls ${keys.readme}`).then((out) => {
+      expect(out).to.eql(`${keys.readme} direct\n`)
+    })
+  })
+
+  it('ls (indirect)', () => {
+    return ipfs(`pin ls ${keys.index}`).then((out) => {
+      expect(out).to.eql(`${keys.index} indirect through ${keys.root}\n`)
+    })
+  })
+
+  it('ls with multiple keys', () => {
+    return ipfs(`pin ls ${keys.root} ${keys.readme}`).then((out) => {
+      expect(out).to.eql(`${keys.root} recursive\n${keys.readme} direct\n`)
+    })
+  })
+
+  it('ls (all)', () => {
+    return ipfs('pin ls').then((out) => {
+      expect(out.split('\n').length).to.eql(12)
+      expect(out).to.include(`${keys.root} recursive\n`)
+      expect(out).to.include(`${keys.readme} direct\n`)
+      expect(out).to.include(`${keys.docs} indirect\n`)
+      expect(out).to.include(`${keys.index} indirect\n`)
+    })
+  })
+
+  it('rm (direct)', () => {
+    return ipfs(`pin rm --recursive false ${keys.readme}`)
+      .then((out) => expect(out).to.equal(`unpinned ${keys.readme}\n`))
+      .then(() => ipfs('pin ls'))
+      .then((out) => expect(out).to.not.include(`${keys.readme} direct\n`))
+  })
+}))
diff --git a/test/core/utils.spec.js b/test/core/utils.spec.js
new file mode 100644
index 0000000000..638aebf837
--- /dev/null
+++ b/test/core/utils.spec.js
@@ -0,0 +1,136 @@
+/* eslint max-nested-callbacks: ["error", 8] */
+/* eslint-env mocha */
+'use strict'
+
+const chai = require('chai')
+const dirtyChai = require('dirty-chai')
+const expect = chai.expect
+chai.use(dirtyChai)
+const multihashes = require('multihashes')
+
+// This gets replaced by `create-repo-browser.js` in the browser
+const createTempRepo = require('../utils/create-repo-nodejs.js')
+const IPFS = require('../../src/core')
+const utils = require('../../src/core/utils')
+
+describe('utils', () => {
+  const rootHashString = 'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU'
+  const rootHash = multihashes.fromB58String(rootHashString)
+  const rootPathString = `/ipfs/${rootHashString}/`
+  const aboutHashString = 'QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V'
+  const aboutHash = multihashes.fromB58String(aboutHashString)
+  const aboutPathString = `/ipfs/${rootHashString}/about`
+
+  describe('parseIpfsPath', () => {
+    it('parses path with no links', function () {
+      expect(utils.parseIpfsPath(rootHashString))
+        .to.deep.equal({
+          root: rootHashString,
+          links: []
+        })
+    })
+
+    it('parses path with links', function () {
+      expect(utils.parseIpfsPath(`${rootHashString}/docs/index`))
+        .to.deep.equal({
+          root: rootHashString,
+          links: ['docs', 'index']
+        })
+    })
+
+    it('parses path with /ipfs/ prefix', function () {
+      expect(utils.parseIpfsPath(`/ipfs/${rootHashString}/about`))
+        .to.deep.equal({
+          root: rootHashString,
+          links: ['about']
+        })
+    })
+
+    it('returns error for malformed path', function () {
+      const result = utils.parseIpfsPath(`${rootHashString}//about`)
+      expect(result.error).to.be.instanceof(Error)
+        .and.have.property('message', 'invalid ipfs ref path')
+    })
+
+    it('returns error if root is not a valid CID', function () {
+      const result = utils.parseIpfsPath('invalid/ipfs/path')
+      expect(result.error).to.be.instanceof(Error)
+        .and.have.property('message', 'invalid ipfs ref path')
+    })
+  })
+
+  describe('normalizeHashes', function () {
+    this.timeout(80 * 1000)
+    let node
+    let repo
+
+    before((done) => {
+      repo = createTempRepo()
+      node = new IPFS({
+        repo: repo
+      })
+      node.once('ready', done)
+    })
+
+    after((done) => {
+      repo.teardown(done)
+    })
+
+    it('normalizes hash string to array with multihash object', (done) => {
+      utils.normalizeHashes(node, rootHashString, (err, hashes) => {
+        expect(err).to.not.exist()
+        expect(hashes.length).to.equal(1)
+        expect(hashes[0]).to.deep.equal(rootHash)
+        done()
+      })
+    })
+
+    it('normalizes array of hash strings to array of multihash objects', (done) => {
+      utils.normalizeHashes(node, [rootHashString, aboutHashString], (err, hashes) => {
+        expect(err).to.not.exist()
+        expect(hashes.length).to.equal(2)
+        expect(hashes[0]).to.deep.equal(rootHash)
+        expect(hashes[1]).to.deep.equal(aboutHash)
+        done()
+      })
+    })
+
+    it('normalizes multihash object to array with multihash object', (done) => {
+      utils.normalizeHashes(node, aboutHash, (err, hashes) => {
+        expect(err).to.not.exist()
+        expect(hashes.length).to.equal(1)
+        expect(hashes[0]).to.deep.equal(aboutHash)
+        done()
+      })
+    })
+
+    it('normalizes array of multihash objects to array of multihash objects', (done) => {
+      utils.normalizeHashes(node, [rootHash, aboutHash], (err, hashes) => {
+        expect(err).to.not.exist()
+        expect(hashes.length).to.equal(2)
+        expect(hashes[0]).to.deep.equal(rootHash)
+        expect(hashes[1]).to.deep.equal(aboutHash)
+        done()
+      })
+    })
+
+    it('normalizes ipfs path string to array with multihash object', (done) => {
+      utils.normalizeHashes(node, aboutPathString, (err, hashes) => {
+        expect(err).to.not.exist()
+        expect(hashes.length).to.equal(1)
+        expect(hashes[0]).to.deep.equal(aboutHash)
+        done()
+      })
+    })
+
+    it('normalizes array of ipfs path strings to array with multihash objects', (done) => {
+      utils.normalizeHashes(node, [aboutPathString, rootPathString], (err, hashes) => {
+        expect(err).to.not.exist()
+        expect(hashes.length).to.equal(2)
+        expect(hashes[0]).to.deep.equal(aboutHash)
+        expect(hashes[1]).to.deep.equal(rootHash)
+        done()
+      })
+    })
+  })
+})
diff --git a/test/http-api/spec/pin.js b/test/http-api/spec/pin.js
new file mode 100644
index 0000000000..f58ff3a10f
--- /dev/null
+++ b/test/http-api/spec/pin.js
@@ -0,0 +1,147 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('chai').expect
+const fs = require('fs')
+const FormData = require('form-data')
+const streamToPromise = require('stream-to-promise')
+const each = require('async/each')
+
+// use a tree of ipfs objects for recursive tests:
+// root
+//  |`leaf
+//  `branch
+//    `subLeaf
+
+const keys = {
+  root: 'QmWQwS2Xh1SFGMPzUVYQ52b7RC7fTfiaPHm3ZyTRZuHmer',
+  leaf: 'QmaZoTQ6wFe7EtvaePBUeXavfeRqCAq3RUMomFxBpZLrLA',
+  branch: 'QmNxjjP7dtx6pzxWGBRCrgmjX3JqKL7uF2Kjx7ExiZDbSB',
+  subLeaf: 'QmUzzznkyQL7FjjBztG3D1tTjBuxeArLceDZnuSowUggXL'
+}
+
+module.exports = (http) => {
+  describe('pin', () => {
+    let api
+
+    before((done) => {
+      // add test tree to repo
+      api = http.api.server.select('API')
+      const putFile = (filename, cb) => {
+        const filePath = `test/test-data/tree/${filename}.json`
+        const form = new FormData()
+        form.append('file', fs.createReadStream(filePath))
+        const headers = form.getHeaders()
+        streamToPromise(form).then((payload) => {
+          api.inject({
+            method: 'POST',
+            url: '/api/v0/object/put',
+            headers: headers,
+            payload: payload
+          }, (res) => {
+            expect(res.statusCode).to.equal(200)
+            cb()
+          })
+        })
+      }
+      each(Object.keys(keys), putFile, (err) => {
+        expect(err).to.not.exist()
+        done()
+      })
+    })
+
+    describe('/pin/add', () => {
+      it('pins object recursively by default', (done) => {
+        api.inject({
+          method: 'POST',
+          url: `/api/v0/pin/add?arg=${keys.root}`
+        }, (res) => {
+          expect(res.statusCode).to.equal(200)
+          expect(res.result).to.deep.equal({Pins: [keys.root]})
+          done()
+        })
+      })
+    })
+
+    describe('/pin/add (direct)', () => {
+      it('pins object directly if specified', (done) => {
+        api.inject({
+          method: 'POST',
+          url: `/api/v0/pin/add?arg=${keys.leaf}&recursive=false`
+        }, (res) => {
+          expect(res.statusCode).to.equal(200)
+          expect(res.result).to.deep.equal({Pins: [keys.leaf]})
+          done()
+        })
+      })
+    })
+
+    describe('/pin/ls (with path)', () => {
+      it('finds specified pinned object', (done) => {
+        api.inject({
+          method: 'GET',
+          url: `/api/v0/pin/ls?arg=/ipfs/${keys.root}/branch/subLeaf`
+        }, (res) => {
+          expect(res.statusCode).to.equal(200)
+          expect(res.result.Keys[keys.subLeaf].Type)
+            .to.equal(`indirect through ${keys.root}`)
+          done()
+        })
+      })
+    })
+
+    describe('/pin/ls (without path or type)', () => {
+      it('finds all pinned objects', (done) => {
+        api.inject({
+          method: 'GET',
+          url: '/api/v0/pin/ls'
+        }, (res) => {
+          expect(res.statusCode).to.equal(200)
+          expect(res.result.Keys[keys.root].Type).to.equal('recursive')
+          expect(res.result.Keys[keys.leaf].Type).to.equal('direct')
+          expect(res.result.Keys[keys.branch].Type).to.equal('indirect')
+          expect(res.result.Keys[keys.subLeaf].Type).to.equal('indirect')
+          done()
+        })
+      })
+    })
+
+    describe('/pin/rm (direct)', () => {
+      it('unpins only directly pinned objects if specified', (done) => {
+        api.inject({
+          method: 'POST',
+          url: `/api/v0/pin/rm?arg=${keys.leaf}&recursive=false`
+        }, (res) => {
+          expect(res.statusCode).to.equal(200)
+          expect(res.result).to.deep.equal({Pins: [keys.leaf]})
+
+          api.inject({
+            method: 'POST',
+            url: `/api/v0/pin/rm?arg=${keys.root}&recursive=false`
+          }, (res) => {
+            expect(res.statusCode).to.equal(500)
+            expect(res.result.Message).to.equal(
+              'Failed to remove pin: ' +
+              'QmWQwS2Xh1SFGMPzUVYQ52b7RC7fTfiaPHm3ZyTRZuHmer ' +
+              'is pinned recursively'
+            )
+            done()
+          })
+        })
+      })
+    })
+
+    describe('/pin/rm', () => {
+      it('unpins recursively by default', (done) => {
+        api.inject({
+          method: 'POST',
+          url: `/api/v0/pin/rm?arg=${keys.root}`
+        }, (res) => {
+          expect(res.statusCode).to.equal(200)
+          expect(res.result).to.deep.equal({Pins: [keys.root]})
+          done()
+        })
+      })
+    })
+  })
+}
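A closing note on the bucketing that pin-set's storeItems uses once a set outgrows maxItems (8192): keys are spread across the 256 fanout bins by a seeded fnv1a hash, and the seed changes with recursion depth so the subtrees stay balanced. A standalone sketch of that computation, not part of the patch, using the same fnv1a package (the key is passed here as a base58 string, where pin-set first converts a multihash Buffer with toB58String):

    const fnv1a = require('fnv1a')

    function bucketFor (seed, b58Key) {
      const buf = Buffer.alloc(4)
      buf.writeUInt32LE(seed, 0)
      // same input layout as pin-set.js hashFn: uint32LE(seed) ++ base58 key bytes
      const data = Buffer.concat([buf, Buffer.from(b58Key)])
      return fnv1a(data.toString('binary')) % 256
    }

    // the same key generally lands in a different bucket at each depth
    console.log(bucketFor(0, 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
    console.log(bucketFor(1, 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))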