diff --git a/package.json b/package.json
index 723da697c4..48af301836 100644
--- a/package.json
+++ b/package.json
@@ -118,17 +118,17 @@
     "ipld-ethereum": "^2.0.1",
     "ipld-git": "~0.2.2",
     "ipld-zcash": "~0.1.6",
-    "ipns": "~0.3.0",
+    "ipns": "~0.4.2",
     "is-ipfs": "~0.4.7",
     "is-pull-stream": "~0.0.0",
     "is-stream": "^1.1.0",
     "joi": "^13.4.0",
     "joi-browser": "^13.4.0",
     "joi-multiaddr": "^3.0.0",
-    "libp2p": "~0.24.0",
+    "libp2p": "libp2p/js-libp2p#master",
     "libp2p-bootstrap": "~0.9.3",
     "libp2p-crypto": "~0.14.1",
-    "libp2p-kad-dht": "~0.11.1",
+    "libp2p-kad-dht": "libp2p/js-libp2p-kad-dht#feat/allow-configurable-validators-and-selectors",
     "libp2p-keychain": "~0.3.3",
     "libp2p-mdns": "~0.12.0",
     "libp2p-mplex": "~0.8.4",
diff --git a/src/core/components/libp2p.js b/src/core/components/libp2p.js
index 87838cb1e7..d195623467 100644
--- a/src/core/components/libp2p.js
+++ b/src/core/components/libp2p.js
@@ -3,6 +3,7 @@
 const promisify = require('promisify-es6')
 const get = require('lodash/get')
 const defaultsDeep = require('@nodeutils/defaults-deep')
+const ipnsUtils = require('../ipns/routing/utils')
 
 module.exports = function libp2p (self) {
   return {
@@ -16,6 +17,7 @@ module.exports = function libp2p (self) {
 
   const defaultBundle = (opts) => {
     const libp2pDefaults = {
+      datastore: opts.datastore,
       peerInfo: opts.peerInfo,
       peerBook: opts.peerBook,
       config: {
@@ -43,6 +45,14 @@ module.exports = function libp2p (self) {
             get(opts.config, 'relay.hop.active', false))
         }
       },
+      dht: {
+        validators: {
+          ipns: ipnsUtils.validator
+        },
+        selectors: {
+          ipns: ipnsUtils.selector
+        }
+      },
       EXPERIMENTAL: {
         dht: get(opts.options, 'EXPERIMENTAL.dht', false),
         pubsub: get(opts.options, 'EXPERIMENTAL.pubsub', false)
@@ -72,6 +82,7 @@ module.exports = function libp2p (self) {
     self._libp2pNode = libp2pBundle({
       options: self._options,
       config: config,
+      datastore: self._repo.datastore,
       peerInfo: self._peerInfo,
       peerBook: self._peerInfoBook
     })
diff --git a/src/core/components/start.js b/src/core/components/start.js
index 518d613b0e..e7c823b2de 100644
--- a/src/core/components/start.js
+++ b/src/core/components/start.js
@@ -2,12 +2,14 @@
 
 const series = require('async/series')
 const Bitswap = require('ipfs-bitswap')
+const get = require('lodash/get')
 const setImmediate = require('async/setImmediate')
 const promisify = require('promisify-es6')
 const { TieredDatastore } = require('datastore-core')
 
 const IPNS = require('../ipns')
 const OfflineDatastore = require('../ipns/routing/offline-datastore')
+const DhtDatastore = require('../ipns/routing/dht-datastore')
 
 module.exports = (self) => {
   return promisify((callback) => {
@@ -43,10 +45,15 @@
 
       // TODO Add IPNS pubsub if enabled
 
-      // NOTE: IPNS routing is being replaced by the local repo datastore while the IPNS over DHT is not ready
-      // When DHT is added, if local option enabled, should receive offlineDatastore as well
-      const offlineDatastore = new OfflineDatastore(self._repo)
-      ipnsStores.push(offlineDatastore)
+      // DHT should be added as routing if we are not running with local flag
+      // TODO: Need to change this logic once DHT is enabled by default, for now fallback to Offline datastore
+      if (get(self._options, 'EXPERIMENTAL.dht', false) && !self._options.local) {
+        const dhtDatastore = new DhtDatastore(self._libp2pNode.dht)
+        ipnsStores.push(dhtDatastore)
+      } else {
+        const offlineDatastore = new OfflineDatastore(self._repo)
+        ipnsStores.push(offlineDatastore)
+      }
 
       // Create ipns routing with a set of datastores
       const routing = new TieredDatastore(ipnsStores)
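Note (reviewer sketch, not part of the patch): with the start.js change above, the IPNS routing becomes a TieredDatastore holding either the new DhtDatastore or the existing OfflineDatastore. The helper below is only an illustration of that composition; the buildIpnsRouting name and the direct option checks are assumptions (start.js itself uses lodash get).

    // Illustrative only: how the IPNS routing stack is composed
    const { TieredDatastore } = require('datastore-core')
    const OfflineDatastore = require('./src/core/ipns/routing/offline-datastore')
    const DhtDatastore = require('./src/core/ipns/routing/dht-datastore')

    function buildIpnsRouting (node) {
      const stores = []

      // Use the DHT when the experiment is enabled and the node is not in --local mode,
      // otherwise fall back to the repo-backed offline datastore
      if (node._options.EXPERIMENTAL && node._options.EXPERIMENTAL.dht && !node._options.local) {
        stores.push(new DhtDatastore(node._libp2pNode.dht))
      } else {
        stores.push(new OfflineDatastore(node._repo))
      }

      // TieredDatastore forwards writes to every store in the tier and serves
      // reads from the first store that can answer
      return new TieredDatastore(stores)
    }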
diff --git a/src/core/ipns/publisher.js b/src/core/ipns/publisher.js
index 9e8fcdf3f3..aa330ce053 100644
--- a/src/core/ipns/publisher.js
+++ b/src/core/ipns/publisher.js
@@ -1,7 +1,6 @@
 'use strict'
 
 const PeerId = require('peer-id')
-const Record = require('libp2p-record').Record
 const { Key } = require('interface-datastore')
 const series = require('async/series')
 const errcode = require('err-code')
@@ -57,7 +56,6 @@
       log.error(errMsg)
       return callback(errcode(new Error(errMsg), 'ERR_INVALID_PEER_ID'))
     }
-
     const publicKey = peerId._pubKey
 
     ipns.embedPublicKey(publicKey, record, (err, embedPublicKeyRecord) => {
@@ -97,19 +95,17 @@
       return callback(errcode(new Error(errMsg), 'ERR_INVALID_DATASTORE_KEY'))
     }
 
-    let rec
+    let entryData
     try {
       // Marshal record
-      const entryData = ipns.marshal(entry)
-      // Marshal to libp2p record
-      rec = new Record(key.toBuffer(), entryData)
+      entryData = ipns.marshal(entry)
     } catch (err) {
       log.error(err)
      return callback(err)
     }
 
     // Add record to routing (buffer key)
-    this._routing.put(key.toBuffer(), rec.serialize(), (err, res) => {
+    this._routing.put(key.toBuffer(), entryData, (err, res) => {
       if (err) {
         const errMsg = `ipns record for ${key.toString()} could not be stored in the routing`
@@ -137,17 +133,8 @@
       return callback(errcode(new Error(errMsg), 'ERR_UNDEFINED_PARAMETER'))
     }
 
-    let rec
-    try {
-      // Marshal to libp2p record
-      rec = new Record(key.toBuffer(), publicKey.bytes)
-    } catch (err) {
-      log.error(err)
-      return callback(err)
-    }
-
     // Add public key to routing (buffer key)
-    this._routing.put(key.toBuffer(), rec.serialize(), (err, res) => {
+    this._routing.put(key.toBuffer(), publicKey.bytes, (err, res) => {
       if (err) {
         const errMsg = `public key for ${key.toString()} could not be stored in the routing`
@@ -174,45 +161,55 @@
     const checkRouting = !(options.checkRouting === false)
 
     this._repo.datastore.get(ipns.getLocalKey(peerId.id), (err, dsVal) => {
-      let result
-
       if (err) {
         if (err.code !== 'ERR_NOT_FOUND') {
           const errMsg = `unexpected error getting the ipns record ${peerId.id} from datastore`
 
           log.error(errMsg)
           return callback(errcode(new Error(errMsg), 'ERR_UNEXPECTED_DATASTORE_RESPONSE'))
-        } else {
-          if (!checkRouting) {
+        }
+
+        if (!checkRouting) {
+          return callback(null, null)
+        }
+
+        // Try to get from routing
+        let keys
+        try {
+          keys = ipns.getIdKeys(peerId.toBytes())
+        } catch (err) {
+          log.error(err)
+          return callback(err)
+        }
+
+        this._routing.get(keys.routingKey, (err, res) => {
+          if (err) {
+            log(`error when determining the last published IPNS record for ${peerId.id}`)
             return callback(null, null)
-          } else {
-            // TODO ROUTING - get from DHT
-            return callback(new Error('not implemented yet'))
           }
-        }
-      }
 
-      if (Buffer.isBuffer(dsVal)) {
-        result = dsVal
+          // unmarshal data
+          this._unmarshalData(res, callback)
+        })
       } else {
-        const errMsg = `found ipns record that we couldn't convert to a value`
-
-        log.error(errMsg)
-        return callback(errcode(new Error(errMsg), 'ERR_INVALID_IPNS_RECORD'))
+        // unmarshal data
+        this._unmarshalData(dsVal, callback)
       }
+    })
+  }
 
-      // unmarshal data
-      try {
-        result = ipns.unmarshal(dsVal)
-      } catch (err) {
-        const errMsg = `found ipns record that we couldn't convert to a value`
+  _unmarshalData (data, callback) {
+    let result
+    try {
+      result = ipns.unmarshal(data)
+    } catch (err) {
+      const errMsg = `found ipns record that we couldn't convert to a value`
 
-        log.error(errMsg)
-        return callback(null, null)
-      }
+      log.error(errMsg)
+      return callback(null, null)
+    }
 
-      callback(null, result)
-    })
+    callback(null, result)
   }
 
   _updateOrCreateRecord (privKey, value, validity, peerId, callback) {
@@ -224,7 +221,7 @@
     }
 
     const getPublishedOptions = {
-      checkRouting: false // TODO ROUTING - change to true
+      checkRouting: true
     }
 
     this._getPublished(peerId, getPublishedOptions, (err, record) => {
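Note (reviewer sketch, not part of the patch): after this change the publisher hands the raw marshalled IPNS entry to the routing layer; wrapping it in a libp2p-record Record now happens inside the individual datastores (see offline-datastore.js further down). A condensed sketch of the put path using only APIs already present in publisher.js; the putIpnsEntry helper name is hypothetical.

    const ipns = require('ipns')

    // Hypothetical helper condensing publisher.js's put path
    function putIpnsEntry (routing, peerId, entry, callback) {
      const { routingKey } = ipns.getIdKeys(peerId.toBytes())

      let entryData
      try {
        // Marshal the entry; no Record wrapping here any more
        entryData = ipns.marshal(entry)
      } catch (err) {
        return callback(err)
      }

      // The tiered routing (DHT or offline datastore) receives the raw bytes
      routing.put(routingKey.toBuffer(), entryData, callback)
    }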
diff --git a/src/core/ipns/resolver.js b/src/core/ipns/resolver.js
index f9fb81ccd5..f1f1931fd6 100644
--- a/src/core/ipns/resolver.js
+++ b/src/core/ipns/resolver.js
@@ -1,9 +1,10 @@
 'use strict'
 
 const ipns = require('ipns')
-const Record = require('libp2p-record').Record
+const crypto = require('libp2p-crypto')
 const PeerId = require('peer-id')
 const errcode = require('err-code')
+const parallel = require('async/parallel')
 
 const debug = require('debug')
 const log = debug('jsipfs:ipns:resolver')
@@ -97,13 +98,14 @@
       return callback(err)
     }
 
-    const { routingKey } = ipns.getIdKeys(peerId.toBytes())
+    const { routingKey, routingPubKey } = ipns.getIdKeys(peerId.toBytes())
 
-    // TODO DHT - get public key from routing?
-    // https://github.com/ipfs/go-ipfs/blob/master/namesys/routing.go#L70
-    // https://github.com/libp2p/go-libp2p-routing/blob/master/routing.go#L99
-
-    this._routing.get(routingKey.toBuffer(), (err, res) => {
+    parallel([
+      // Name should be the hash of a public key retrievable from ipfs.
+      // We retrieve public key to add it to the PeerId, as the IPNS record may not have it.
+      (cb) => this._routing.get(routingPubKey.toBuffer(), cb),
+      (cb) => this._routing.get(routingKey.toBuffer(), cb)
+    ], (err, res) => {
       if (err) {
         if (err.code !== 'ERR_NOT_FOUND') {
           const errMsg = `unexpected error getting the ipns record ${peerId.id}`
@@ -117,10 +119,21 @@
         return callback(errcode(new Error(errMsg), 'ERR_NO_RECORD_FOUND'))
       }
 
+      // Public key
+      try {
+        // Insert it into the peer id public key, to be validated by IPNS validator
+        peerId.pubKey = crypto.keys.unmarshalPublicKey(res[0])
+      } catch (err) {
+        const errMsg = `found public key record that we couldn't convert to a value`
+
+        log.error(errMsg)
+        return callback(errcode(new Error(errMsg), 'ERR_INVALID_PUB_KEY_RECEIVED'))
+      }
+
+      // IPNS entry
       let ipnsEntry
       try {
-        const record = Record.deserialize(res)
-        ipnsEntry = ipns.unmarshal(record.value)
+        ipnsEntry = ipns.unmarshal(res[1])
       } catch (err) {
         const errMsg = `found ipns record that we couldn't convert to a value`
diff --git a/src/core/ipns/routing/dht-datastore.js b/src/core/ipns/routing/dht-datastore.js
new file mode 100644
index 0000000000..9746f2184d
--- /dev/null
+++ b/src/core/ipns/routing/dht-datastore.js
@@ -0,0 +1,42 @@
+'use strict'
+
+// const { Key } = require('interface-datastore')
+// const { encodeBase32 } = require('./utils')
+
+// Dht datastore sets the proper encoding for storing records
+class DhtDatastore {
+  constructor (dht) {
+    this._dht = dht
+  }
+
+  /**
+   * Put a value to the dht indexed by the received key properly encoded.
+   * @param {Buffer} key identifier of the value.
+   * @param {Buffer} value value to be stored.
+   * @param {function(Error)} callback
+   * @returns {void}
+   */
+  put (key, value, callback) {
+    // encode key properly - base32(/ipns/{cid}) TODO: remove this
+    // const routingKey = new Key('/' + encodeBase32(key), false)
+
+    this._dht.put(key, value, callback)
+    // this._dht.put(routingKey.toBuffer(), value, callback)
+  }
+
+  /**
+   * Get a value from the local datastore indexed by the received key properly encoded.
+   * @param {Buffer} key identifier of the value to be obtained.
+   * @param {function(Error, Buffer)} callback
+   * @returns {void}
+   */
+  get (key, callback) {
+    // encode key properly - base32(/ipns/{cid}) TODO: remove this
+    // const routingKey = new Key('/' + encodeBase32(key), false)
+
+    this._dht.get(key, callback)
+    // this._dht.get(routingKey.toBuffer(), callback)
+  }
+}
+
+exports = module.exports = DhtDatastore
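Note (reviewer sketch, not part of the patch): resolver.js above now fetches the public key record and the IPNS record in parallel and attaches the key to the PeerId before validation. A minimal standalone sketch of that flow, assuming the same ipns, libp2p-crypto and async/parallel APIs the resolver already uses; getRecordAndPubKey is a hypothetical name.

    const parallel = require('async/parallel')
    const ipns = require('ipns')
    const crypto = require('libp2p-crypto')

    function getRecordAndPubKey (routing, peerId, callback) {
      const { routingKey, routingPubKey } = ipns.getIdKeys(peerId.toBytes())

      parallel([
        (cb) => routing.get(routingPubKey.toBuffer(), cb),
        (cb) => routing.get(routingKey.toBuffer(), cb)
      ], (err, res) => {
        if (err) {
          return callback(err)
        }

        try {
          // res[0] is the serialized public key, res[1] the marshalled IPNS entry
          peerId.pubKey = crypto.keys.unmarshalPublicKey(res[0])
          callback(null, ipns.unmarshal(res[1]))
        } catch (err) {
          callback(err)
        }
      })
    }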
diff --git a/src/core/ipns/routing/offline-datastore.js b/src/core/ipns/routing/offline-datastore.js
index 26de52528c..84a49545eb 100644
--- a/src/core/ipns/routing/offline-datastore.js
+++ b/src/core/ipns/routing/offline-datastore.js
@@ -1,6 +1,7 @@
 'use strict'
 
 const { Key } = require('interface-datastore')
+const Record = require('libp2p-record').Record
 const { encodeBase32 } = require('./utils')
 
 const errcode = require('err-code')
@@ -48,7 +49,10 @@
       return callback(errcode(new Error(errMsg), 'ERR_GENERATING_ROUTING_KEY'))
     }
 
-    this._repo.datastore.put(routingKey, value, callback)
+    // Marshal to libp2p record as the DHT does
+    let record = new Record(key, value)
+
+    this._repo.datastore.put(routingKey, record.serialize(), callback)
   }
 
   /**
@@ -76,7 +80,22 @@
       return callback(errcode(new Error(errMsg), 'ERR_GENERATING_ROUTING_KEY'))
     }
 
-    this._repo.datastore.get(routingKey, callback)
+    this._repo.datastore.get(routingKey, (err, res) => {
+      if (err) {
+        return callback(err)
+      }
+
+      // Unmarshal libp2p record as the DHT does
+      let record
+      try {
+        record = Record.deserialize(res)
+      } catch (err) {
+        log.error(err)
+        return callback(err)
+      }
+
+      callback(null, record.value)
+    })
   }
 
   // encode key properly - base32(/ipns/{cid})
diff --git a/src/core/ipns/routing/utils.js b/src/core/ipns/routing/utils.js
index baa9ac16aa..958a82b954 100644
--- a/src/core/ipns/routing/utils.js
+++ b/src/core/ipns/routing/utils.js
@@ -1,7 +1,12 @@
 'use strict'
 
 const multibase = require('multibase')
+const ipns = require('ipns')
 
-module.exports.encodeBase32 = (buf) => {
-  return multibase.encode('base32', buf).slice(1) // slice off multibase codec
+module.exports = {
+  encodeBase32: (buf) => multibase.encode('base32', buf).slice(1), // slice off multibase codec
+  validator: {
+    func: (key, record, cb) => ipns.validator.validate(record, key, cb)
+  },
+  selector: (k, records) => ipns.validator.select(records[0], records[1])
 }
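Note (reviewer sketch, not part of the patch): the validator and selector exported from routing/utils.js are the hooks handed to the configurable kad-dht (see the libp2p.js hunk above). The sketch below only shows their calling convention; pickValidRecord and its arguments are hypothetical, and it assumes ipns.validator.select returns the index (0 or 1) of the record to keep.

    const routingUtils = require('./src/core/ipns/routing/utils')

    // key: the /ipns/<peer-id> routing key, records: two marshalled IPNS entries
    function pickValidRecord (key, records, callback) {
      // validator.func(key, record, cb) delegates to ipns.validator.validate
      routingUtils.validator.func(key, records[0], (err) => {
        if (err) {
          return callback(err)
        }

        // selector decides which of the competing records should win
        const best = routingUtils.selector(key, records)
        callback(null, records[best])
      })
    }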
diff --git a/test/cli/name.js b/test/cli/name.js
index 3acfb7b089..20aaca8400 100644
--- a/test/cli/name.js
+++ b/test/cli/name.js
@@ -16,169 +16,282 @@ const df = DaemonFactory.create({ type: 'js' })
 const checkAll = (bits) => string => bits.every(bit => string.includes(bit))
 
 describe('name', () => {
-  const passPhrase = hat()
-  const pass = '--pass ' + passPhrase
-  const name = 'test-key-' + hat()
-
-  let ipfs
-  let ipfsd
-
-  let cidAdded
-  let nodeId
-  let keyId
-
-  before(function (done) {
-    this.timeout(80 * 1000)
-
-    df.spawn({
-      exec: `./src/cli/bin.js`,
-      config: {
-        Bootstrap: []
-      },
-      args: ['--pass', passPhrase, '--local'],
-      initOptions: { bits: 512 }
-    }, (err, _ipfsd) => {
-      expect(err).to.not.exist()
-
-      ipfsd = _ipfsd
-      ipfs = ipfsExec(_ipfsd.repoPath)
-
-      ipfs(`${pass} key gen ${name} --type rsa --size 2048`).then((out) => {
-        expect(out).to.include(name)
-
-        keyId = out.split(' ')[1]
-
-        ipfs('id').then((res) => {
-          const id = JSON.parse(res)
-
-          expect(id).to.have.property('id')
-          nodeId = id.id
-
-          ipfs('add src/init-files/init-docs/readme').then((out) => {
-            cidAdded = out.split(' ')[1]
-            done()
-          })
-        })
-      })
-    })
-  })
-
-  after(function (done) {
-    if (ipfsd) {
-      ipfsd.stop(() => done())
-    } else {
-      done()
-    }
-  })
-
-  it('should publish correctly when the file was already added', function (done) {
-    this.timeout(60 * 1000)
-
-    ipfs(`name publish ${cidAdded}`).then((res) => {
-      expect(res).to.exist()
-      expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
-
-      done()
-    })
-  })
-
-  it('should publish and resolve an entry with the default options', function (done) {
-    this.timeout(60 * 1000)
-
-    ipfs(`name publish ${cidAdded}`).then((res) => {
-      expect(res).to.exist()
-
-      ipfs('name resolve').then((res) => {
-        expect(res).to.exist()
-        expect(res).to.satisfy(checkAll([cidAdded]))
-
-        done()
-      })
-    })
-  })
-
-  it('should publish correctly when the file was not added but resolve is disabled', function (done) {
-    this.timeout(60 * 1000)
-
-    const notAddedCid = 'QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU'
-
-    ipfs(`name publish ${notAddedCid} --resolve false`).then((res) => {
-      expect(res).to.exist()
-      expect(res).to.satisfy(checkAll([notAddedCid, nodeId]))
-
-      done()
-    })
-  })
+  describe('working locally', () => {
+    const passPhrase = hat()
+    const pass = '--pass ' + passPhrase
+    const name = 'test-key-' + hat()
+
+    let ipfs
+    let ipfsd
+
+    let cidAdded
+    let nodeId
+    let keyId
+
+    before(function (done) {
+      this.timeout(80 * 1000)
+
+      df.spawn({
+        exec: `./src/cli/bin.js`,
+        config: {
+          Bootstrap: []
+        },
+        args: ['--pass', passPhrase, '--local'],
+        initOptions: { bits: 512 }
+      }, (err, _ipfsd) => {
+        expect(err).to.not.exist()
+
+        ipfsd = _ipfsd
+        ipfs = ipfsExec(_ipfsd.repoPath)
+
+        ipfs(`${pass} key gen ${name} --type rsa --size 2048`)
+          .then((out) => {
+            expect(out).to.include(name)
+            keyId = out.split(' ')[1]
+
+            return ipfs('id')
+          })
+          .then((res) => {
+            const id = JSON.parse(res)
+            expect(id).to.have.property('id')
+            nodeId = id.id
+
+            return ipfs('add src/init-files/init-docs/readme')
+          })
+          .then((out) => {
+            cidAdded = out.split(' ')[1]
+            done()
+          })
+      })
+    })
+
+    after(function (done) {
+      if (ipfsd) {
+        ipfsd.stop(() => done())
+      } else {
+        done()
+      }
+    })
+
+    it('should publish correctly when the file was already added', function () {
+      this.timeout(60 * 1000)
+
+      return ipfs(`name publish ${cidAdded}`).then((res) => {
+        expect(res).to.exist()
+        expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
+      })
+    })
+
+    it('should publish and resolve an entry with the default options', function () {
+      this.timeout(60 * 1000)
+
+      return ipfs(`name publish ${cidAdded}`)
+        .then((res) => {
+          expect(res).to.exist()
+
+          return ipfs('name resolve')
+        })
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([cidAdded]))
+        })
+    })
+
+    it('should publish correctly when the file was not added but resolve is disabled', function () {
+      this.timeout(60 * 1000)
+
+      const notAddedCid = 'QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU'
+
+      return ipfs(`name publish ${notAddedCid} --resolve false`).then((res) => {
+        expect(res).to.exist()
+        expect(res).to.satisfy(checkAll([notAddedCid, nodeId]))
+      })
+    })
+
+    it('should not get the entry correctly if its validity time expired', function () {
+      this.timeout(60 * 1000)
+
+      return ipfs(`name publish ${cidAdded} --lifetime 10ns`)
+        .then((res) => {
+          expect(res).to.exist()
+
+          setTimeout(function () {
+            return ipfs('name resolve')
+              .then((res) => {
+                expect(res).to.not.exist()
+              })
+              .catch((err) => {
+                expect(err).to.exist()
+              })
+          }, 1)
+        })
+    })
+
+    it('should publish correctly when a new key is used', function () {
+      this.timeout(60 * 1000)
+
+      return ipfs(`name publish ${cidAdded} --key ${name}`).then((res) => {
+        expect(res).to.exist()
+        expect(res).to.satisfy(checkAll([cidAdded, keyId]))
+      })
+    })
+
+    it('should return the immediate pointing record, unless using the recursive parameter', function () {
+      this.timeout(60 * 1000)
+
+      return ipfs(`name publish ${cidAdded}`)
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
+
+          return ipfs(`name publish /ipns/${nodeId} --key ${name}`)
+        })
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([nodeId, keyId]))
+
+          return ipfs(`name resolve ${keyId}`)
+        })
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([nodeId]))
+        })
+    })
+
+    it('should go recursively until finding an ipfs hash', function () {
+      this.timeout(60 * 1000)
+
+      return ipfs(`name publish ${cidAdded}`)
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
+
+          return ipfs(`name publish /ipns/${nodeId} --key ${name}`)
+        })
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([nodeId, keyId]))
+
+          return ipfs(`name resolve ${keyId} --recursive`)
+        })
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([cidAdded]))
+        })
+    })
+  })
-  it('should not get the entry correctly if its validity time expired', function (done) {
-    this.timeout(60 * 1000)
-
-    ipfs(`name publish ${cidAdded} --lifetime 10ns`).then((res) => {
-      expect(res).to.exist()
-
-      setTimeout(function () {
-        ipfs('name resolve')
-          .then((res) => {
-            expect(res).to.not.exist()
-          })
-          .catch((err) => {
-            expect(err).to.exist()
-            done()
-          })
-      }, 1)
-    })
-  })
-
-  it('should publish correctly when a new key is used', function (done) {
-    this.timeout(60 * 1000)
-
-    ipfs(`name publish ${cidAdded} --key ${name}`).then((res) => {
-      expect(res).to.exist()
-      expect(res).to.satisfy(checkAll([cidAdded, keyId]))
-
-      done()
-    })
-  })
-
-  it('should return the immediate pointing record, unless using the recursive parameter', function (done) {
-    this.timeout(60 * 1000)
-
-    ipfs(`name publish ${cidAdded}`).then((res) => {
-      expect(res).to.exist()
-      expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
-
-      ipfs(`name publish /ipns/${nodeId} --key ${name}`).then((res) => {
-        expect(res).to.exist()
-        expect(res).to.satisfy(checkAll([nodeId, keyId]))
-
-        ipfs(`name resolve ${keyId}`).then((res) => {
-          expect(res).to.exist()
-          expect(res).to.satisfy(checkAll([nodeId]))
-
-          done()
-        })
-      })
-    })
-  })
-
-  it('should go recursively until finding an ipfs hash', function (done) {
-    this.timeout(60 * 1000)
-
-    ipfs(`name publish ${cidAdded}`).then((res) => {
-      expect(res).to.exist()
-      expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
-
-      ipfs(`name publish /ipns/${nodeId} --key ${name}`).then((res) => {
-        expect(res).to.exist()
-        expect(res).to.satisfy(checkAll([nodeId, keyId]))
-
-        ipfs(`name resolve ${keyId} --recursive`).then((res) => {
-          expect(res).to.exist()
-          expect(res).to.satisfy(checkAll([cidAdded]))
-
-          done()
-        })
-      })
-    })
-  })
+
+  describe('using dht', () => {
+    const passPhrase = hat()
+    const pass = '--pass ' + passPhrase
+    const name = 'test-key-' + hat()
+
+    let ipfs
+    let ipfsd
+
+    let cidAdded
+    let nodeId
+    let keyId
+
+    before(function (done) {
+      this.timeout(80 * 1000)
+
+      df.spawn({
+        exec: `./src/cli/bin.js`,
+        config: {
+          Bootstrap: []
+        },
+        args: ['--pass', passPhrase, '--enable-dht-experiment'],
+        initOptions: { bits: 512 }
+      }, (err, _ipfsd) => {
+        expect(err).to.not.exist()
+
+        ipfsd = _ipfsd
+        ipfs = ipfsExec(_ipfsd.repoPath)
+
+        ipfs(`${pass} key gen ${name} --type rsa --size 2048`)
+          .then((out) => {
+            expect(out).to.include(name)
+            keyId = out.split(' ')[1]
+
+            return ipfs('id')
+          })
+          .then((res) => {
+            const id = JSON.parse(res)
+            expect(id).to.have.property('id')
+            nodeId = id.id
+
+            return ipfs('add src/init-files/init-docs/readme')
+          })
+          .then((out) => {
+            cidAdded = out.split(' ')[1]
+            done()
+          })
+      })
+    })
+
+    after(function (done) {
+      if (ipfsd) {
+        ipfsd.stop(() => done())
+      } else {
+        done()
+      }
+    })
+
+    it('should publish and resolve an entry with the default options', function () {
+      this.timeout(60 * 1000)
+
+      return ipfs(`name publish ${cidAdded}`)
+        .then((res) => {
+          expect(res).to.exist()
+
+          return ipfs('name resolve')
+        })
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([cidAdded]))
+        })
+    })
+
+    it('should not get the entry correctly if its validity time expired', function () {
+      this.timeout(60 * 1000)
+
+      return ipfs(`name publish ${cidAdded} --lifetime 10ns`)
+        .then((res) => {
+          expect(res).to.exist()
+
+          setTimeout(function () {
+            return ipfs('name resolve')
+              .then((res) => {
+                expect(res).to.not.exist()
+              })
+              .catch((err) => {
+                expect(err).to.exist()
+              })
+          }, 1)
+        })
+    })
+
+    it('should return the immediate pointing record, unless using the recursive parameter', function () {
+      this.timeout(60 * 1000)
+
+      return ipfs(`name publish ${cidAdded}`)
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([cidAdded, nodeId]))
+
+          return ipfs(`name publish /ipns/${nodeId} --key ${name}`)
+        })
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([nodeId, keyId]))
+
+          return ipfs(`name resolve ${keyId}`)
+        })
+        .then((res) => {
+          expect(res).to.exist()
+          expect(res).to.satisfy(checkAll([nodeId]))
+        })
+    })
+  })
 })
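Note (reviewer sketch, not part of the patch): the 'using dht' CLI tests above depend on spawning the daemon with the --enable-dht-experiment flag. A stripped-down sketch of that spawn, reusing the ipfsd-ctl options already shown in the tests; the pass phrase value is illustrative.

    const DaemonFactory = require('ipfsd-ctl')
    const df = DaemonFactory.create({ type: 'js' })

    df.spawn({
      exec: './src/cli/bin.js',
      config: { Bootstrap: [] },
      args: ['--pass', 'example-pass-phrase', '--enable-dht-experiment'],
      initOptions: { bits: 512 }
    }, (err, ipfsd) => {
      if (err) {
        throw err
      }

      // IPNS publishes on this daemon now go through the DHT-backed routing
      ipfsd.stop(() => {})
    })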
diff --git a/test/core/name.js b/test/core/name.js
index 08bb9b06da..7aa11ecdfe 100644
--- a/test/core/name.js
+++ b/test/core/name.js
@@ -10,6 +10,8 @@ chai.use(dirtyChai)
 const sinon = require('sinon')
 const fs = require('fs')
 
+const parallel = require('async/parallel')
+const series = require('async/series')
 const isNode = require('detect-node')
 const IPFS = require('../../src')
 
@@ -21,6 +23,19 @@ const df = DaemonFactory.create({ type: 'proc' })
 
 const ipfsRef = '/ipfs/QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU'
 
+const publishAndResolve = (publisher, resolver, ipfsRef, publishOpts, nodeId, resolveOpts, callback) => {
+  series([
+    (cb) => publisher.name.publish(ipfsRef, publishOpts, cb),
+    (cb) => resolver.name.resolve(nodeId, resolveOpts, cb)
+  ], (err, res) => {
+    expect(err).to.not.exist()
+    expect(res[0]).to.exist()
+    expect(res[1]).to.exist()
+    expect(res[1].path).to.equal(ipfsRef)
+    callback()
+  })
+}
+
 describe('name', function () {
   if (!isNode) {
     return
@@ -54,31 +69,16 @@
     after((done) => ipfsd.stop(done))
 
     it('should publish and then resolve correctly with the default options', function (done) {
-      node.name.publish(ipfsRef, { resolve: false }, (err, res) => {
-        expect(err).to.not.exist()
-        expect(res).to.exist()
-
-        node.name.resolve(nodeId, (err, res) => {
-          expect(err).to.not.exist()
-          expect(res).to.exist()
-          expect(res.path).to.equal(ipfsRef)
-          done()
-        })
-      })
+      publishAndResolve(node, node, ipfsRef, { resolve: false }, nodeId, {}, done)
     })
 
     it('should publish correctly with the lifetime option and resolve', function (done) {
-      node.name.publish(ipfsRef, { resolve: false, lifetime: '2h' }, (err, res) => {
-        expect(err).to.not.exist()
-        expect(res).to.exist()
+      const publishOpts = {
+        resolve: false,
+        lifetime: '2h'
+      }
 
-        node.name.resolve(nodeId, (err, res) => {
-          expect(err).to.not.exist()
-          expect(res).to.exist()
-          expect(res.path).to.equal(ipfsRef)
-          done()
-        })
-      })
+      publishAndResolve(node, node, ipfsRef, publishOpts, nodeId, {}, done)
     })
 
     it('should not get the entry correctly if its validity time expired', function (done) {
@@ -101,20 +101,15 @@
 
       node.key.gen(keyName, { type: 'rsa', size: 2048 }, function (err, key) {
         expect(err).to.not.exist()
-
-        node.name.publish(ipfsRef, { resolve: false }, (err) => {
+        series([
+          (cb) => node.name.publish(ipfsRef, { resolve: false }, cb),
+          (cb) => node.name.publish(`/ipns/${nodeId}`, { resolve: false, key: keyName }, cb),
+          (cb) => node.name.resolve(key.id, { recursive: true }, cb)
+        ], (err, res) => {
           expect(err).to.not.exist()
-
-          node.name.publish(`/ipns/${nodeId}`, { resolve: false, key: keyName }, (err) => {
-            expect(err).to.not.exist()
-
-            node.name.resolve(key.id, { recursive: true }, (err, res) => {
-              expect(err).to.not.exist()
-              expect(res).to.exist()
-              expect(res.path).to.equal(ipfsRef)
-              done()
-            })
-          })
+          expect(res[2]).to.exist()
+          expect(res[2].path).to.equal(ipfsRef)
+          done()
         })
       })
     })
@@ -125,20 +120,15 @@
 
       node.key.gen(keyName, { type: 'rsa', size: 2048 }, function (err, key) {
         expect(err).to.not.exist()
-
-        node.name.publish(ipfsRef, { resolve: false }, (err) => {
+        series([
+          (cb) => node.name.publish(ipfsRef, { resolve: false }, cb),
+          (cb) => node.name.publish(`/ipns/${nodeId}`, { resolve: false, key: keyName }, cb),
+          (cb) => node.name.resolve(key.id, cb)
+        ], (err, res) => {
          expect(err).to.not.exist()
-
-          node.name.publish(`/ipns/${nodeId}`, { resolve: false, key: keyName }, (err) => {
-            expect(err).to.not.exist()
-
-            node.name.resolve(key.id, (err, res) => {
-              expect(err).to.not.exist()
-              expect(res).to.exist()
-              expect(res.path).to.equal(`/ipns/${nodeId}`)
-              done()
-            })
-          })
+          expect(res[2]).to.exist()
+          expect(res[2].path).to.equal(`/ipns/${nodeId}`)
+          done()
        })
       })
     })
@@ -197,6 +187,78 @@
     })
   })
 
+  describe('work with dht', () => {
+    let nodes
+    let nodeA
+    let nodeB
+    let nodeC
+    let idA
+
+    const createNode = (callback) => {
+      df.spawn({
+        exec: IPFS,
+        args: [`--pass ${hat()}`, '--enable-dht-experiment'],
+        config: { Bootstrap: [] }
+      }, callback)
+    }
+
+    before(function (done) {
+      this.timeout(40 * 1000)
+
+      parallel([
+        (cb) => createNode(cb),
+        (cb) => createNode(cb),
+        (cb) => createNode(cb)
+      ], (err, _nodes) => {
+        expect(err).to.not.exist()
+
+        nodes = _nodes
+        nodeA = _nodes[0].api
+        nodeB = _nodes[1].api
+        nodeC = _nodes[2].api
+
+        parallel([
+          (cb) => nodeA.id(cb),
+          (cb) => nodeB.id(cb)
+        ], (err, ids) => {
+          expect(err).to.not.exist()
+
+          idA = ids[0]
+          parallel([
+            (cb) => nodeC.swarm.connect(ids[0].addresses[0], cb), // C => A
+            (cb) => nodeC.swarm.connect(ids[1].addresses[0], cb) // C => B
+          ], done)
+        })
+      })
+    })
+
+    after((done) => parallel(nodes.map((node) => (cb) => node.stop(cb)), done))
+
+    it('should publish and then resolve correctly with the default options', function (done) {
+      this.timeout(50 * 1000)
+      publishAndResolve(nodeA, nodeB, ipfsRef, { resolve: false }, idA.id, {}, done)
+    })
+
+    it('should recursively resolve to an IPFS hash', function (done) {
+      this.timeout(80 * 1000)
+      const keyName = hat()
+
+      nodeA.key.gen(keyName, { type: 'rsa', size: 2048 }, function (err, key) {
+        expect(err).to.not.exist()
+        series([
+          (cb) => nodeA.name.publish(ipfsRef, { resolve: false }, cb),
+          (cb) => nodeA.name.publish(`/ipns/${idA.id}`, { resolve: false, key: keyName }, cb),
+          (cb) => nodeB.name.resolve(key.id, { recursive: true }, cb)
+        ], (err, res) => {
+          expect(err).to.not.exist()
+          expect(res[2]).to.exist()
+          expect(res[2].path).to.equal(ipfsRef)
+          done()
+        })
+      })
+    })
+  })
+
   describe('errors', function () {
     if (!isNode) {
       return
@@ -354,7 +416,7 @@
       node.name.resolve(nodeId, { nocache: true }, (err, res) => {
         expect(err).to.exist()
-        expect(err.code).to.equal('ERR_INVALID_RECORD_RECEIVED')
+        expect(err.code).to.equal('ERR_INVALID_PUB_KEY_RECEIVED')
         stub.restore()
         done()
       })
     })
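Note (reviewer sketch, not part of the patch): the expected error code in the last hunk changes because the resolver now fetches and unmarshals the public key record before the IPNS entry, so a garbage routing response surfaces as ERR_INVALID_PUB_KEY_RECEIVED. Relatedly, the offline datastore now stores exactly what a DHT peer would: a serialized libp2p-record wrapping the marshalled entry. A small sketch of that wrap/unwrap symmetry, using the same libp2p-record API as offline-datastore.js; the helper names are hypothetical.

    const Record = require('libp2p-record').Record

    // What OfflineDatastore.put now writes for a given key/value pair
    function wrapForStorage (key, marshalledEntry) {
      return new Record(key, marshalledEntry).serialize()
    }

    // What OfflineDatastore.get now returns to its caller
    function unwrapFromStorage (buf) {
      return Record.deserialize(buf).value
    }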