diff --git a/package.json b/package.json index d29278f6de..2ff7382cb0 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,6 @@ "main": "src/core/index.js", "browser": { "./src/core/runtime/init-assets-nodejs.js": "./src/core/runtime/init-assets-browser.js", - "./src/core/runtime/add-from-fs-nodejs.js": "./src/core/runtime/add-from-fs-browser.js", "./src/core/runtime/config-nodejs.js": "./src/core/runtime/config-browser.js", "./src/core/runtime/dns-nodejs.js": "./src/core/runtime/dns-browser.js", "./src/core/runtime/libp2p-nodejs.js": "./src/core/runtime/libp2p-browser.js", @@ -96,7 +95,6 @@ "glob": "^7.1.3", "hapi-pino": "^6.1.0", "hashlru": "^2.3.0", - "human-to-milliseconds": "^2.0.0", "interface-datastore": "~0.8.0", "ipfs-bitswap": "^0.26.0", "ipfs-block": "~0.8.1", @@ -162,6 +160,7 @@ "node-fetch": "^2.3.0", "p-iteration": "^1.1.8", "p-queue": "^6.1.0", + "parse-duration": "^0.1.2", "peer-book": "^0.9.1", "peer-id": "~0.12.2", "peer-info": "~0.15.1", diff --git a/src/core/components/add/index.js b/src/core/components/add/index.js index ee655817e1..872ffbacd6 100644 --- a/src/core/components/add/index.js +++ b/src/core/components/add/index.js @@ -5,7 +5,7 @@ const normaliseAddInput = require('ipfs-utils/src/files/normalise-input') const { parseChunkerString } = require('./utils') const pipe = require('it-pipe') -module.exports = ({ ipld, dag, gcLock, preload, pin, constructorOptions }) => { +module.exports = ({ ipld, dag, gcLock, preload, pin, options: constructorOptions }) => { const isShardingEnabled = constructorOptions.EXPERIMENTAL && constructorOptions.EXPERIMENTAL.sharding return async function * add (source, options) { options = options || {} diff --git a/src/core/components/files-regular/cat-async-iterator.js b/src/core/components/cat.js similarity index 64% rename from src/core/components/files-regular/cat-async-iterator.js rename to src/core/components/cat.js index 6b7f1af116..14a85978d4 100644 --- a/src/core/components/files-regular/cat-async-iterator.js +++ b/src/core/components/cat.js @@ -1,20 +1,20 @@ 'use strict' const exporter = require('ipfs-unixfs-exporter') -const { normalizePath } = require('./utils') +const { normalizeCidPath } = require('../utils') -module.exports = function (self) { - return async function * catAsyncIterator (ipfsPath, options) { +module.exports = function ({ ipld, preload }) { + return async function * cat (ipfsPath, options) { options = options || {} - ipfsPath = normalizePath(ipfsPath) + ipfsPath = normalizeCidPath(ipfsPath) if (options.preload !== false) { const pathComponents = ipfsPath.split('/') - self._preload(pathComponents[0]) + preload(pathComponents[0]) } - const file = await exporter(ipfsPath, self._ipld, options) + const file = await exporter(ipfsPath, ipld, options) // File may not have unixfs prop if small & imported with rawLeaves true if (file.unixfs && file.unixfs.type.includes('dir')) { diff --git a/src/core/components/dns.js b/src/core/components/dns.js index 380be30329..3769d4f14e 100644 --- a/src/core/components/dns.js +++ b/src/core/components/dns.js @@ -2,7 +2,6 @@ // dns-nodejs gets replaced by dns-browser when webpacked/browserified const dns = require('../runtime/dns-nodejs') -const callbackify = require('callbackify') function fqdnFixups (domain) { // Allow resolution of .eth names via .eth.link @@ -14,7 +13,7 @@ function fqdnFixups (domain) { } module.exports = () => { - return callbackify.variadic(async (domain, opts) => { // eslint-disable-line require-await + return async (domain, opts) => { // 
eslint-disable-line require-await opts = opts || {} if (typeof domain !== 'string') { @@ -24,5 +23,5 @@ module.exports = () => { domain = fqdnFixups(domain) return dns(domain, opts) - }) + } } diff --git a/src/core/components/files-regular/add-async-iterator.js b/src/core/components/files-regular/add-async-iterator.js deleted file mode 100644 index f69d7268f6..0000000000 --- a/src/core/components/files-regular/add-async-iterator.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict' - -const createAdd = require('../../components-ipfsx/add') - -module.exports = function (self) { - const { - _ipld: ipld, - dag, - _gcLock: gcLock, - _preload: preload, - pin, - _options: config - } = self - - const add = createAdd({ ipld, dag, gcLock, preload, pin, config }) - - return async function * addAsyncIterator (source, options) { - options = options || {} - - for await (const file of add(source, options)) { - yield { hash: file.cid.toString(), ...file } - } - } -} diff --git a/src/core/components/files-regular/add-from-fs.js b/src/core/components/files-regular/add-from-fs.js deleted file mode 100644 index 409c4e8d7e..0000000000 --- a/src/core/components/files-regular/add-from-fs.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = (self) => require('../../runtime/add-from-fs-nodejs')(self) diff --git a/src/core/components/files-regular/add-from-stream.js b/src/core/components/files-regular/add-from-stream.js deleted file mode 100644 index 6925dce2ba..0000000000 --- a/src/core/components/files-regular/add-from-stream.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = self => require('./add')(self) diff --git a/src/core/components/files-regular/add-from-url.js b/src/core/components/files-regular/add-from-url.js deleted file mode 100644 index bc12850cbe..0000000000 --- a/src/core/components/files-regular/add-from-url.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict' - -const { URL } = require('iso-url') -const nodeify = require('promise-nodeify') -const { default: ky } = require('ky-universal') - -module.exports = (ipfs) => { - const addFromURL = async (url, opts) => { - opts = opts || {} - const res = await ky.get(url) - const path = decodeURIComponent(new URL(res.url).pathname.split('/').pop()) - const content = Buffer.from(await res.arrayBuffer()) - return ipfs.add({ content, path }, opts) - } - - return (name, opts, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - return nodeify(addFromURL(name, opts), cb) - } -} diff --git a/src/core/components/files-regular/add-pull-stream.js b/src/core/components/files-regular/add-pull-stream.js deleted file mode 100644 index e3c1519531..0000000000 --- a/src/core/components/files-regular/add-pull-stream.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function addPullStream (options) { - return toPullStream.transform((source) => { - return self._addAsyncIterator(source, options) - }) - } -} diff --git a/src/core/components/files-regular/add-readable-stream.js b/src/core/components/files-regular/add-readable-stream.js deleted file mode 100644 index 914a1fd9fb..0000000000 --- a/src/core/components/files-regular/add-readable-stream.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function addReadableStream (options) { - return toStream.transform(source => { - return self._addAsyncIterator(source, options) - }, { - 
objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/add.js b/src/core/components/files-regular/add.js deleted file mode 100644 index dcf9e7bf52..0000000000 --- a/src/core/components/files-regular/add.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict' - -const all = require('async-iterator-all') - -module.exports = function (self) { - // can't use callbackify because if `data` is a pull stream - // it thinks we are passing a callback. This is why we can't have nice things. - return function add (data, options, callback) { - if (!callback && typeof options === 'function') { - callback = options - options = {} - } - - const result = all(self._addAsyncIterator(data, options)) - - if (!callback) { - return result - } - - result.then((result) => callback(null, result), callback) - } -} diff --git a/src/core/components/files-regular/cat-pull-stream.js b/src/core/components/files-regular/cat-pull-stream.js deleted file mode 100644 index 5ad3f79411..0000000000 --- a/src/core/components/files-regular/cat-pull-stream.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function catPullStream (ipfsPath, options) { - return toPullStream.source(self._catAsyncIterator(ipfsPath, options)) - } -} diff --git a/src/core/components/files-regular/cat-readable-stream.js b/src/core/components/files-regular/cat-readable-stream.js deleted file mode 100644 index 70df087f16..0000000000 --- a/src/core/components/files-regular/cat-readable-stream.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function catReadableStream (ipfsPath, options) { - return toStream.readable(self._catAsyncIterator(ipfsPath, options), { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/cat.js b/src/core/components/files-regular/cat.js deleted file mode 100644 index 656946ad03..0000000000 --- a/src/core/components/files-regular/cat.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function cat (ipfsPath, options) { - return Buffer.concat(await all(self._catAsyncIterator(ipfsPath, options))) - }) -} diff --git a/src/core/components/files-regular/get-pull-stream.js b/src/core/components/files-regular/get-pull-stream.js deleted file mode 100644 index fa769aa89c..0000000000 --- a/src/core/components/files-regular/get-pull-stream.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') -const pull = require('pull-stream/pull') -const map = require('pull-stream/throughs/map') - -module.exports = function (self) { - return function getPullStream (ipfsPath, options) { - return pull( - toPullStream.source(self._getAsyncIterator(ipfsPath, options)), - map(file => { - if (file.content) { - file.content = toPullStream.source(file.content()) - } - - return file - }) - ) - } -} diff --git a/src/core/components/files-regular/get-readable-stream.js b/src/core/components/files-regular/get-readable-stream.js deleted file mode 100644 index 3b6b78b3dd..0000000000 --- a/src/core/components/files-regular/get-readable-stream.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function getReadableStream (ipfsPath, options) { - 
return toStream.readable((async function * mapStreamFileContents () { - for await (const file of self._getAsyncIterator(ipfsPath, options)) { - if (file.content) { - file.content = toStream.readable(file.content()) - } - - yield file - } - })(), { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/get.js b/src/core/components/files-regular/get.js deleted file mode 100644 index 58e0434dfc..0000000000 --- a/src/core/components/files-regular/get.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function get (ipfsPath, options) { // eslint-disable-line require-await - return all(async function * () { - for await (const file of self._getAsyncIterator(ipfsPath, options)) { - if (file.content) { - file.content = Buffer.concat(await all(file.content())) - } - - yield file - } - }()) - }) -} diff --git a/src/core/components/files-regular/index.js b/src/core/components/files-regular/index.js deleted file mode 100644 index 8bff57d331..0000000000 --- a/src/core/components/files-regular/index.js +++ /dev/null @@ -1,34 +0,0 @@ -'use strict' - -module.exports = (self) => { - const filesRegular = { - add: require('./add')(self), - addFromFs: require('./add-from-fs')(self), - addFromStream: require('./add-from-stream')(self), - addFromURL: require('./add-from-url')(self), - addPullStream: require('./add-pull-stream')(self), - addReadableStream: require('./add-readable-stream')(self), - _addAsyncIterator: require('./add-async-iterator')(self), - cat: require('./cat')(self), - catPullStream: require('./cat-pull-stream')(self), - catReadableStream: require('./cat-readable-stream')(self), - _catAsyncIterator: require('./cat-async-iterator')(self), - get: require('./get')(self), - getPullStream: require('./get-pull-stream')(self), - getReadableStream: require('./get-readable-stream')(self), - _getAsyncIterator: require('./get-async-iterator')(self), - ls: require('./ls')(self), - lsPullStream: require('./ls-pull-stream')(self), - lsReadableStream: require('./ls-readable-stream')(self), - _lsAsyncIterator: require('./ls-async-iterator')(self), - refs: require('./refs')(self), - refsReadableStream: require('./refs-readable-stream')(self), - refsPullStream: require('./refs-pull-stream')(self), - _refsAsyncIterator: require('./refs-async-iterator')(self) - } - filesRegular.refs.local = require('./refs-local')(self) - filesRegular.refs.localReadableStream = require('./refs-local-readable-stream')(self) - filesRegular.refs.localPullStream = require('./refs-local-pull-stream')(self) - filesRegular.refs._localAsyncIterator = require('./refs-local-async-iterator')(self) - return filesRegular -} diff --git a/src/core/components/files-regular/ls-pull-stream.js b/src/core/components/files-regular/ls-pull-stream.js deleted file mode 100644 index 3cf5a6cd74..0000000000 --- a/src/core/components/files-regular/ls-pull-stream.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function lsPullStream (ipfsPath, options) { - return toPullStream.source(self._lsAsyncIterator(ipfsPath, options)) - } -} diff --git a/src/core/components/files-regular/ls-readable-stream.js b/src/core/components/files-regular/ls-readable-stream.js deleted file mode 100644 index 794095f752..0000000000 --- a/src/core/components/files-regular/ls-readable-stream.js +++ 
/dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function lsReadableStream (ipfsPath, options) { - return toStream.readable(self._lsAsyncIterator(ipfsPath, options), { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/ls.js b/src/core/components/files-regular/ls.js deleted file mode 100644 index 9ae4a71a97..0000000000 --- a/src/core/components/files-regular/ls.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function ls (ipfsPath, options) { // eslint-disable-line require-await - return all(self._lsAsyncIterator(ipfsPath, options)) - }) -} diff --git a/src/core/components/files-regular/refs-local-pull-stream.js b/src/core/components/files-regular/refs-local-pull-stream.js deleted file mode 100644 index 1bb02cec51..0000000000 --- a/src/core/components/files-regular/refs-local-pull-stream.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function refsLocalPullStream () { - return toPullStream.source(self.refs._localAsyncIterator()) - } -} diff --git a/src/core/components/files-regular/refs-local-readable-stream.js b/src/core/components/files-regular/refs-local-readable-stream.js deleted file mode 100644 index f66920ef51..0000000000 --- a/src/core/components/files-regular/refs-local-readable-stream.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function refsLocalReadableStream () { - return toStream.readable(self.refs._localAsyncIterator(), { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/refs-local.js b/src/core/components/files-regular/refs-local.js deleted file mode 100644 index 799b384e30..0000000000 --- a/src/core/components/files-regular/refs-local.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function refsLocal (ipfsPath, options) { // eslint-disable-line require-await - return all(self.refs._localAsyncIterator(ipfsPath, options)) - }) -} diff --git a/src/core/components/files-regular/refs-pull-stream.js b/src/core/components/files-regular/refs-pull-stream.js deleted file mode 100644 index 4d10812c37..0000000000 --- a/src/core/components/files-regular/refs-pull-stream.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function refsPullStream (ipfsPath, options) { - return toPullStream.source(self._refsAsyncIterator(ipfsPath, options)) - } -} diff --git a/src/core/components/files-regular/refs-readable-stream.js b/src/core/components/files-regular/refs-readable-stream.js deleted file mode 100644 index 509e65a508..0000000000 --- a/src/core/components/files-regular/refs-readable-stream.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function refsReadableStream (ipfsPath, options) { - return toStream.readable(self._refsAsyncIterator(ipfsPath, options), { - objectMode: true - }) - } -} diff --git 
a/src/core/components/files-regular/refs.js b/src/core/components/files-regular/refs.js deleted file mode 100644 index 4876457606..0000000000 --- a/src/core/components/files-regular/refs.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function refs (ipfsPath, options) { // eslint-disable-line require-await - return all(self._refsAsyncIterator(ipfsPath, options)) - }) -} - -// Preset format strings -module.exports.Format = { - default: '', - edges: ' -> ' -} diff --git a/src/core/components/files-regular/utils.js b/src/core/components/files-regular/utils.js deleted file mode 100644 index 0a5c8a57ba..0000000000 --- a/src/core/components/files-regular/utils.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict' - -const CID = require('cids') -const { Buffer } = require('buffer') -const { cidToString } = require('../../../utils/cid') - -const normalizePath = (path) => { - if (Buffer.isBuffer(path)) { - return new CID(path).toString() - } - if (CID.isCID(path)) { - return path.toString() - } - if (path.indexOf('/ipfs/') === 0) { - path = path.substring('/ipfs/'.length) - } - if (path.charAt(path.length - 1) === '/') { - path = path.substring(0, path.length - 1) - } - return path -} - -const mapFile = (file, options) => { - options = options || {} - - let size = 0 - let type = 'dir' - - if (file.unixfs && file.unixfs.type === 'file') { - size = file.unixfs.fileSize() - type = 'file' - } - - const output = { - hash: cidToString(file.cid, { base: options.cidBase }), - path: file.path, - name: file.name, - depth: file.path.split('/').length, - size, - type - } - - if (options.includeContent && file.unixfs && file.unixfs.type === 'file') { - output.content = file.content - } - - return output -} - -module.exports = { - normalizePath, - mapFile -} diff --git a/src/core/components/files-regular/get-async-iterator.js b/src/core/components/get.js similarity index 53% rename from src/core/components/files-regular/get-async-iterator.js rename to src/core/components/get.js index b9ad234f4b..4872a7e7f5 100644 --- a/src/core/components/files-regular/get-async-iterator.js +++ b/src/core/components/get.js @@ -2,25 +2,25 @@ const exporter = require('ipfs-unixfs-exporter') const errCode = require('err-code') -const { normalizePath, mapFile } = require('./utils') +const { normalizeCidPath, mapFile } = require('../utils') -module.exports = function (self) { - return async function * getAsyncIterator (ipfsPath, options) { +module.exports = function ({ ipld, preload }) { + return async function * get (ipfsPath, options) { options = options || {} if (options.preload !== false) { let pathComponents try { - pathComponents = normalizePath(ipfsPath).split('/') + pathComponents = normalizeCidPath(ipfsPath).split('/') } catch (err) { throw errCode(err, 'ERR_INVALID_PATH') } - self._preload(pathComponents[0]) + preload(pathComponents[0]) } - for await (const file of exporter.recursive(ipfsPath, self._ipld, options)) { + for await (const file of exporter.recursive(ipfsPath, ipld, options)) { yield mapFile(file, { ...options, includeContent: true diff --git a/src/core/components/index.js b/src/core/components/index.js index 1abb7f4499..809212bc10 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -1,14 +1,28 @@ 'use strict' exports.add = require('./add') +exports.cat = require('./cat') exports.bitswap = { stat: require('./bitswap/stat'), 
unwant: require('./bitswap/unwant'), wantlist: require('./bitswap/wantlist') } exports.config = require('./config') +exports.dns = require('./dns') +exports.get = require('./get') exports.id = require('./id') exports.init = require('./init') +exports.isOnline = require('./is-online') +exports.ls = require('./ls') +exports.name = { + publish: require('./name/publish'), + pubsub: { + cancel: require('./name/pubsub/cancel'), + state: require('./name/pubsub/state'), + subs: require('./name/pubsub/subs') + }, + resolve: require('./name/resolve') +} exports.object = { data: require('./object/data'), get: require('./object/get'), @@ -26,6 +40,9 @@ exports.object = { exports.ping = require('./ping') exports.start = require('./start') exports.stop = require('./stop') +exports.refs = require('./refs') +exports.refs.local = require('./refs/local') +exports.resolve = require('./resolve') exports.version = require('./version') exports.legacy = { // TODO: these will be removed as the new API is completed diff --git a/src/core/components/init.js b/src/core/components/init.js index e5b6788f63..fd49dabc5d 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -28,12 +28,12 @@ const IPNS = require('../ipns') const OfflineDatastore = require('../ipns/routing/offline-datastore') const initAssets = require('../runtime/init-assets-nodejs') const PinManager = require('./pin/pin-manager') -const Commands = require('./') +const Components = require('./') module.exports = ({ apiManager, print, - constructorOptions + options: constructorOptions }) => async function init (options) { const { cancel } = apiManager.update({ init: () => { throw new AlreadyInitializingError() } }) @@ -101,27 +101,27 @@ module.exports = ({ morticeId: repo.path }) - const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) + const dag = Components.legacy.dag({ _ipld: ipld, _preload: preload }) const object = { - data: Commands.object.data({ ipld, preload }), - get: Commands.object.get({ ipld, preload }), - links: Commands.object.links({ dag }), - new: Commands.object.new({ ipld, preload }), + data: Components.object.data({ ipld, preload }), + get: Components.object.get({ ipld, preload }), + links: Components.object.links({ dag }), + new: Components.object.new({ ipld, preload }), patch: { - addLink: Commands.object.patch.addLink({ ipld, gcLock, preload }), - appendData: Commands.object.patch.appendData({ ipld, gcLock, preload }), - rmLink: Commands.object.patch.rmLink({ ipld, gcLock, preload }), - setData: Commands.object.patch.setData({ ipld, gcLock, preload }) + addLink: Components.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Components.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Components.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Components.object.patch.setData({ ipld, gcLock, preload }) }, - put: Commands.object.put({ ipld, gcLock, preload }), - stat: Commands.object.stat({ ipld, preload }) + put: Components.object.put({ ipld, gcLock, preload }), + stat: Components.object.stat({ ipld, preload }) } const pinManager = new PinManager(repo, dag) await pinManager.load() - const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) - const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) + const pin = Components.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Components.add({ ipld, dag, preload, pin, gcLock, options: 
constructorOptions }) if (!isInitialized && !options.emptyRepo) { // add empty unixfs dir object (go-ipfs assumes this exists) @@ -297,29 +297,37 @@ function createApi ({ print, repo }) { - const start = Commands.start({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo - }) + const refs = () => { throw new NotStartedError() } + refs.local = Components.refs.local({ repo }) const api = { add, - config: Commands.config({ repo }), - id: Commands.id({ peerInfo }), + cat: Components.cat({ ipld, preload }), + config: Components.config({ repo }), + dns: Components.dns(), + get: Components.get({ ipld, preload }), + id: Components.id({ peerInfo }), init: () => { throw new AlreadyInitializedError() }, + isOnline: Components.isOnline({}), + ls: Components.ls({ ipld, preload }), object, - start, - version: Commands.version({ repo }) + refs, + start: Components.start({ + apiManager, + options: constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }), + stop: () => apiManager.api, + version: Components.version({ repo }) } return api diff --git a/src/core/components/is-online.js b/src/core/components/is-online.js index 68abdebe61..3aad832f57 100644 --- a/src/core/components/is-online.js +++ b/src/core/components/is-online.js @@ -1,7 +1,5 @@ 'use strict' -module.exports = function isOnline (self) { - return () => { - return Boolean(self._bitswap && self.libp2p && self.libp2p.isStarted()) - } +module.exports = ({ libp2p }) => { + return () => Boolean(libp2p && libp2p.isStarted()) } diff --git a/src/core/components/files-regular/ls-async-iterator.js b/src/core/components/ls.js similarity index 71% rename from src/core/components/files-regular/ls-async-iterator.js rename to src/core/components/ls.js index 34777a523e..df3e0596dc 100644 --- a/src/core/components/files-regular/ls-async-iterator.js +++ b/src/core/components/ls.js @@ -2,21 +2,21 @@ const exporter = require('ipfs-unixfs-exporter') const errCode = require('err-code') -const { normalizePath, mapFile } = require('./utils') +const { normalizeCidPath, mapFile } = require('../utils') -module.exports = function (self) { - return async function * lsAsyncIterator (ipfsPath, options) { +module.exports = function ({ ipld, preload }) { + return async function * ls (ipfsPath, options) { options = options || {} - const path = normalizePath(ipfsPath) + const path = normalizeCidPath(ipfsPath) const recursive = options.recursive const pathComponents = path.split('/') if (options.preload !== false) { - self._preload(pathComponents[0]) + preload(pathComponents[0]) } - const file = await exporter(ipfsPath, self._ipld, options) + const file = await exporter(ipfsPath, ipld, options) if (!file.unixfs) { throw errCode(new Error('dag node was not a UnixFS node'), 'ERR_NOT_UNIXFS') @@ -28,7 +28,7 @@ module.exports = function (self) { if (file.unixfs.type.includes('dir')) { if (recursive) { - for await (const child of exporter.recursive(file.cid, self._ipld, options)) { + for await (const child of exporter.recursive(file.cid, ipld, options)) { if (file.cid.toBaseEncodedString() === child.cid.toBaseEncodedString()) { continue } diff --git a/src/core/components/name-pubsub.js b/src/core/components/name-pubsub.js deleted file mode 100644 index 4fc4775713..0000000000 --- a/src/core/components/name-pubsub.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict' - -const debug = require('debug') -const errcode 
= require('err-code') -const callbackify = require('callbackify') - -const IpnsPubsubDatastore = require('../ipns/routing/pubsub-datastore') - -const log = debug('ipfs:name-pubsub') -log.error = debug('ipfs:name-pubsub:error') - -// Is pubsub enabled -const isNamePubsubEnabled = (node) => { - try { - return Boolean(getPubsubRouting(node)) - } catch (err) { - return false - } -} - -// Get pubsub from IPNS routing -const getPubsubRouting = (node) => { - if (!node._ipns || !node._options.EXPERIMENTAL.ipnsPubsub) { - throw errcode(new Error('IPNS pubsub subsystem is not enabled'), 'ERR_IPNS_PUBSUB_NOT_ENABLED') - } - - // Only one store and it is pubsub - if (IpnsPubsubDatastore.isIpnsPubsubDatastore(node._ipns.routing)) { - return node._ipns.routing - } - - // Find in tiered - const pubsub = (node._ipns.routing.stores || []).find(s => IpnsPubsubDatastore.isIpnsPubsubDatastore(s)) - - if (!pubsub) { - throw errcode(new Error('IPNS pubsub datastore not found'), 'ERR_PUBSUB_DATASTORE_NOT_FOUND') - } - - return pubsub -} - -module.exports = function namePubsub (self) { - return { - /** - * Query the state of IPNS pubsub. - * - * @returns {Promise|void} - */ - state: callbackify(async () => { // eslint-disable-line require-await - return { - enabled: isNamePubsubEnabled(self) - } - }), - /** - * Cancel a name subscription. - * - * @param {String} name subscription name. - * @param {function(Error)} [callback] - * @returns {Promise|void} - */ - cancel: callbackify(async (name) => { // eslint-disable-line require-await - const pubsub = getPubsubRouting(self) - - return pubsub.cancel(name) - }), - /** - * Show current name subscriptions. - * - * @param {function(Error)} [callback] - * @returns {Promise|void} - */ - subs: callbackify(async () => { // eslint-disable-line require-await - const pubsub = getPubsubRouting(self) - - return pubsub.getSubscriptions() - }) - } -} diff --git a/src/core/components/name.js b/src/core/components/name.js deleted file mode 100644 index 96614eda4d..0000000000 --- a/src/core/components/name.js +++ /dev/null @@ -1,179 +0,0 @@ -'use strict' - -const debug = require('debug') -const callbackify = require('callbackify') -const human = require('human-to-milliseconds') -const crypto = require('libp2p-crypto') -const errcode = require('err-code') -const mergeOptions = require('merge-options') -const CID = require('cids') -const isDomain = require('is-domain-name') -const promisify = require('promisify-es6') - -const log = debug('ipfs:name') -log.error = debug('ipfs:name:error') - -const namePubsub = require('./name-pubsub') -const utils = require('../utils') -const path = require('../ipns/path') - -const keyLookup = async (ipfsNode, kname) => { - if (kname === 'self') { - return ipfsNode._peerInfo.id.privKey - } - - try { - const pass = ipfsNode._options.pass - const pem = await ipfsNode._keychain.exportKey(kname, pass) - const privateKey = await promisify(crypto.keys.import.bind(crypto.keys))(pem, pass) - - return privateKey - } catch (err) { - log.error(err) - - throw errcode(err, 'ERR_CANNOT_GET_KEY') - } -} - -const appendRemainder = async (result, remainder) => { - result = await result - - if (remainder.length) { - return result + '/' + remainder.join('/') - } - - return result -} - -/** - * @typedef { import("../index") } IPFS - */ - -/** - * IPNS - Inter-Planetary Naming System - * - * @param {IPFS} self - * @returns {Object} - */ -module.exports = function name (self) { - return { - /** - * IPNS is a PKI namespace, where names are the hashes of public keys, and - * 
the private key enables publishing new (signed) values. In both publish - * and resolve, the default name used is the node's own PeerID, - * which is the hash of its public key. - * - * @param {String} value ipfs path of the object to be published. - * @param {Object} options ipfs publish options. - * @param {boolean} options.resolve resolve given path before publishing. - * @param {String} options.lifetime time duration that the record will be valid for. - This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are - "ns", "ms", "s", "m", "h". Default is 24h. - * @param {String} options.ttl time duration this record should be cached for (NOT IMPLEMENTED YET). - * This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are - "ns", "ms", "s", "m", "h" (caution: experimental). - * @param {String} options.key name of the key to be used, as listed by 'ipfs key list -l'. - * @param {function(Error)} [callback] - * @returns {Promise|void} - */ - publish: callbackify.variadic(async (value, options) => { - options = options || {} - - const resolve = !(options.resolve === false) - const lifetime = options.lifetime || '24h' - const key = options.key || 'self' - - if (!self.isOnline()) { - throw errcode(new Error(utils.OFFLINE_ERROR), 'OFFLINE_ERROR') - } - - // TODO: params related logic should be in the core implementation - - // Normalize path value - try { - value = utils.normalizePath(value) - } catch (err) { - log.error(err) - - throw err - } - - let pubLifetime - try { - pubLifetime = human(lifetime) - - // Calculate lifetime with nanoseconds precision - pubLifetime = pubLifetime.toFixed(6) - } catch (err) { - log.error(err) - - throw err - } - - // TODO: ttl human for cache - const results = await Promise.all([ - // verify if the path exists, if not, an error will stop the execution - keyLookup(self, key), - resolve.toString() === 'true' ? path.resolvePath(self, value) : Promise.resolve() - ]) - - // Start publishing process - return self._ipns.publish(results[0], value, pubLifetime) - }), - - /** - * Given a key, query the DHT for its best value. - * - * @param {String} name ipns name to resolve. Defaults to your node's peerID. - * @param {Object} options ipfs resolve options. - * @param {boolean} options.nocache do not use cached entries. - * @param {boolean} options.recursive resolve until the result is not an IPNS name. - * @param {function(Error)} [callback] - * @returns {Promise|void} - */ - resolve: callbackify.variadic(async (name, options) => { // eslint-disable-line require-await - options = mergeOptions({ - nocache: false, - recursive: true - }, options || {}) - - const offline = self._options.offline - - // TODO: params related logic should be in the core implementation - if (offline && options.nocache) { - throw errcode(new Error('cannot specify both offline and nocache'), 'ERR_NOCACHE_AND_OFFLINE') - } - - // Set node id as name for being resolved, if it is not received - if (!name) { - name = self._peerInfo.id.toB58String() - } - - if (!name.startsWith('/ipns/')) { - name = `/ipns/${name}` - } - - const [namespace, hash, ...remainder] = name.slice(1).split('/') - try { - new CID(hash) // eslint-disable-line no-new - } catch (err) { - // lets check if we have a domain ex. 
/ipns/ipfs.io and resolve with dns - if (isDomain(hash)) { - return appendRemainder(self.dns(hash, options), remainder) - } - - log.error(err) - throw errcode(new Error('Invalid IPNS name'), 'ERR_IPNS_INVALID_NAME') - } - - // multihash is valid lets resolve with IPNS - // IPNS resolve needs a online daemon - if (!self.isOnline() && !offline) { - throw errcode(new Error(utils.OFFLINE_ERROR), 'OFFLINE_ERROR') - } - - return appendRemainder(self._ipns.resolve(`/${namespace}/${hash}`, options), remainder) - }), - pubsub: namePubsub(self) - } -} diff --git a/src/core/components/name/publish.js b/src/core/components/name/publish.js new file mode 100644 index 0000000000..ae1b00b40b --- /dev/null +++ b/src/core/components/name/publish.js @@ -0,0 +1,102 @@ +'use strict' + +const debug = require('debug') +const parseDuration = require('parse-duration') +const crypto = require('libp2p-crypto') +const errcode = require('err-code') + +const log = debug('ipfs:name:publish') +log.error = debug('ipfs:name:publish:error') + +const { OFFLINE_ERROR, normalizePath } = require('../utils') +const { resolvePath } = require('./utils') + +/** + * @typedef { import("../index") } IPFS + */ + +/** + * IPNS - Inter-Planetary Naming System + * + * @param {IPFS} self + * @returns {Object} + */ +module.exports = ({ ipns, dag, peerInfo, isOnline, keychain, options: constructorOptions }) => { + const lookupKey = async keyName => { + if (keyName === 'self') { + return peerInfo.id.privKey + } + + try { + const pass = constructorOptions.pass + const pem = await keychain.exportKey(keyName, pass) + const privateKey = await crypto.keys.import(pem, pass) + return privateKey + } catch (err) { + log.error(err) + throw errcode(err, 'ERR_CANNOT_GET_KEY') + } + } + + /** + * IPNS is a PKI namespace, where names are the hashes of public keys, and + * the private key enables publishing new (signed) values. In both publish + * and resolve, the default name used is the node's own PeerID, + * which is the hash of its public key. + * + * @param {String} value ipfs path of the object to be published. + * @param {Object} options ipfs publish options. + * @param {boolean} options.resolve resolve given path before publishing. + * @param {String} options.lifetime time duration that the record will be valid for. + This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are + "ns", "ms", "s", "m", "h". Default is 24h. + * @param {String} options.ttl time duration this record should be cached for (NOT IMPLEMENTED YET). + * This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are + "ns", "ms", "s", "m", "h" (caution: experimental). + * @param {String} options.key name of the key to be used, as listed by 'ipfs key list -l'. 
+ * @param {function(Error)} [callback] + * @returns {Promise|void} + */ + return async function publish (value, options) { + options = options || {} + + const resolve = !(options.resolve === false) + const lifetime = options.lifetime || '24h' + const key = options.key || 'self' + + if (!isOnline()) { + throw errcode(new Error(OFFLINE_ERROR), 'OFFLINE_ERROR') + } + + // TODO: params related logic should be in the core implementation + + // Normalize path value + try { + value = normalizePath(value) + } catch (err) { + log.error(err) + throw err + } + + let pubLifetime + try { + pubLifetime = parseDuration(lifetime) + + // Calculate lifetime with nanoseconds precision + pubLifetime = pubLifetime.toFixed(6) + } catch (err) { + log.error(err) + throw err + } + + // TODO: ttl human for cache + const results = await Promise.all([ + // verify if the path exists, if not, an error will stop the execution + lookupKey(key), + resolve ? resolvePath({ ipns, dag }, value) : Promise.resolve() + ]) + + // Start publishing process + return ipns.publish(results[0], value, pubLifetime) + } +} diff --git a/src/core/components/name/pubsub/cancel.js b/src/core/components/name/pubsub/cancel.js new file mode 100644 index 0000000000..b0ce98c6ef --- /dev/null +++ b/src/core/components/name/pubsub/cancel.js @@ -0,0 +1,17 @@ +'use strict' + +const { getPubsubRouting } = require('./utils') + +module.exports = ({ ipns, options: constructorOptions }) => { + /** + * Cancel a name subscription. + * + * @param {String} name subscription name. + * @param {function(Error)} [callback] + * @returns {Promise<{ canceled: boolean }>} + */ + return function cancel (name) { + const pubsub = getPubsubRouting(ipns, constructorOptions) + return pubsub.cancel(name) + } +} diff --git a/src/core/components/name/pubsub/state.js b/src/core/components/name/pubsub/state.js new file mode 100644 index 0000000000..83033c7875 --- /dev/null +++ b/src/core/components/name/pubsub/state.js @@ -0,0 +1,18 @@ +'use strict' + +const { getPubsubRouting } = require('./utils') + +module.exports = ({ ipns, options: constructorOptions }) => { + /** + * Query the state of IPNS pubsub. + * + * @returns {Promise} + */ + return async function state () { // eslint-disable-line require-await + try { + return { enabled: Boolean(getPubsubRouting(ipns, constructorOptions)) } + } catch (err) { + return false + } + } +} diff --git a/src/core/components/name/pubsub/subs.js b/src/core/components/name/pubsub/subs.js new file mode 100644 index 0000000000..bcf6ede16e --- /dev/null +++ b/src/core/components/name/pubsub/subs.js @@ -0,0 +1,16 @@ +'use strict' + +const { getPubsubRouting } = require('./utils') + +module.exports = ({ ipns, options: constructorOptions }) => { + /** + * Show current name subscriptions. 
+ * + * @param {function(Error)} [callback] + * @returns {Promise} + */ + return function subs () { + const pubsub = getPubsubRouting(ipns, constructorOptions) + return pubsub.getSubscriptions() + } +} diff --git a/src/core/components/name/pubsub/utils.js b/src/core/components/name/pubsub/utils.js new file mode 100644 index 0000000000..ee53a96f9c --- /dev/null +++ b/src/core/components/name/pubsub/utils.js @@ -0,0 +1,25 @@ +'use strict' + +const IpnsPubsubDatastore = require('../../../ipns/routing/pubsub-datastore') +const errcode = require('err-code') + +// Get pubsub from IPNS routing +exports.getPubsubRouting = (ipns, options) => { + if (!ipns || !(options.EXPERIMENTAL && options.EXPERIMENTAL.ipnsPubsub)) { + throw errcode(new Error('IPNS pubsub subsystem is not enabled'), 'ERR_IPNS_PUBSUB_NOT_ENABLED') + } + + // Only one store and it is pubsub + if (IpnsPubsubDatastore.isIpnsPubsubDatastore(ipns.routing)) { + return ipns.routing + } + + // Find in tiered + const pubsub = (ipns.routing.stores || []).find(s => IpnsPubsubDatastore.isIpnsPubsubDatastore(s)) + + if (!pubsub) { + throw errcode(new Error('IPNS pubsub datastore not found'), 'ERR_PUBSUB_DATASTORE_NOT_FOUND') + } + + return pubsub +} diff --git a/src/core/components/name/resolve.js b/src/core/components/name/resolve.js new file mode 100644 index 0000000000..3d52651a02 --- /dev/null +++ b/src/core/components/name/resolve.js @@ -0,0 +1,89 @@ +'use strict' + +const debug = require('debug') +const errcode = require('err-code') +const mergeOptions = require('merge-options') +const CID = require('cids') +const isDomain = require('is-domain-name') + +const log = debug('ipfs:name:resolve') +log.error = debug('ipfs:name:resolve:error') + +const { OFFLINE_ERROR } = require('../utils') + +const appendRemainder = async (result, remainder) => { + result = await result + + if (remainder.length) { + return result + '/' + remainder.join('/') + } + + return result +} + +/** + * @typedef { import("../index") } IPFS + */ + +/** + * IPNS - Inter-Planetary Naming System + * + * @param {IPFS} self + * @returns {Object} + */ +module.exports = ({ dns, ipns, peerInfo, isOnline, options: constructorOptions }) => { + /** + * Given a key, query the DHT for its best value. + * + * @param {String} name ipns name to resolve. Defaults to your node's peerID. + * @param {Object} options ipfs resolve options. + * @param {boolean} options.nocache do not use cached entries. + * @param {boolean} options.recursive resolve until the result is not an IPNS name. + * @param {function(Error)} [callback] + * @returns {Promise|void} + */ + return async function * resolve (name, options) { // eslint-disable-line require-await + options = mergeOptions({ + nocache: false, + recursive: true + }, options || {}) + + const { offline } = constructorOptions + + // TODO: params related logic should be in the core implementation + if (offline && options.nocache) { + throw errcode(new Error('cannot specify both offline and nocache'), 'ERR_NOCACHE_AND_OFFLINE') + } + + // Set node id as name for being resolved, if it is not received + if (!name) { + name = peerInfo.id.toB58String() + } + + if (!name.startsWith('/ipns/')) { + name = `/ipns/${name}` + } + + const [namespace, hash, ...remainder] = name.slice(1).split('/') + try { + new CID(hash) // eslint-disable-line no-new + } catch (err) { + // lets check if we have a domain ex. 
/ipns/ipfs.io and resolve with dns + if (isDomain(hash)) { + return appendRemainder(dns(hash, options), remainder) + } + + log.error(err) + throw errcode(new Error('Invalid IPNS name'), 'ERR_IPNS_INVALID_NAME') + } + + // multihash is valid lets resolve with IPNS + // IPNS resolve needs a online daemon + if (!isOnline() && !offline) { + throw errcode(new Error(OFFLINE_ERROR), 'OFFLINE_ERROR') + } + + // TODO: convert ipns.resolve to return an iterator + yield appendRemainder(ipns.resolve(`/${namespace}/${hash}`, options), remainder) + } +} diff --git a/src/core/components/name/utils.js b/src/core/components/name/utils.js new file mode 100644 index 0000000000..acfb307fbd --- /dev/null +++ b/src/core/components/name/utils.js @@ -0,0 +1,15 @@ +'use strict' + +const isIPFS = require('is-ipfs') + +// resolves the given path by parsing out protocol-specific entries +// (e.g. /ipns/) and then going through the /ipfs/ entries and returning the final node +exports.resolvePath = ({ ipns, dag }, name) => { + // ipns path + if (isIPFS.ipnsPath(name)) { + return ipns.resolve(name) + } + + // ipfs path + return dag.get(name.substring('/ipfs/'.length)) +} diff --git a/src/core/components/files-regular/refs-async-iterator.js b/src/core/components/refs/index.js similarity index 78% rename from src/core/components/files-regular/refs-async-iterator.js rename to src/core/components/refs/index.js index 0d3dbe08d3..39e61b4468 100644 --- a/src/core/components/files-regular/refs-async-iterator.js +++ b/src/core/components/refs/index.js @@ -3,13 +3,17 @@ const isIpfs = require('is-ipfs') const CID = require('cids') const { DAGNode } = require('ipld-dag-pb') -const { normalizePath } = require('./utils') -const { Format } = require('./refs') +const { normalizeCidPath } = require('../../utils') const { Errors } = require('interface-datastore') const ERR_NOT_FOUND = Errors.notFoundError().code -module.exports = function (self) { - return async function * refsAsyncIterator (ipfsPath, options) { // eslint-disable-line require-await +const Format = { + default: '', + edges: ' -> ' +} + +module.exports = function ({ ipld, resolve, preload }) { + return async function * refs (ipfsPath, options) { // eslint-disable-line require-await options = options || {} if (options.maxDepth === 0) { @@ -27,18 +31,18 @@ module.exports = function (self) { } const rawPaths = Array.isArray(ipfsPath) ? 
ipfsPath : [ipfsPath] - const paths = rawPaths.map(p => getFullPath(self, p, options)) + const paths = rawPaths.map(p => getFullPath(preload, p, options)) for (const path of paths) { - yield * refsStream(self, path, options) + yield * refsStream(resolve, ipld, path, options) } } } -function getFullPath (ipfs, ipfsPath, options) { - // normalizePath() strips /ipfs/ off the front of the path so the CID will +function getFullPath (preload, ipfsPath, options) { + // normalizeCidPath() strips /ipfs/ off the front of the path so the CID will // be at the front of the path - const path = normalizePath(ipfsPath) + const path = normalizeCidPath(ipfsPath) const pathComponents = path.split('/') const cid = pathComponents[0] @@ -47,22 +51,22 @@ function getFullPath (ipfs, ipfsPath, options) { } if (options.preload !== false) { - ipfs._preload(cid) + preload(cid) } return '/ipfs/' + path } // Get a stream of refs at the given path -async function * refsStream (ipfs, path, options) { +async function * refsStream (resolve, ipld, path, options) { // Resolve to the target CID of the path - const resPath = await ipfs.resolve(path) + const resPath = await resolve(path) // path is /ipfs/ const parts = resPath.split('/') const cid = parts[2] // Traverse the DAG, converting it into a stream - for await (const obj of objectStream(ipfs, cid, options.maxDepth, options.unique)) { + for await (const obj of objectStream(ipld, cid, options.maxDepth, options.unique)) { // Root object will not have a parent if (!obj.parent) { continue @@ -90,7 +94,7 @@ function formatLink (srcCid, dstCid, linkName, format) { } // Do a depth first search of the DAG, starting from the given root cid -async function * objectStream (ipfs, rootCid, maxDepth, uniqueOnly) { // eslint-disable-line require-await +async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // eslint-disable-line require-await const seen = new Set() async function * traverseLevel (parent, depth) { @@ -104,7 +108,7 @@ async function * objectStream (ipfs, rootCid, maxDepth, uniqueOnly) { // eslint- // Get this object's links try { // Look at each link, parent and the new depth - for (const link of await getLinks(ipfs, parent.cid)) { + for (const link of await getLinks(ipld, parent.cid)) { yield { parent: parent, node: link, @@ -130,8 +134,8 @@ async function * objectStream (ipfs, rootCid, maxDepth, uniqueOnly) { // eslint- } // Fetch a node from IPLD then get all its links -async function getLinks (ipfs, cid) { - const node = await ipfs._ipld.get(new CID(cid)) +async function getLinks (ipld, cid) { + const node = await ipld.get(new CID(cid)) if (DAGNode.isDAGNode(node)) { return node.Links.map(({ Name, Hash }) => ({ name: Name, cid: new CID(Hash) })) diff --git a/src/core/components/files-regular/refs-local-async-iterator.js b/src/core/components/refs/local.js similarity index 75% rename from src/core/components/files-regular/refs-local-async-iterator.js rename to src/core/components/refs/local.js index 62029cbac9..365f82d326 100644 --- a/src/core/components/files-regular/refs-local-async-iterator.js +++ b/src/core/components/refs/local.js @@ -3,9 +3,9 @@ const CID = require('cids') const base32 = require('base32.js') -module.exports = function (self) { - return async function * refsLocalAsyncIterator () { - for await (const result of self._repo.blocks.query({ keysOnly: true })) { +module.exports = function ({ repo }) { + return async function * refsLocal () { + for await (const result of repo.blocks.query({ keysOnly: true })) { yield 
dsKeyToRef(result.key) } } diff --git a/src/core/components/resolve.js b/src/core/components/resolve.js index 268952dfe7..c8eac5bb64 100644 --- a/src/core/components/resolve.js +++ b/src/core/components/resolve.js @@ -2,7 +2,6 @@ const isIpfs = require('is-ipfs') const CID = require('cids') -const nodeify = require('promise-nodeify') const { cidToString } = require('../../utils/cid') /** @@ -32,26 +31,28 @@ const { cidToString } = require('../../utils/cid') * @param {IPFS} ipfs * @returns {ResolveWrapper} */ -module.exports = (ipfs) => { +module.exports = ({ name, ipld }) => { /** * IPFS Resolve - Resolve the value of names to IPFS * - * @param {String} name + * @param {String} path * @param {ResolveOptions} [opts={}] * @returns {Promise} */ - const resolve = async (name, opts) => { + return async function resolve (path, opts) { opts = opts || {} - if (!isIpfs.path(name)) { - throw new Error('invalid argument ' + name) + if (!isIpfs.path(path)) { + throw new Error('invalid argument ' + path) } - if (isIpfs.ipnsPath(name)) { - name = await ipfs.name.resolve(name, opts) + if (isIpfs.ipnsPath(path)) { + for await (const resolvedPath of name.resolve(path, opts)) { + path = resolvedPath + } } - const [, , hash, ...rest] = name.split('/') // ['', 'ipfs', 'hash', ...path] + const [, , hash, ...rest] = path.split('/') // ['', 'ipfs', 'hash', ...path] const cid = new CID(hash) // nothing to resolve return the input @@ -59,8 +60,9 @@ module.exports = (ipfs) => { return `/ipfs/${cidToString(cid, { base: opts.cidBase })}` } - const path = rest.join('/') - const results = ipfs._ipld.resolve(cid, path) + path = rest.join('/') + + const results = ipld.resolve(cid, path) let value = cid let remainderPath = path @@ -73,13 +75,4 @@ module.exports = (ipfs) => { return `/ipfs/${cidToString(value, { base: opts.cidBase })}${remainderPath ? 
'/' + remainderPath : ''}` } - - return (name, opts, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - opts = opts || {} - return nodeify(resolve(name, opts), cb) - } } diff --git a/src/core/components/start.js b/src/core/components/start.js index 5234820fa8..6ed02157d9 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -6,11 +6,11 @@ const IPNS = require('../ipns') const routingConfig = require('../ipns/routing/config') const defer = require('p-defer') const { AlreadyInitializedError, NotEnabledError } = require('../errors') -const Commands = require('./') +const Components = require('./') module.exports = ({ apiManager, - constructorOptions, + options: constructorOptions, blockService, gcLock, initOptions, @@ -34,7 +34,7 @@ module.exports = ({ const config = await repo.config.get() const peerBook = new PeerBook() - const libp2p = Commands.legacy.libp2p({ + const libp2p = Components.legacy.libp2p({ _options: constructorOptions, _repo: repo, _peerInfo: peerInfo, @@ -107,59 +107,77 @@ function createApi ({ print, repo }) { - const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) + const dag = Components.legacy.dag({ _ipld: ipld, _preload: preload }) const object = { - data: Commands.object.data({ ipld, preload }), - get: Commands.object.get({ ipld, preload }), - links: Commands.object.links({ dag }), - new: Commands.object.new({ ipld, preload }), + data: Components.object.data({ ipld, preload }), + get: Components.object.get({ ipld, preload }), + links: Components.object.links({ dag }), + new: Components.object.new({ ipld, preload }), patch: { - addLink: Commands.object.patch.addLink({ ipld, gcLock, preload }), - appendData: Commands.object.patch.appendData({ ipld, gcLock, preload }), - rmLink: Commands.object.patch.rmLink({ ipld, gcLock, preload }), - setData: Commands.object.patch.setData({ ipld, gcLock, preload }) + addLink: Components.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Components.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Components.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Components.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Components.object.put({ ipld, gcLock, preload }), + stat: Components.object.stat({ ipld, preload }) + } + const pin = Components.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Components.add({ ipld, dag, preload, pin, gcLock, options: constructorOptions }) + const isOnline = Components.isOnline({ libp2p }) + const dns = Components.dns() + const name = { + pubsub: { + cancel: Components.name.pubsub.cancel({ ipns, options: constructorOptions }), + state: Components.name.pubsub.state({ ipns, options: constructorOptions }), + subs: Components.name.pubsub.subs({ ipns, options: constructorOptions }) }, - put: Commands.object.put({ ipld, gcLock, preload }), - stat: Commands.object.stat({ ipld, preload }) + publish: Components.name.publish({ ipns, dag, peerInfo, isOnline, keychain, options: constructorOptions }), + resolve: Components.name.resolve({ dns, ipns, peerInfo, isOnline, options: constructorOptions }) } - const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) - const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) - - const stop = Commands.stop({ - apiManager, - bitswap, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - ipns, - keychain, - libp2p, - 
object, - peerInfo, - preload, - print, - repo - }) + const resolve = Components.resolve({ name, ipld }) + const refs = Components.refs({ ipld, resolve, preload }) + refs.local = Components.refs.local({ repo }) const api = { add, bitswap: { - stat: Commands.bitswap.stat({ bitswap }), - unwant: Commands.bitswap.unwant({ bitswap }), - wantlist: Commands.bitswap.wantlist({ bitswap }) + stat: Components.bitswap.stat({ bitswap }), + unwant: Components.bitswap.unwant({ bitswap }), + wantlist: Components.bitswap.wantlist({ bitswap }) }, - config: Commands.config({ repo }), - id: Commands.id({ peerInfo }), + cat: Components.cat({ ipld, preload }), + config: Components.config({ repo }), + dns, + get: Components.get({ ipld, preload }), + id: Components.id({ peerInfo }), init: () => { throw new AlreadyInitializedError() }, - ping: Commands.ping({ libp2p }), + ls: Components.ls({ ipld, preload }), + name, + ping: Components.ping({ libp2p }), pubsub: libp2p.pubsub - ? Commands.pubsub({ libp2p }) + ? Components.pubsub({ libp2p }) : () => { throw new NotEnabledError('pubsub not enabled') }, + refs, + resolve, start: () => apiManager.api, - stop, - version: Commands.version({ repo }) + stop: Components.stop({ + apiManager, + bitswap, + options: constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + preload, + print, + repo + }), + version: Components.version({ repo }) } return api diff --git a/src/core/components/stats.js b/src/core/components/stats.js index 88c19b352e..33bac0f606 100644 --- a/src/core/components/stats.js +++ b/src/core/components/stats.js @@ -3,7 +3,7 @@ const callbackify = require('callbackify') const Big = require('bignumber.js') const Pushable = require('pull-pushable') -const human = require('human-to-milliseconds') +const parseDuration = require('parse-duration') const toStream = require('pull-stream-to-stream') const errCode = require('err-code') @@ -51,7 +51,7 @@ module.exports = function stats (self) { if (opts.poll) { let value try { - value = human(opts.interval || '1s') + value = parseDuration(opts.interval || '1s') } catch (err) { // Pull stream expects async work, so we need to simulate it. 
process.nextTick(() => { diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 9be69ec1d6..545aaf8f63 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -2,11 +2,11 @@ const defer = require('p-defer') const { NotStartedError, AlreadyInitializedError } = require('../errors') -const Commands = require('./') +const Components = require('./') module.exports = ({ apiManager, - constructorOptions, + options: constructorOptions, bitswap, blockService, gcLock, @@ -76,48 +76,53 @@ function createApi ({ print, repo }) { - const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) + const dag = Components.legacy.dag({ _ipld: ipld, _preload: preload }) const object = { - data: Commands.object.data({ ipld, preload }), - get: Commands.object.get({ ipld, preload }), - links: Commands.object.links({ dag }), - new: Commands.object.new({ ipld, preload }), + data: Components.object.data({ ipld, preload }), + get: Components.object.get({ ipld, preload }), + links: Components.object.links({ dag }), + new: Components.object.new({ ipld, preload }), patch: { - addLink: Commands.object.patch.addLink({ ipld, gcLock, preload }), - appendData: Commands.object.patch.appendData({ ipld, gcLock, preload }), - rmLink: Commands.object.patch.rmLink({ ipld, gcLock, preload }), - setData: Commands.object.patch.setData({ ipld, gcLock, preload }) + addLink: Components.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Components.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Components.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Components.object.patch.setData({ ipld, gcLock, preload }) }, - put: Commands.object.put({ ipld, gcLock, preload }), - stat: Commands.object.stat({ ipld, preload }) + put: Components.object.put({ ipld, gcLock, preload }), + stat: Components.object.stat({ ipld, preload }) } - const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) - const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) - - const start = Commands.start({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - object, - peerInfo, - pinManager, - preload, - print, - repo - }) + const pin = Components.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Components.add({ ipld, dag, preload, pin, gcLock, options: constructorOptions }) + const refs = () => { throw new NotStartedError() } + refs.local = Components.refs.local({ repo }) const api = { add, - config: Commands.config({ repo }), - id: Commands.id({ peerInfo }), + cat: Components.cat({ ipld, preload }), + config: Components.config({ repo }), + dns: Components.dns(), + get: Components.get({ ipld, preload }), + id: Components.id({ peerInfo }), init: () => { throw new AlreadyInitializedError() }, - start, + isOnline: Components.isOnline({}), + ls: Components.ls({ ipld, preload }), + refs, + start: Components.start({ + apiManager, + options: constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }), stop: () => apiManager.api, - version: Commands.version({ repo }) + version: Components.version({ repo }) } return api diff --git a/src/core/index.js b/src/core/index.js index 878e1c4957..92530e2779 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -17,7 +17,7 @@ const multicodec = require('multicodec') const multihashing = 
require('multihashing-async') const CID = require('cids') const { NotInitializedError } = require('./errors') -const createInitApi = require('./components/init') +const Components = require('./components') const ApiManager = require('./api-manager') const getDefaultOptions = () => ({ @@ -40,8 +40,12 @@ async function create (options) { const print = options.silent ? log : console.log const apiManager = new ApiManager() - const init = createInitApi({ apiManager, print, constructorOptions: options }) - const { api } = apiManager.update({ init }, () => { throw new NotInitializedError() }) + + const { api } = apiManager.update({ + init: Components.init({ apiManager, print, constructorOptions: options }), + dns: Components.dns(), + isOnline: Components.isOnline({}) + }, () => { throw new NotInitializedError() }) if (!options.init) { return api diff --git a/src/core/ipns/path.js b/src/core/ipns/path.js deleted file mode 100644 index 0fb9e34ff7..0000000000 --- a/src/core/ipns/path.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict' - -const isIPFS = require('is-ipfs') - -const debug = require('debug') -const log = debug('ipfs:ipns:path') -log.error = debug('ipfs:ipns:path:error') - -// resolves the given path by parsing out protocol-specific entries -// (e.g. /ipns/) and then going through the /ipfs/ entries and returning the final node -const resolvePath = (ipfsNode, name) => { - // ipns path - if (isIPFS.ipnsPath(name)) { - log(`resolve ipns path ${name}`) - - return ipfsNode._ipns.resolve(name) - } - - // ipfs path - return ipfsNode.dag.get(name.substring('/ipfs/'.length)) -} - -module.exports = { - resolvePath -} diff --git a/src/core/runtime/add-from-fs-browser.js b/src/core/runtime/add-from-fs-browser.js deleted file mode 100644 index aaf9691c7c..0000000000 --- a/src/core/runtime/add-from-fs-browser.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') - -module.exports = () => { - return callbackify(async () => { // eslint-disable-line require-await - throw new Error('not available in the browser') - }) -} diff --git a/src/core/runtime/add-from-fs-nodejs.js b/src/core/runtime/add-from-fs-nodejs.js deleted file mode 100644 index 33bc3954e2..0000000000 --- a/src/core/runtime/add-from-fs-nodejs.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const globSource = require('ipfs-utils/src/files/glob-source') -const all = require('async-iterator-all') - -module.exports = self => { - return callbackify.variadic(async (...args) => { // eslint-disable-line require-await - const options = typeof args[args.length - 1] === 'string' ? {} : args.pop() - - return all(self._addAsyncIterator(globSource(...args, options), options)) - }) -} diff --git a/src/core/utils.js b/src/core/utils.js index 8373797dde..f2675bf20c 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -2,6 +2,7 @@ const isIpfs = require('is-ipfs') const CID = require('cids') +const { cidToString } = require('../utils/cid') const ERR_BAD_PATH = 'ERR_BAD_PATH' exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.' @@ -39,11 +40,6 @@ function parseIpfsPath (ipfsPath) { /** * Returns a well-formed ipfs Path. * The returned path will always be prefixed with /ipfs/ or /ipns/. 
- * If the received string is not a valid ipfs path, an error will be returned - * examples: - * b58Hash -> { hash: 'b58Hash', links: [] } - * b58Hash/mercury/venus -> { hash: 'b58Hash', links: ['mercury', 'venus']} - * /ipfs/b58Hash/links/by/name -> { hash: 'b58Hash', links: ['links', 'by', 'name'] } * * @param {String} pathStr An ipfs-path, or ipns-path or a cid * @return {String} ipfs-path or ipns-path @@ -51,12 +47,29 @@ function parseIpfsPath (ipfsPath) { */ const normalizePath = (pathStr) => { if (isIpfs.cid(pathStr)) { - return `/ipfs/${pathStr}` + return `/ipfs/${new CID(pathStr)}` } else if (isIpfs.path(pathStr)) { return pathStr } else { - throw Object.assign(new Error(`invalid ${pathStr} path`), { code: ERR_BAD_PATH }) + throw Object.assign(new Error(`invalid path: ${pathStr}`), { code: ERR_BAD_PATH }) + } +} + +// TODO: do we need both normalizePath and normalizeCidPath? +const normalizeCidPath = (path) => { + if (Buffer.isBuffer(path)) { + return new CID(path).toString() + } + if (CID.isCID(path)) { + return path.toString() + } + if (path.indexOf('/ipfs/') === 0) { + path = path.substring('/ipfs/'.length) + } + if (path.charAt(path.length - 1) === '/') { + path = path.substring(0, path.length - 1) } + return path } /** @@ -124,6 +137,35 @@ const resolvePath = async function (objectAPI, ipfsPaths) { return cids } +const mapFile = (file, options) => { + options = options || {} + + let size = 0 + let type = 'dir' + + if (file.unixfs && file.unixfs.type === 'file') { + size = file.unixfs.fileSize() + type = 'file' + } + + const output = { + hash: cidToString(file.cid, { base: options.cidBase }), + path: file.path, + name: file.name, + depth: file.path.split('/').length, + size, + type + } + + if (options.includeContent && file.unixfs && file.unixfs.type === 'file') { + output.content = file.content + } + + return output +} + exports.normalizePath = normalizePath +exports.normalizeCidPath = normalizeCidPath exports.parseIpfsPath = parseIpfsPath exports.resolvePath = resolvePath +exports.mapFile = mapFile diff --git a/src/index.js b/src/index.js index aec25fb4ae..8140941bf4 100644 --- a/src/index.js +++ b/src/index.js @@ -2,4 +2,4 @@ const IPFS = require('./core') -exports = module.exports = IPFS +module.exports = IPFS
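For context on how the refactored API assembled above is consumed, here is a minimal usage sketch; it is illustrative only and not part of this changeset. It assumes the async `create()` factory in src/core/index.js is exposed on the exported module, that the resulting node is initialised and started, and that `add()` yields entries carrying a `cid`, as the deleted legacy `addAsyncIterator` wrapper suggests; the file name and domain used are placeholders.

// Illustrative sketch, not part of this diff.
// Assumptions: IPFS.create() resolves to an initialised and started node exposing
// the component API wired up in start.js above; Node.js >= 10 for async iteration.
const IPFS = require('./src/core')

async function main () {
  const ipfs = await IPFS.create()

  // add() is an async generator; each yielded entry is assumed to carry a `cid`,
  // matching the removed legacy wrapper that exposed `hash: file.cid.toString()`
  for await (const added of ipfs.add({ path: 'hello.txt', content: Buffer.from('hello world') })) {
    console.log('added', added.cid.toString(), added.path)

    // cat() is also an async generator, yielding raw Buffer chunks
    const chunks = []
    for await (const chunk of ipfs.cat(added.cid.toString())) {
      chunks.push(chunk)
    }
    console.log('content:', Buffer.concat(chunks).toString())
  }

  // dns() now returns a promise directly (the callbackify wrapper was removed)
  console.log(await ipfs.dns('ipfs.io'))

  await ipfs.stop()
}

main().catch(console.error)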