From 039675e443c00fc08601ecfe619d3bbc796f22f7 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Wed, 4 Sep 2019 18:17:17 +0100 Subject: [PATCH] feat: add support for ipns and recursive to ipfs resolve (#2297) This PR adds IPNS support to resolve, makes the recursive option true by default and reworks the tests. Jsdocs were add to the resolve methods. Depends on: * [x] https://github.com/ipfs/interface-js-ipfs-core/pull/504 BREAKING CHANGE: `recursive` is now `true` by default in `ipfs resolve` --- package.json | 4 +- src/cli/commands/resolve.js | 4 +- src/core/components/files-mfs.js | 361 +++++++++++++++++++++++++----- src/core/components/resolve.js | 98 ++++---- src/http/api/resources/resolve.js | 10 +- test/cli/resolve.js | 74 ------ test/core/interface.spec.js | 25 +-- test/http-api/inject/resolve.js | 26 --- test/http-api/interface.js | 31 ++- 9 files changed, 398 insertions(+), 235 deletions(-) delete mode 100644 test/cli/resolve.js diff --git a/package.json b/package.json index 0b87ec4e7b..e7cfec839b 100644 --- a/package.json +++ b/package.json @@ -75,7 +75,6 @@ "bs58": "^4.0.1", "buffer-peek-stream": "^1.0.1", "byteman": "^1.3.5", - "callbackify": "^1.1.0", "cid-tool": "~0.3.0", "cids": "~0.7.1", "class-is": "^1.1.0", @@ -155,6 +154,7 @@ "peer-id": "~0.12.3", "peer-info": "~0.15.0", "progress": "^2.0.1", + "promise-nodeify": "^3.0.1", "promisify-es6": "^1.0.3", "protons": "^1.0.1", "pull-abortable": "^4.1.1", @@ -194,8 +194,8 @@ "execa": "^2.0.4", "form-data": "^2.5.1", "hat": "0.0.3", - "interface-ipfs-core": "^0.111.0", "ipfsd-ctl": "~0.45.0", + "interface-ipfs-core": "^0.111.1", "libp2p-websocket-star": "~0.10.2", "ncp": "^2.0.0", "p-event": "^4.1.0", diff --git a/src/cli/commands/resolve.js b/src/cli/commands/resolve.js index beefd749a3..5d2fcfdb21 100644 --- a/src/cli/commands/resolve.js +++ b/src/cli/commands/resolve.js @@ -20,11 +20,11 @@ module.exports = { } }, - handler ({ getIpfs, print, name, recursive, cidBase, resolve }) { + handler ({ getIpfs, name, recursive, cidBase, resolve }) { resolve((async () => { const ipfs = await getIpfs() const res = await ipfs.resolve(name, { recursive, cidBase }) - print(res) + return res })()) } } diff --git a/src/core/components/files-mfs.js b/src/core/components/files-mfs.js index 94a5ea8594..0bdb95890c 100644 --- a/src/core/components/files-mfs.js +++ b/src/core/components/files-mfs.js @@ -6,13 +6,18 @@ const toPullStream = require('async-iterator-to-pull-stream') const toReadableStream = require('async-iterator-to-stream') const pullStreamToAsyncIterator = require('pull-stream-to-async-iterator') const all = require('async-iterator-all') -const callbackify = require('callbackify') +const nodeify = require('promise-nodeify') const PassThrough = require('stream').PassThrough const pull = require('pull-stream/pull') const map = require('pull-stream/throughs/map') const isIpfs = require('is-ipfs') const { cidToString } = require('../../utils/cid') +/** + * @typedef { import("readable-stream").Readable } ReadableStream + * @typedef { import("pull-stream") } PullStream + */ + const mapLsFile = (options = {}) => { const long = options.long || options.l @@ -26,12 +31,12 @@ const mapLsFile = (options = {}) => { } } -module.exports = self => { - const methods = mfs({ - ipld: self._ipld, - blocks: self._blockService, - datastore: self._repo.root, - repoOwner: self._options.repoOwner +module.exports = (/** @type { import("../index") } */ ipfs) => { + const methodsOriginal = mfs({ + ipld: ipfs._ipld, + blocks: ipfs._blockService, + datastore: 
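
Note: the dependency swap above (callbackify out, promise-nodeify in) is what drives most of this diff. Every MFS method below keeps its callback signature while also returning a promise when no callback is given. A minimal sketch of the pattern, with a hypothetical `doWork` standing in for any MFS method:

```js
'use strict'

const nodeify = require('promise-nodeify')

// Hypothetical async operation standing in for any of the MFS methods.
const doWork = async (input) => `processed ${input}`

// The wrapper shape used throughout files-mfs.js: hand back the promise when
// no callback is supplied, otherwise deliver the result through the callback.
const work = (input, cb) => nodeify(doWork(input), cb)

// Promise style
work('a').then(console.log)

// Callback style
work('b', (err, res) => {
  if (err) throw err
  console.log(res)
})
```

Compared with `callbackify.variadic` this keeps every method's signature explicit, which is what makes the JSDoc annotations below possible.
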
ipfs._repo.root, + repoOwner: ipfs._options.repoOwner }) const withPreload = fn => (...args) => { @@ -40,74 +45,328 @@ module.exports = self => { if (paths.length) { const options = args[args.length - 1] if (options.preload !== false) { - paths.forEach(path => self._preload(path)) + paths.forEach(path => ipfs._preload(path)) } } return fn(...args) } + const methods = { + ...methodsOriginal, + cp: withPreload(methodsOriginal.cp), + ls: withPreload(methodsOriginal.ls), + mv: withPreload(methodsOriginal.mv), + read: withPreload(methodsOriginal.read), + stat: withPreload(methodsOriginal.stat) + } + return { - cp: callbackify.variadic(withPreload(methods.cp)), - flush: callbackify.variadic(methods.flush), - ls: callbackify.variadic(withPreload(async (path, options = {}) => { - const files = await all(methods.ls(path, options)) - - return files.map(mapLsFile(options)) - })), - lsReadableStream: withPreload((path, options = {}) => { - const stream = toReadableStream.obj(methods.ls(path, options)) + /** + * Copy files + * + * @param {String | Array} from - The path(s) of the source to copy. + * @param {String} to - The path of the destination to copy to. + * @param {Object} [opts] - Options for copy. + * @param {boolean} [opts.parents=false] - Whether or not to make the parent directories if they don't exist. (default: false) + * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb) + * @param {String} [opts.hashAlg=sha2-256] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} + * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ + cp: (from, to, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(methods.cp(from, to, opts), cb) + }, + + /** + * Make a directory + * + * @param {String} path - The path to the directory to make. + * @param {Object} [opts] - Options for mkdir. + * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) + * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). + * @param {String} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} + * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ + mkdir: (path, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(methods.mkdir(path, opts), cb) + }, + + /** + * @typedef {Object} StatOutput + * @prop {String} hash - Output hash. + * @prop {number} size - File size in bytes. + * @prop {number} cumulativeSize - Integer with the size of the DAGNodes making up the file in Bytes. + * @prop {string} type - Output type either 'directory' or 'file'. 
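
For illustration, a usage sketch of the `cp` and `mkdir` wrappers defined above. It assumes `ipfs` is an already initialised js-ipfs instance; the CID and paths are made up:

```js
// Usage sketch only; the CID below is illustrative.
async function copyIntoMfs (ipfs) {
  // Promise style: create the target directory, then copy an IPFS path into it.
  await ipfs.files.mkdir('/docs', { parents: true })
  await ipfs.files.cp('/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks', '/docs/readme', { flush: true })

  // The callback style is still accepted by every wrapper.
  ipfs.files.cp('/docs/readme', '/docs/readme-copy', {}, (err) => {
    if (err) console.error(err)
  })
}
```
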
+ * @prop {number} blocks - If type is directory, this is the number of files in the directory. If it is file it is the number of blocks that make up the file. + * @prop {boolean} withLocality - Indicate if locality information is present. + * @prop {boolean} local - Indicate if the queried dag is fully present locally. + * @prop {number} sizeLocal - Integer indicating the cumulative size of the data present locally. + */ + + /** + * Get file or directory status. + * + * @param {String} path - Path to the file or directory to stat. + * @param {Object} [opts] - Options for stat. + * @param {boolean} [opts.hash=false] - Return only the hash. (default: false) + * @param {boolean} [opts.size=false] - Return only the size. (default: false) + * @param {boolean} [opts.withLocal=false] - Compute the amount of the dag that is local, and if possible the total size. (default: false) + * @param {String} [opts.cidBase=base58btc] - Which number base to use to format hashes - e.g. base32, base64 etc. (default: base58btc) + * @param {function(Error, StatOutput): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ + stat: (path, opts, cb) => { + const stat = async (path, opts = {}) => { + const stats = await methods.stat(path, opts) + + stats.hash = stats.cid.toBaseEncodedString(opts && opts.cidBase) + delete stats.cid + + return stats + } + + if (typeof opts === 'function') { + cb = opts + opts = {} + } + + return nodeify(stat(path, opts), cb) + }, + + /** + * Remove a file or directory. + * + * @param {String | Array} paths - One or more paths to remove. + * @param {Object} [opts] - Options for remove. + * @param {boolean} [opts.recursive=false] - Whether or not to remove directories recursively. (default: false) + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ + rm: (paths, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(methods.rm(paths, opts), cb) + }, + + /** + * @typedef {Object} ReadOptions + * @prop {number} [opts.offset=0] - Integer with the byte offset to begin reading from (default: 0). + * @prop {number} [opts.length] - Integer with the maximum number of bytes to read (default: Read to the end of stream). + */ + + /** + * Read a file into a Buffer. + * + * @param {string} path - Path of the file to read and must point to a file (and not a directory). + * @param {ReadOptions} [opts] - Object for read. + * @param {function(Error, Buffer): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ + read: (path, opts, cb) => { + const read = async (path, opts = {}) => { + return Buffer.concat(await all(methods.read(path, opts))) + } + + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(read(path, opts), cb) + }, + + /** + * Read a file into a ReadableStream. + * + * @param {string} path - Path of the file to read and must point to a file (and not a directory). + * @param {ReadOptions} [opts] - Object for read. + * @returns {ReadableStream} Returns a ReadableStream with the contents of path. + */ + readReadableStream: (path, opts = {}) => toReadableStream(methods.read(path, opts)), + + /** + * Read a file into a PullStrean. + * + * @param {string} path - Path of the file to read and must point to a file (and not a directory). + * @param {ReadOptions} [opts] - Object for read. 
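
As a quick aside, consuming the three read flavours (`read`, `readReadableStream`, `readPullStream`) looks roughly like this, assuming `ipfs` is an initialised instance and `/docs/readme` already exists in MFS:

```js
'use strict'

const pull = require('pull-stream/pull')
const collect = require('pull-stream/sinks/collect')

async function readExamples (ipfs) {
  // Buffer: the promise resolves with the whole file contents.
  const buf = await ipfs.files.read('/docs/readme')
  console.log(buf.toString('utf8'))

  // Readable Stream: contents arrive in chunks.
  ipfs.files.readReadableStream('/docs/readme')
    .on('data', chunk => console.log('got %d bytes', chunk.length))

  // Pull Stream: collect every chunk, then concatenate.
  pull(
    ipfs.files.readPullStream('/docs/readme'),
    collect((err, chunks) => {
      if (err) throw err
      console.log(Buffer.concat(chunks).toString('utf8'))
    })
  )
}
```
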
+ * @returns {PullStream} Returns a PullStream with the contents of path. + */ + readPullStream: (path, opts = {}) => toPullStream.source(methods.read(path, opts)), + + /** + * Write to a file. + * + * @param {string} path - Path of the file to write. + * @param {Buffer | PullStream | ReadableStream | Blob | string} content - Content to write. + * @param {Object} opts - Options for write. + * @param {number} [opts.offset=0] - Integer with the byte offset to begin writing at. (default: 0) + * @param {boolean} [opts.create=false] - Indicate to create the file if it doesn't exist. (default: false) + * @param {boolean} [opts.truncate=false] - Indicate if the file should be truncated after writing all the bytes from content. (default: false) + * @param {boolena} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) + * @param {number} [opts.length] - Maximum number of bytes to read. (default: Read all bytes from content) + * @param {boolean} [opts.rawLeaves=false] - If true, DAG leaves will contain raw file data and not be wrapped in a protobuf. (default: false) + * @param {number} [opts.cidVersion=0] - The CID version to use when storing the data (storage keys are based on the CID, including its version). (default: 0) + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ + write: (path, content, opts, cb) => { + const write = async (path, content, opts = {}) => { + if (isPullStream.isSource(content)) { + content = pullStreamToAsyncIterator(content) + } + + await methods.write(path, content, opts) + } + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(write(path, content, opts), cb) + }, + + /** + * Move files. + * + * @param {string | Array} from - Path(s) of the source to move. + * @param {string} to - Path of the destination to move to. + * @param {Object} opts - Options for mv. + * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) + * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). + * @param {String} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} + * @param {boolean} [opts.flush=true] - Value to decide whether or not to immediately flush MFS changes to disk. (default: true) + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + * @description + * If from has multiple values then to must be a directory. + * + * If from has a single value and to exists and is a directory, from will be moved into to. + * + * If from has a single value and to exists and is a file, from must be a file and the contents of to will be replaced with the contents of from otherwise an error will be returned. + * + * If from is an IPFS path, and an MFS path exists with the same name, the IPFS path will be chosen. + * + * All values of from will be removed after the operation is complete unless they are an IPFS path. + */ + mv: (from, to, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(methods.mv(from, to, opts), cb) + }, + + /** + * Flush a given path's data to the disk. 
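
For context on the `write` wrapper above, which converts pull-stream sources to async iterators before handing them to MFS, a short sketch with made-up paths (again assuming an initialised `ipfs` instance):

```js
'use strict'

const values = require('pull-stream/sources/values')

async function writeExamples (ipfs) {
  // Plain Buffer, creating the file (and its parents) if needed.
  await ipfs.files.write('/notes/today.txt', Buffer.from('hello MFS\n'), {
    create: true,
    parents: true
  })

  // Pull-stream source: converted internally to an async iterator before
  // being handed to MFS.
  await ipfs.files.write('/notes/stream.txt', values([
    Buffer.from('chunk-1\n'),
    Buffer.from('chunk-2\n')
  ]), {
    create: true,
    truncate: true
  })
}
```
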
+ * + * @param {string | Array} [paths] - String paths to flush. (default: /) + * @param {function(Error): void} [cb] - Callback function. + * @returns {Promise | void} When callback is provided nothing is returned. + */ + flush: (paths, cb) => { + if (typeof paths === 'function') { + cb = paths + paths = undefined + } + return nodeify(methods.flush(paths), cb) + }, + + /** + * @typedef {Object} ListOutputFile + * @prop {string} name - Which is the file's name. + * @prop {string} type - Which is the object's type (directory or file). + * @prop {number} size - The size of the file in bytes. + * @prop {string} hash - The hash of the file. + */ + + /** + * @typedef {Object} ListOptions + * @prop {boolean} [long=false] - Value to decide whether or not to populate type, size and hash. (default: false) + * @prop {string} [cidBase=base58btc] - Which number base to use to format hashes - e.g. base32, base64 etc. (default: base58btc) + * @prop {boolean} [sort=false] - If true entries will be sorted by filename. (default: false) + */ + + /** + * List directories in the local mutable namespace. + * + * @param {string} [path="/"] - String to show listing for. (default: /) + * @param {ListOptions} [opts] - Options for list. + * @param {function(Error, Array): void} [cb] - Callback function. + * @returns {Promise> | void} When callback is provided nothing is returned. + */ + ls: (path, opts, cb) => { + const ls = async (path, opts = {}) => { + const files = await all(methods.ls(path, opts)) + + return files.map(mapLsFile(opts)) + } + + if (typeof path === 'function') { + cb = path + path = '/' + opts = {} + } + + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(ls(path, opts), cb) + }, + + /** + * Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The list will be yielded as Readable Streams. + * + * @param {string} [path="/"] - String to show listing for. (default: /) + * @param {ListOptions} [opts] - Options for list. + * @returns {ReadableStream} It returns a Readable Stream in Object mode that will yield {@link ListOutputFile} + */ + lsReadableStream: (path, opts = {}) => { + const stream = toReadableStream.obj(methods.ls(path, opts)) const through = new PassThrough({ objectMode: true }) stream.on('data', (file) => { - through.write(mapLsFile(options)(file)) + through.write(mapLsFile(opts)(file)) }) stream.on('error', (err) => { through.destroy(err) }) stream.on('end', (file, enc, cb) => { if (file) { - file = mapLsFile(options)(file) + file = mapLsFile(opts)(file) } through.end(file, enc, cb) }) return through - }), - lsPullStream: withPreload((path, options = {}) => { + }, + + /** + * Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The list will be yielded as PullStreams. + * + * @param {string} [path="/"] - String to show listing for. (default: /) + * @param {ListOptions} [opts] - Options for list. 
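
A brief sketch of the listing helpers above, assuming an initialised `ipfs` instance with some entries under a hypothetical `/docs` directory:

```js
async function listExamples (ipfs) {
  // Without `long` only names are populated; with it you also get type,
  // size and hash (the hash encoded with `cidBase`).
  const entries = await ipfs.files.ls('/docs', { long: true, cidBase: 'base58btc' })
  entries.forEach(file => console.log(file.name, file.type, file.size, file.hash))

  // The same listing as a Readable Stream in object mode.
  ipfs.files.lsReadableStream('/docs', { long: true })
    .on('data', file => console.log(file.name))
}
```
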
+ * @returns {PullStream} It returns a PullStream that will yield {@link ListOutputFile} + */ + lsPullStream: (path, opts = {}) => { return pull( - toPullStream.source(methods.ls(path, options)), - map(mapLsFile(options)) + toPullStream.source(methods.ls(path, opts)), + map(mapLsFile(opts)) ) - }), - mkdir: callbackify.variadic(methods.mkdir), - mv: callbackify.variadic(withPreload(methods.mv)), - read: callbackify.variadic(withPreload(async (path, options = {}) => { - return Buffer.concat(await all(methods.read(path, options))) - })), - readPullStream: withPreload((path, options = {}) => { - return toPullStream.source(methods.read(path, options)) - }), - readReadableStream: withPreload((path, options = {}) => { - return toReadableStream(methods.read(path, options)) - }), - rm: callbackify.variadic(methods.rm), - stat: callbackify.variadic(withPreload(async (path, options = {}) => { - const stats = await methods.stat(path, options) - - stats.hash = cidToString(stats.cid, { base: options.cidBase }) - delete stats.cid - - return stats - })), - write: callbackify.variadic(async (path, content, options = {}) => { - if (isPullStream.isSource(content)) { - content = pullStreamToAsyncIterator(content) - } - - await methods.write(path, content, options) - }) + } } } diff --git a/src/core/components/resolve.js b/src/core/components/resolve.js index db9039551e..497ba2447a 100644 --- a/src/core/components/resolve.js +++ b/src/core/components/resolve.js @@ -1,64 +1,82 @@ 'use strict' -const promisify = require('promisify-es6') const isIpfs = require('is-ipfs') -const setImmediate = require('async/setImmediate') const CID = require('cids') +const nodeify = require('promise-nodeify') const { cidToString } = require('../../utils/cid') -module.exports = (self) => { - return promisify(async (name, opts, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } +/** + * @typedef { import("../index") } IPFS + */ - opts = opts || {} +/** + * @typedef {Object} ResolveOptions + * @prop {string} cidBase - Multibase codec name the CID in the resolved path will be encoded with + * @prop {boolean} [recursive=true] - Resolve until the result is an IPFS name + * + */ +/** @typedef {(err: Error, path: string) => void} ResolveCallback */ + +/** + * @callback ResolveWrapper - This wrapper adds support for callbacks and promises + * @param {string} name - Path to resolve + * @param {ResolveOptions} opts - Options for resolve + * @param {ResolveCallback} [cb] - Optional callback function + * @returns {Promise | void} - When callback is provided nothing is returned + */ + +/** + * IPFS Resolve factory + * + * @param {IPFS} ipfs + * @returns {ResolveWrapper} + */ +module.exports = (ipfs) => { + /** + * IPFS Resolve - Resolve the value of names to IPFS + * + * @param {String} name + * @param {ResolveOptions} [opts={}] + * @returns {Promise} + */ + const resolve = async (name, opts = {}) => { if (!isIpfs.path(name)) { - return setImmediate(() => cb(new Error('invalid argument ' + name))) + throw new Error('invalid argument ' + name) } - // TODO remove this and update subsequent code when IPNS is implemented - if (!isIpfs.ipfsPath(name)) { - return setImmediate(() => cb(new Error('resolve non-IPFS names is not implemented'))) + if (isIpfs.ipnsPath(name)) { + name = await ipfs.name.resolve(name, opts) } - const split = name.split('/') // ['', 'ipfs', 'hash', ...path] - const cid = new CID(split[2]) + const [, , hash, ...rest] = name.split('/') // ['', 'ipfs', 'hash', ...path] + const cid = new CID(hash) - if 
(split.length === 3) { - return setImmediate(() => cb(null, `/ipfs/${cidToString(cid, { base: opts.cidBase })}`)) + // nothing to resolve return the input + if (rest.length === 0) { + return `/ipfs/${cidToString(cid, { base: opts.cidBase })}` } - const path = split.slice(3).join('/') - - const results = self._ipld.resolve(cid, path) + const path = rest.join('/') + const results = ipfs._ipld.resolve(cid, path) let value = cid let remainderPath = path - try { - for await (const result of results) { - if (result.remainderPath === '') { - // Use values from previous iteration if the value isn't a CID - if (CID.isCID(result.value)) { - value = result.value - remainderPath = '' - } - - if (result.value && CID.isCID(result.value.Hash)) { - value = result.value.Hash - remainderPath = '' - } - - break - } + for await (const result of results) { + if (CID.isCID(result.value)) { value = result.value remainderPath = result.remainderPath } - } catch (error) { - return cb(error) } - return cb(null, `/ipfs/${cidToString(value, { base: opts.cidBase })}${remainderPath ? '/' + remainderPath : ''}`) - }) + + return `/ipfs/${cidToString(value, { base: opts.cidBase })}${remainderPath ? '/' + remainderPath : ''}` + } + + return (name, opts = {}, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return nodeify(resolve(name, opts), cb) + } } diff --git a/src/http/api/resources/resolve.js b/src/http/api/resources/resolve.js index e55455cd9b..906e0f0e9d 100644 --- a/src/http/api/resources/resolve.js +++ b/src/http/api/resources/resolve.js @@ -10,13 +10,7 @@ log.error = debug('ipfs:http-api:resolve:error') module.exports = { validate: { query: Joi.object().keys({ - r: Joi.alternatives() - .when('recursive', { - is: Joi.any().exist(), - then: Joi.any().forbidden(), - otherwise: Joi.boolean() - }), - recursive: Joi.boolean(), + recursive: Joi.boolean().default(true), arg: Joi.string().required(), 'cid-base': Joi.string().valid(multibase.names) }).unknown() @@ -24,7 +18,7 @@ module.exports = { async handler (request, h) { const { ipfs } = request.server.app const name = request.query.arg - const recursive = request.query.r || request.query.recursive || false + const recursive = request.query.recursive const cidBase = request.query['cid-base'] log(name, { recursive, cidBase }) diff --git a/test/cli/resolve.js b/test/cli/resolve.js deleted file mode 100644 index 190eba0c66..0000000000 --- a/test/cli/resolve.js +++ /dev/null @@ -1,74 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const path = require('path') -const expect = require('chai').expect -const isIpfs = require('is-ipfs') -const CID = require('cids') - -const runOnAndOff = require('../utils/on-and-off') - -describe('resolve', () => runOnAndOff((thing) => { - let ipfs - - before(() => { - ipfs = thing.ipfs - }) - - it('should resolve an IPFS hash', () => { - const filePath = path.join(path.resolve(__dirname, '..', '..'), 'src/init-files/init-docs/readme') - let hash - - return ipfs(`add ${filePath}`) - .then((out) => { - hash = out.split(' ')[1] - expect(isIpfs.cid(hash)).to.be.true() - return ipfs(`resolve /ipfs/${hash}`) - }) - .then((out) => { - expect(out).to.contain(`/ipfs/${hash}`) - }) - }) - - it('should resolve an IPFS hash and print CID encoded in specified base', function () { - this.timeout(10 * 1000) - - const filePath = path.join(path.resolve(__dirname, '..', '..'), 'src/init-files/init-docs/readme') - let b58Hash - let b64Hash - - return ipfs(`add ${filePath}`) - .then((out) => { - b58Hash = out.split(' ')[1] - 
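
Back on the HTTP route changed above: omitting `recursive` now means recursive resolution, and the old `r` shorthand is no longer honoured. A sketch in the same `inject` style the HTTP tests use (`api` is the hapi server, the hash is illustrative):

```js
async function resolveOverHttp (api) {
  const hash = 'QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks'

  // No `recursive` in the query string: the Joi schema now defaults it to true.
  let res = await api.inject({
    method: 'POST',
    url: `/api/v0/resolve?arg=/ipfs/${hash}`
  })
  console.log(res.result.Path)

  // Opting out now has to be explicit.
  res = await api.inject({
    method: 'POST',
    url: `/api/v0/resolve?arg=/ipfs/${hash}&recursive=false`
  })
  console.log(res.result.Path)
}
```
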
expect(isIpfs.cid(b58Hash)).to.be.true() - b64Hash = new CID(b58Hash).toV1().toBaseEncodedString('base64') - return ipfs(`resolve /ipfs/${b58Hash} --cid-base=base64`) - }) - .then((out) => { - expect(out).to.contain(`/ipfs/${b64Hash}`) - }) - }) - - it('should resolve an IPFS path link', function () { - this.timeout(10 * 1000) - - const filePath = path.join(path.resolve(__dirname, '..', '..'), 'src/init-files/init-docs/readme') - let fileHash, rootHash - - return ipfs(`add ${filePath} --wrap-with-directory`) - .then((out) => { - const lines = out.split('\n') - - fileHash = lines[0].split(' ')[1] - rootHash = lines[1].split(' ')[1] - - expect(isIpfs.cid(fileHash)).to.be.true() - expect(isIpfs.cid(rootHash)).to.be.true() - - return ipfs(`resolve /ipfs/${rootHash}/readme`) - }) - .then((out) => { - expect(out).to.contain(`/ipfs/${fileHash}`) - }) - }) -})) diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index f2151a88df..4da19bc846 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -92,19 +92,11 @@ describe('interface-ipfs-core tests', function () { tests.miscellaneous(CommonFactory.create({ // No need to stop, because the test suite does a 'stop' test. - createTeardown: () => cb => cb() - }), { - skip: [ - { - name: 'should resolve an IPNS DNS link', - reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - }, - { - name: 'should resolve IPNS link recursively', - reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - } - ] - }) + createTeardown: () => cb => cb(), + spawnOptions: { + args: ['--pass ipfs-is-awesome-software', '--offline'] + } + })) tests.name(CommonFactory.create({ spawnOptions: { @@ -153,7 +145,12 @@ describe('interface-ipfs-core tests', function () { initOptions: { bits: 512 } } }), { - skip: isNode ? null : { + skip: isNode ? 
[ + { + name: 'should publish message from string', + reason: 'not implemented' + } + ] : { reason: 'FIXME: disabled because no swarm addresses' } }) diff --git a/test/http-api/inject/resolve.js b/test/http-api/inject/resolve.js index 21baf6ba71..c228a06607 100644 --- a/test/http-api/inject/resolve.js +++ b/test/http-api/inject/resolve.js @@ -1,11 +1,9 @@ /* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 8] */ 'use strict' const expect = require('chai').expect const FormData = require('form-data') const streamToPromise = require('stream-to-promise') -const multibase = require('multibase') module.exports = (http) => { describe('resolve', () => { @@ -15,30 +13,6 @@ module.exports = (http) => { api = http.api._httpApi._apiServers[0] }) - it('should resolve a path and return a base2 encoded CID', async () => { - const form = new FormData() - form.append('data', Buffer.from('TEST' + Date.now())) - const headers = form.getHeaders() - - const payload = await streamToPromise(form) - let res = await api.inject({ - method: 'POST', - url: '/api/v0/add', - headers: headers, - payload: payload - }) - expect(res.statusCode).to.equal(200) - const hash = JSON.parse(res.result).Hash - - res = await api.inject({ - method: 'POST', - url: `/api/v0/resolve?arg=/ipfs/${hash}&cid-base=base2` - }) - - expect(res.statusCode).to.equal(200) - expect(multibase.isEncoded(res.result.Path.replace('/ipfs/', ''))).to.deep.equal('base2') - }) - it('should not resolve a path for invalid cid-base option', async () => { const form = new FormData() form.append('data', Buffer.from('TEST' + Date.now())) diff --git a/test/http-api/interface.js b/test/http-api/interface.js index 23b5916624..f0b2cbc1f8 100644 --- a/test/http-api/interface.js +++ b/test/http-api/interface.js @@ -91,23 +91,11 @@ describe('interface-ipfs-core over ipfs-http-client tests', () => { tests.miscellaneous(CommonFactory.create({ // No need to stop, because the test suite does a 'stop' test. - createTeardown: () => cb => cb() - }), { - skip: [ - { - name: 'should resolve an IPNS DNS link', - reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - }, - { - name: 'should resolve IPNS link recursively', - reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - }, - { - name: 'should recursively resolve ipfs.io', - reason: 'TODO: ipfs.io dnslink=/ipns/website.ipfs.io & IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - } - ] - }) + createTeardown: () => cb => cb(), + spawnOptions: { + args: ['--pass ipfs-is-awesome-software', '--offline'] + } + })) tests.name(CommonFactory.create({ spawnOptions: { @@ -151,7 +139,14 @@ describe('interface-ipfs-core over ipfs-http-client tests', () => { args: ['--enable-pubsub'], initOptions: { bits: 512 } } - })) + }), { + skip: [ + { + name: 'should publish message from string', + reason: 'not implemented' + } + ] + }) tests.repo(defaultCommonFactory)
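
Finally, a usage sketch of the reworked `ipfs.resolve` itself, covering the new IPNS support and the `recursive: true` default. Names and CIDs are illustrative, and `ipfs` is assumed to be an initialised, online js-ipfs instance:

```js
async function resolveExamples (ipfs) {
  // Plain IPFS path: returned as-is, with the CID re-encoded in the requested base.
  console.log(await ipfs.resolve('/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks'))

  // Path traversal: walks the DAG and returns the CID of the linked node,
  // plus any remainder path.
  console.log(await ipfs.resolve('/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks/readme', { cidBase: 'base32' }))

  // IPNS name: recursive now defaults to true, so resolution keeps going
  // until an /ipfs/ path is reached; pass { recursive: false } for a single
  // IPNS lookup only.
  console.log(await ipfs.resolve('/ipns/ipfs.io'))

  // The callback style is still available.
  ipfs.resolve('/ipns/ipfs.io', { recursive: true }, (err, path) => {
    if (err) throw err
    console.log(path)
  })
}
```
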