From d9569ab4364bdea407f97ee4beb22d21ef484d72 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Thu, 23 Jan 2020 11:27:27 +0000 Subject: [PATCH] refactor: remove Node.js and pull streams (#66) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: remove Node.js and pull streams Also changes `stat` and `ls` to return CID's not strings from core and removes the `long` option from `ls` in core (is applied by CLI/HTTP API). * fix: cid formatting in files.stat CLI and HTTP API * fix: revert to older dependencies 😢 * chore: eslint-disable-line require-await * fix: stream is objectMode * chore: rebase and fix tests Co-authored-by: Alex Potsides --- package.json | 9 ++-- src/cli/flush.js | 3 +- src/cli/ls.js | 66 ++++++++------------------- src/cli/read.js | 28 +++--------- src/cli/stat.js | 8 ++-- src/core/write.js | 8 ++-- src/http/flush.js | 4 +- src/http/ls.js | 30 ++++++------ src/http/read.js | 5 +- src/http/stat.js | 7 ++- test/cli/ls.js | 113 ++++++++++++++++++++++++++++------------------ test/cli/read.js | 23 +++++----- test/cli/stat.js | 8 ++-- test/http/ls.js | 93 +++++++------------------------------- test/http/read.js | 28 ++++-------- test/http/stat.js | 15 +++--- 16 files changed, 178 insertions(+), 270 deletions(-) diff --git a/package.json b/package.json index ee8b0b8..859ef07 100644 --- a/package.json +++ b/package.json @@ -54,7 +54,6 @@ "ipfs-block-service": "~0.16.0", "ipfs-repo": "^0.30.1", "ipld": "~0.25.0", - "it-all": "^1.0.1", "memdown": "^5.1.0", "nyc": "^15.0.0", "sinon": "^8.0.4", @@ -75,16 +74,16 @@ "ipfs-unixfs": "^0.3.0", "ipfs-unixfs-exporter": "^0.41.0", "ipfs-unixfs-importer": "^0.44.0", - "ipfs-utils": "^0.4.2", + "ipfs-utils": "^0.7.0", "ipld-dag-pb": "^0.18.0", + "it-all": "^1.0.1", "it-last": "^1.0.1", + "it-to-stream": "^0.1.1", "it-pipe": "^1.0.1", "joi-browser": "^13.4.0", "mortice": "^2.0.0", "multicodec": "^1.0.0", - "multihashes": "^0.4.14", - "once": "^1.4.0", - "pull-stream": "^3.6.9" + "multihashes": "^0.4.14" }, "contributors": [ "Alan Shaw ", diff --git a/src/cli/flush.js b/src/cli/flush.js index 9fa587d..4da1f86 100644 --- a/src/cli/flush.js +++ b/src/cli/flush.js @@ -11,7 +11,6 @@ module.exports = { builder: { 'cid-base': { - default: 'base58btc', describe: 'CID base to use.' } }, @@ -28,7 +27,7 @@ module.exports = { const ipfs = await getIpfs() let cid = await ipfs.files.flush(path || FILE_SEPARATOR, {}) - if (cidBase !== 'base58btc' && cid.version === 0) { + if (cidBase && cidBase !== 'base58btc' && cid.version === 0) { cid = cid.toV1() } diff --git a/src/cli/ls.js b/src/cli/ls.js index c66cc77..863f40f 100644 --- a/src/cli/ls.js +++ b/src/cli/ls.js @@ -1,8 +1,6 @@ 'use strict' -const pull = require('pull-stream/pull') -const onEnd = require('pull-stream/sinks/on-end') -const through = require('pull-stream/throughs/through') +const all = require('it-all') const { asBoolean } = require('./utils') @@ -33,7 +31,6 @@ module.exports = { describe: 'Sort entries by name' }, 'cid-base': { - default: 'base58btc', describe: 'CID base to use.' 
} }, @@ -50,53 +47,30 @@ module.exports = { argv.resolve((async () => { const ipfs = await getIpfs() - return new Promise((resolve, reject) => { - if (sort) { - ipfs.files.ls(path || FILE_SEPARATOR) - .then(files => { - // https://github.com/ipfs/go-ipfs/issues/5181 - if (sort) { - files = files.sort((a, b) => { - return a.name.localeCompare(b.name) - }) - } - if (long) { - files.forEach(file => { - print(`${formatMode(file.mode, file.type === 1)}\t${formatMtime(file.mtime)}\t${file.name}\t${file.hash}\t${file.size}`) - }) - } else { - files.forEach(link => print(link.name)) - } + const printListing = file => { + if (long) { + print(`${formatMode(file.mode, file.type === 1)}\t${formatMtime(file.mtime)}\t${file.name}\t${file.cid.toString(cidBase)}\t${file.size}`) + } else { + print(file.name) + } + } - resolve() - }) - .catch(reject) + // https://github.com/ipfs/go-ipfs/issues/5181 + if (sort) { + let files = await all(ipfs.files.ls(path || FILE_SEPARATOR)) - return - } + files = files.sort((a, b) => { + return a.name.localeCompare(b.name) + }) - pull( - ipfs.files.lsPullStream(path, { - long, - cidBase - }), - through(file => { - if (long) { - print(`${formatMode(file.mode, file.type === 1)}\t${formatMtime(file.mtime)}\t${file.name}\t${file.hash}\t${file.size}`) - } else { - print(file.name) - } - }), - onEnd((error) => { - if (error) { - return reject(error) - } + files.forEach(printListing) + return + } - resolve() - }) - ) - }) + for await (const file of ipfs.files.ls(path || FILE_SEPARATOR)) { + printListing(file) + } })()) } } diff --git a/src/cli/read.js b/src/cli/read.js index a0cf1b3..512b9db 100644 --- a/src/cli/read.js +++ b/src/cli/read.js @@ -1,9 +1,5 @@ 'use strict' -const pull = require('pull-stream/pull') -const through = require('pull-stream/throughs/through') -const onEnd = require('pull-stream/sinks/on-end') - module.exports = { command: 'read ', @@ -34,24 +30,12 @@ module.exports = { argv.resolve((async () => { const ipfs = await getIpfs() - return new Promise((resolve, reject) => { - pull( - ipfs.files.readPullStream(path, { - offset, - length - }), - through(buffer => { - print(buffer, false) - }), - onEnd((error) => { - if (error) { - return reject(error) - } - - resolve() - }) - ) - }) + for await (const buffer of ipfs.files.read(path, { + offset, + length + })) { + print(buffer, false) + } })()) } } diff --git a/src/cli/stat.js b/src/cli/stat.js index 15f18ef..f0ea53b 100644 --- a/src/cli/stat.js +++ b/src/cli/stat.js @@ -46,7 +46,6 @@ Mtime: `, describe: 'Compute the amount of the dag that is local, and if possible the total size' }, 'cid-base': { - default: 'base58btc', describe: 'CID base to use.' 
} }, @@ -59,7 +58,8 @@ Mtime: `, format, hash, size, - withLocal + withLocal, + cidBase } = argv argv.resolve((async () => { @@ -70,7 +70,7 @@ Mtime: `, }) .then((stats) => { if (hash) { - return print(stats.hash) + return print(stats.cid.toString(cidBase)) } if (size) { @@ -78,7 +78,7 @@ Mtime: `, } print(format - .replace('', stats.hash) + .replace('', stats.cid.toString(cidBase)) .replace('', stats.size) .replace('', stats.cumulativeSize) .replace('', stats.blocks) diff --git a/src/core/write.js b/src/core/write.js index 2c0c237..5a10e36 100644 --- a/src/core/write.js +++ b/src/core/write.js @@ -160,7 +160,7 @@ const write = async (context, source, destination, options) => { limitAsyncStreamBytes(source, options.length) ) - const content = countBytesStreamed(catAsyncInterators(sources), (bytesWritten) => { + const content = countBytesStreamed(catAsyncIterators(sources), (bytesWritten) => { if (destination.unixfs && !options.truncate) { // if we've done reading from the new source and we are not going // to truncate the file, add the end of the existing file to the output @@ -254,11 +254,9 @@ const asyncZeroes = (count, chunkSize = MAX_CHUNK_SIZE) => { return limitAsyncStreamBytes(stream, count) } -const catAsyncInterators = async function * (sources) { +const catAsyncIterators = async function * (sources) { // eslint-disable-line require-await for (let i = 0; i < sources.length; i++) { - for await (const buf of sources[i]()) { - yield buf - } + yield * sources[i]() } } diff --git a/src/http/flush.js b/src/http/flush.js index 65dc11b..dda494d 100644 --- a/src/http/flush.js +++ b/src/http/flush.js @@ -20,7 +20,7 @@ const mfsFlush = { let cid = await ipfs.files.flush(arg || FILE_SEPARATOR, {}) - if (cidBase !== 'base58btc' && cid.version === 0) { + if (cidBase && cidBase !== 'base58btc' && cid.version === 0) { cid = cid.toV1() } @@ -36,7 +36,7 @@ const mfsFlush = { }, query: Joi.object().keys({ arg: Joi.string(), - cidBase: Joi.string().default('base58btc') + cidBase: Joi.string() }) } } diff --git a/src/http/ls.js b/src/http/ls.js index 5b00562..3da9257 100644 --- a/src/http/ls.js +++ b/src/http/ls.js @@ -4,13 +4,17 @@ const Joi = require('@hapi/joi') const { PassThrough } = require('stream') +const toStream = require('it-to-stream') +const all = require('it-all') + +const mapEntry = (entry, options) => { + options = options || {} -const mapEntry = (entry) => { const output = { Name: entry.name, - Type: entry.type, - Size: entry.size, - Hash: entry.hash, + Type: options.long ? entry.type : 0, + Size: options.long ? entry.size || 0 : 0, + Hash: entry.cid.toString(options.cidBase), Mode: entry.mode.toString(8).padStart(4, '0') } @@ -41,21 +45,18 @@ const mfsLs = { if (stream) { const responseStream = await new Promise((resolve, reject) => { - const readableStream = ipfs.files.lsReadableStream(arg, { - long, - cidBase - }) + const readableStream = toStream.readable(ipfs.files.ls(arg), { objectMode: true }) const passThrough = new PassThrough() readableStream.on('data', (entry) => { resolve(passThrough) - passThrough.write(JSON.stringify(mapEntry(entry)) + '\n') + passThrough.write(JSON.stringify(mapEntry(entry, { cidBase, long })) + '\n') }) readableStream.once('end', (entry) => { resolve(passThrough) - passThrough.end(entry ? JSON.stringify(mapEntry(entry)) + '\n' : undefined) + passThrough.end(entry ? 
JSON.stringify(mapEntry(entry, { cidBase, long })) + '\n' : undefined) }) readableStream.once('error', (err) => { @@ -67,13 +68,10 @@ const mfsLs = { return h.response(responseStream).header('X-Stream-Output', '1') } - const files = await ipfs.files.ls(arg, { - long, - cidBase - }) + const files = await all(ipfs.files.ls(arg)) return h.response({ - Entries: files.map(mapEntry) + Entries: files.map(entry => mapEntry(entry, { cidBase, long })) }) }, options: { @@ -85,7 +83,7 @@ const mfsLs = { query: Joi.object().keys({ arg: Joi.string().default('/'), long: Joi.boolean().default(false), - cidBase: Joi.string().default('base58btc'), + cidBase: Joi.string(), stream: Joi.boolean().default(false) }) .rename('l', 'long', { diff --git a/src/http/read.js b/src/http/read.js index 6be1aee..c2e40d0 100644 --- a/src/http/read.js +++ b/src/http/read.js @@ -4,6 +4,7 @@ const Joi = require('@hapi/joi') const { PassThrough } = require('stream') +const toStream = require('it-to-stream') const mfsRead = { method: 'POST', @@ -19,10 +20,10 @@ const mfsRead = { } = request.query const responseStream = await new Promise((resolve, reject) => { - const stream = ipfs.files.readReadableStream(arg, { + const stream = toStream.readable(ipfs.files.read(arg, { offset, length - }) + })) stream.once('data', (chunk) => { const passThrough = new PassThrough() diff --git a/src/http/stat.js b/src/http/stat.js index d8b5563..08aee3b 100644 --- a/src/http/stat.js +++ b/src/http/stat.js @@ -20,15 +20,14 @@ const mfsStat = { const stats = await ipfs.files.stat(arg, { hash, size, - withLocal, - cidBase + withLocal }) return h.response({ Type: stats.type, Blocks: stats.blocks, Size: stats.size, - Hash: stats.hash, + Hash: stats.cid.toString(cidBase), CumulativeSize: stats.cumulativeSize, WithLocality: stats.withLocality, Local: stats.local, @@ -49,7 +48,7 @@ const mfsStat = { hash: Joi.boolean().default(false), size: Joi.boolean().default(false), withLocal: Joi.boolean().default(false), - cidBase: Joi.string().default('base58btc') + cidBase: Joi.string() }) } } diff --git a/test/cli/ls.js b/test/cli/ls.js index 478f7fe..cfdedf2 100644 --- a/test/cli/ls.js +++ b/test/cli/ls.js @@ -4,8 +4,9 @@ const expect = require('../helpers/chai') const cli = require('../helpers/cli') const sinon = require('sinon') -const values = require('pull-stream/sources/values') const isNode = require('detect-node') +const CID = require('cids') +const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') describe('ls', () => { if (!isNode) { @@ -20,7 +21,7 @@ describe('ls', () => { output = '' ipfs = { files: { - ls: sinon.stub().resolves([]) + ls: sinon.stub().returns([]) } } print = (msg = '', newline = true) => { @@ -50,148 +51,172 @@ describe('ls', () => { it('should list a path with details', async () => { const files = [{ - hash: 'file-name', + cid: fileCid, name: 'file-name', size: 'file-size', - mode: 'file-mode', - mtime: 'file-mtime' + mode: 0o755, + mtime: { + secs: Date.now() / 1000, + nsecs: 0 + } }] - ipfs.files.ls = sinon.stub().resolves(files) + ipfs.files.ls = sinon.stub().returns(files) await cli('files ls --long /foo', { ipfs, print }) expect(ipfs.files.ls.callCount).to.equal(1) - expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].cid.toString()) expect(output).to.include(files[0].name) expect(output).to.include(files[0].size) }) it('should list a path with details (short option)', async () => { const files = [{ - hash: 'file-name', + cid: fileCid, name: 'file-name', size: 'file-size', 
- mode: 'file-mode', - mtime: 'file-mtime' + mode: 0o755, + mtime: { + secs: Date.now() / 1000, + nsecs: 0 + } }] - ipfs.files.ls = sinon.stub().resolves(files) + ipfs.files.ls = sinon.stub().returns(files) await cli('files ls -l /foo', { ipfs, print }) expect(ipfs.files.ls.callCount).to.equal(1) - expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].cid.toString()) expect(output).to.include(files[0].name) expect(output).to.include(files[0].size) }) it('should list a path with details', async () => { const files = [{ - hash: 'file-name', + cid: fileCid, name: 'file-name', size: 'file-size', - mode: 'file-mode', - mtime: 'file-mtime' + mode: 0o755, + mtime: { + secs: Date.now() / 1000, + nsecs: 0 + } }] - ipfs.files.ls = sinon.stub().resolves(files) + ipfs.files.ls = sinon.stub().returns(files) await cli('files ls --long /foo', { ipfs, print }) expect(ipfs.files.ls.callCount).to.equal(1) - expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].cid.toString()) expect(output).to.include(files[0].name) expect(output).to.include(files[0].size) }) it('should list a path with details (short option)', async () => { const files = [{ - hash: 'file-name', + cid: fileCid, name: 'file-name', size: 'file-size', - mode: 'file-mode', - mtime: 'file-mtime' + mode: 0o755, + mtime: { + secs: Date.now() / 1000, + nsecs: 0 + } }] - ipfs.files.ls = sinon.stub().resolves(files) + ipfs.files.ls = sinon.stub().returns(files) await cli('files ls -l /foo', { ipfs, print }) expect(ipfs.files.ls.callCount).to.equal(1) - expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].cid.toString()) expect(output).to.include(files[0].name) expect(output).to.include(files[0].size) }) it('should list a path without sorting', async () => { const files = [{ - hash: 'file-name', + cid: fileCid, name: 'file-name', size: 'file-size', - mode: 'file-mode', - mtime: 'file-mtime' + mode: 0o755, + mtime: { + secs: Date.now() / 1000, + nsecs: 0 + } }] - ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + ipfs.files.ls = sinon.stub().returns(files) await cli('files ls --sort false /foo', { ipfs, print }) - expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(ipfs.files.ls.callCount).to.equal(1) expect(output).to.include(files[0].name) }) it('should list a path without sorting (short option)', async () => { const files = [{ - hash: 'file-name', + cid: fileCid, name: 'file-name', size: 'file-size', - mode: 'file-mode', - mtime: 'file-mtime' + mode: 0o755, + mtime: { + secs: Date.now() / 1000, + nsecs: 0 + } }] - ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + ipfs.files.ls = sinon.stub().returns(files) await cli('files ls -s false /foo', { ipfs, print }) - expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(ipfs.files.ls.callCount).to.equal(1) expect(output).to.include(files[0].name) }) it('should list a path with details without sorting', async () => { const files = [{ - hash: 'file-name', + cid: fileCid, name: 'file-name', size: 'file-size', - mode: 'file-mode', - mtime: 'file-mtime' + mode: 0o755, + mtime: { + secs: Date.now() / 1000, + nsecs: 0 + } }] - ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + ipfs.files.ls = sinon.stub().returns(files) await cli('files ls --long --sort false /foo', { ipfs, print }) - expect(ipfs.files.lsPullStream.callCount).to.equal(1) - expect(output).to.include(files[0].hash) + expect(ipfs.files.ls.callCount).to.equal(1) + expect(output).to.include(files[0].cid.toString()) 
expect(output).to.include(files[0].name) expect(output).to.include(files[0].size) }) it('should list a path with details without sorting (short option)', async () => { const files = [{ - hash: 'file-name', + cid: fileCid, name: 'file-name', size: 'file-size', - mode: 'file-mode', - mtime: 'file-mtime' + mode: 0o755, + mtime: { + secs: Date.now() / 1000, + nsecs: 0 + } }] - ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + ipfs.files.ls = sinon.stub().returns(files) await cli('files ls -l -s false /foo', { ipfs, print }) - expect(ipfs.files.lsPullStream.callCount).to.equal(1) - expect(output).to.include(files[0].hash) + expect(ipfs.files.ls.callCount).to.equal(1) + expect(output).to.include(files[0].cid.toString()) expect(output).to.include(files[0].name) expect(output).to.include(files[0].size) }) diff --git a/test/cli/read.js b/test/cli/read.js index 3c5620f..34aa0e4 100644 --- a/test/cli/read.js +++ b/test/cli/read.js @@ -4,7 +4,6 @@ const expect = require('../helpers/chai') const cli = require('../helpers/cli') const sinon = require('sinon') -const values = require('pull-stream/sources/values') const isNode = require('detect-node') function defaultOptions (modification = {}) { @@ -34,7 +33,7 @@ describe('read', () => { output = '' ipfs = { files: { - readPullStream: sinon.stub().returns(values(['hello world'])) + read: sinon.stub().returns(['hello world']) } } print = (msg = '', newline = true) => { @@ -45,8 +44,8 @@ describe('read', () => { it('should read a path', async () => { await cli(`files read ${path}`, { ipfs, print }) - expect(ipfs.files.readPullStream.callCount).to.equal(1) - expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + expect(ipfs.files.read.callCount).to.equal(1) + expect(ipfs.files.read.getCall(0).args).to.deep.equal([ path, defaultOptions() ]) @@ -58,8 +57,8 @@ describe('read', () => { await cli(`files read --offset ${offset} ${path}`, { ipfs, print }) - expect(ipfs.files.readPullStream.callCount).to.equal(1) - expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + expect(ipfs.files.read.callCount).to.equal(1) + expect(ipfs.files.read.getCall(0).args).to.deep.equal([ path, defaultOptions({ offset @@ -73,8 +72,8 @@ describe('read', () => { await cli(`files read -o ${offset} ${path}`, { ipfs, print }) - expect(ipfs.files.readPullStream.callCount).to.equal(1) - expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + expect(ipfs.files.read.callCount).to.equal(1) + expect(ipfs.files.read.getCall(0).args).to.deep.equal([ path, { offset, length: undefined @@ -88,8 +87,8 @@ describe('read', () => { await cli(`files read --length ${length} ${path}`, { ipfs, print }) - expect(ipfs.files.readPullStream.callCount).to.equal(1) - expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + expect(ipfs.files.read.callCount).to.equal(1) + expect(ipfs.files.read.getCall(0).args).to.deep.equal([ path, defaultOptions({ length @@ -103,8 +102,8 @@ describe('read', () => { await cli(`files read -l ${length} ${path}`, { ipfs, print }) - expect(ipfs.files.readPullStream.callCount).to.equal(1) - expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + expect(ipfs.files.read.callCount).to.equal(1) + expect(ipfs.files.read.getCall(0).args).to.deep.equal([ path, defaultOptions({ length diff --git a/test/cli/stat.js b/test/cli/stat.js index 7f3e441..20cded1 100644 --- a/test/cli/stat.js +++ b/test/cli/stat.js @@ -5,6 +5,8 @@ const expect = require('../helpers/chai') const cli = require('../helpers/cli') const sinon = 
require('sinon') const isNode = require('detect-node') +const CID = require('cids') +const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') function defaultOptions (modification = {}) { const options = { @@ -33,7 +35,7 @@ describe('stat', () => { ipfs = { files: { stat: sinon.stub().resolves({ - hash: 'stats-hash', + cid: fileCid, size: 'stats-size', cumulativeSize: 'stats-cumulativeSize', blocks: 'stats-blocks', @@ -93,7 +95,7 @@ describe('stat', () => { path, defaultOptions() ]) - expect(output).to.equal('stats-hash\n') + expect(output).to.equal(`${fileCid}\n`) }) it('should stat a path and only show hashes (short option)', async () => { @@ -104,7 +106,7 @@ describe('stat', () => { path, defaultOptions() ]) - expect(output).to.equal('stats-hash\n') + expect(output).to.equal(`${fileCid}\n`) }) it('should stat a path and only show sizes', async () => { diff --git a/test/http/ls.js b/test/http/ls.js index cb7c602..0d68d56 100644 --- a/test/http/ls.js +++ b/test/http/ls.js @@ -4,20 +4,8 @@ const expect = require('../helpers/chai') const http = require('../helpers/http') const sinon = require('sinon') -const PassThrough = require('stream').PassThrough - -function defaultOptions (modification = {}) { - const options = { - cidBase: 'base58btc', - long: false - } - - Object.keys(modification).forEach(key => { - options[key] = modification[key] - }) - - return options -} +const CID = require('cids') +const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') describe('ls', () => { const path = '/foo' @@ -25,7 +13,7 @@ describe('ls', () => { name: 'file-name', type: 'file-type', size: 'file-size', - hash: 'file-hash', + cid: fileCid, mode: 'file-mode', mtime: { secs: 'file-mtime-secs', @@ -37,13 +25,13 @@ describe('ls', () => { beforeEach(() => { ipfs = { files: { - ls: sinon.stub().resolves([]) + ls: sinon.stub().returns([]) } } }) it('should list a path', async () => { - ipfs.files.ls = sinon.stub().resolves([file]) + ipfs.files.ls = sinon.stub().returns([file]) const response = await http({ method: 'POST', @@ -52,14 +40,13 @@ describe('ls', () => { expect(ipfs.files.ls.callCount).to.equal(1) expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ - path, - defaultOptions() + path ]) expect(response).to.have.nested.property('result.Entries.length', 1) expect(response).to.have.nested.property('result.Entries[0].Name', file.name) - expect(response).to.have.nested.property('result.Entries[0].Type', file.type) - expect(response).to.have.nested.property('result.Entries[0].Size', file.size) - expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash) + expect(response).to.have.nested.property('result.Entries[0].Type', 0) + expect(response).to.have.nested.property('result.Entries[0].Size', 0) + expect(response).to.have.nested.property('result.Entries[0].Hash', file.cid.toString()) }) it('should list without a path', async () => { @@ -70,8 +57,7 @@ describe('ls', () => { expect(ipfs.files.ls.callCount).to.equal(1) expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ - '/', - defaultOptions() + '/' ]) }) @@ -80,14 +66,14 @@ describe('ls', () => { name: 'file-name', type: 'file-type', size: 'file-size', - hash: 'file-hash', + cid: fileCid, mode: 'file-mode', mtime: { secs: 'file-mtime-secs', nsecs: 'file-mtime-nsecs' } } - ipfs.files.ls = sinon.stub().resolves([file]) + ipfs.files.ls = sinon.stub().returns([file]) const response = await http({ method: 'POST', @@ -96,74 +82,29 @@ describe('ls', () => { 
expect(ipfs.files.ls.callCount).to.equal(1) expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ - path, - defaultOptions({ - long: true - }) + path ]) + expect(response).to.have.nested.property('result.Entries.length', 1) expect(response).to.have.nested.property('result.Entries[0].Name', file.name) expect(response).to.have.nested.property('result.Entries[0].Type', file.type) expect(response).to.have.nested.property('result.Entries[0].Size', file.size) - expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash) + expect(response).to.have.nested.property('result.Entries[0].Hash', file.cid.toString()) expect(response).to.have.nested.property('result.Entries[0].Mode', file.mode) expect(response).to.have.nested.property('result.Entries[0].Mtime', file.mtime.secs) expect(response).to.have.nested.property('result.Entries[0].MtimeNsecs', file.mtime.nsecs) }) it('should stream a path', async () => { - const stream = new PassThrough({ - objectMode: true - }) - stream.emit('data', file) - stream.end() - ipfs.files.lsReadableStream = sinon.stub().returns(stream) + ipfs.files.ls = sinon.stub().returns([file]) await http({ method: 'POST', url: `/api/v0/files/ls?arg=${path}&stream=true` }, { ipfs }) - expect(ipfs.files.lsReadableStream.callCount).to.equal(1) - expect(ipfs.files.lsReadableStream.getCall(0).args).to.deep.equal([ - path, - defaultOptions() - ]) - }) - - it('should list a path with details', async () => { - const file = { - name: 'file-name', - type: 'file-type', - size: 'file-size', - hash: 'file-hash', - mode: 'file-mode', - mtime: { - secs: 'file-mtime-secs', - nsecs: 'file-mtime-nsecs' - } - } - ipfs.files.ls = sinon.stub().resolves([file]) - - const response = await http({ - method: 'POST', - url: `/api/v0/files/ls?arg=${path}&long=true` - }, { ipfs }) - expect(ipfs.files.ls.callCount).to.equal(1) expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ - path, - defaultOptions({ - long: true - }) + path ]) - - expect(response).to.have.nested.property('result.Entries.length', 1) - expect(response).to.have.nested.property('result.Entries[0].Name', file.name) - expect(response).to.have.nested.property('result.Entries[0].Type', file.type) - expect(response).to.have.nested.property('result.Entries[0].Size', file.size) - expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash) - expect(response).to.have.nested.property('result.Entries[0].Mode', file.mode) - expect(response).to.have.nested.property('result.Entries[0].Mtime', file.mtime.secs) - expect(response).to.have.nested.property('result.Entries[0].MtimeNsecs', file.mtime.nsecs) }) }) diff --git a/test/http/read.js b/test/http/read.js index ad747da..4f25525 100644 --- a/test/http/read.js +++ b/test/http/read.js @@ -4,7 +4,6 @@ const expect = require('../helpers/chai') const http = require('../helpers/http') const sinon = require('sinon') -const PassThrough = require('stream').PassThrough function defaultOptions (modification = {}) { const options = { @@ -26,16 +25,7 @@ describe('read', () => { beforeEach(() => { ipfs = { files: { - readReadableStream: sinon.stub().callsFake(() => { - const stream = new PassThrough() - - setImmediate(() => { - stream.emit('data', Buffer.from('hello world')) - stream.end() - }) - - return stream - }) + read: sinon.stub().returns([Buffer.from('hello world')]) } } }) @@ -46,8 +36,8 @@ describe('read', () => { url: `/api/v0/files/read?arg=${path}` }, { ipfs }) - expect(ipfs.files.readReadableStream.callCount).to.equal(1) - 
expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + expect(ipfs.files.read.callCount).to.equal(1) + expect(ipfs.files.read.getCall(0).args).to.deep.equal([ path, defaultOptions() ]) @@ -61,8 +51,8 @@ describe('read', () => { url: `/api/v0/files/read?arg=${path}&offset=${offset}` }, { ipfs }) - expect(ipfs.files.readReadableStream.callCount).to.equal(1) - expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + expect(ipfs.files.read.callCount).to.equal(1) + expect(ipfs.files.read.getCall(0).args).to.deep.equal([ path, defaultOptions({ offset @@ -78,8 +68,8 @@ describe('read', () => { url: `/api/v0/files/read?arg=${path}&length=${length}` }, { ipfs }) - expect(ipfs.files.readReadableStream.callCount).to.equal(1) - expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + expect(ipfs.files.read.callCount).to.equal(1) + expect(ipfs.files.read.getCall(0).args).to.deep.equal([ path, defaultOptions({ length @@ -95,8 +85,8 @@ describe('read', () => { url: `/api/v0/files/read?arg=${path}&count=${length}` }, { ipfs }) - expect(ipfs.files.readReadableStream.callCount).to.equal(1) - expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + expect(ipfs.files.read.callCount).to.equal(1) + expect(ipfs.files.read.getCall(0).args).to.deep.equal([ path, defaultOptions({ length diff --git a/test/http/stat.js b/test/http/stat.js index c8536a0..d302895 100644 --- a/test/http/stat.js +++ b/test/http/stat.js @@ -4,13 +4,14 @@ const expect = require('../helpers/chai') const http = require('../helpers/http') const sinon = require('sinon') +const CID = require('cids') +const fileCid = new CID('bafybeigyov3nzxrqjismjpq7ghkkjorcmozy5rgaikvyieakoqpxfc3rvu') function defaultOptions (modification = {}) { const options = { withLocal: false, hash: false, - size: false, - cidBase: 'base58btc' + size: false } Object.keys(modification).forEach(key => { @@ -23,7 +24,7 @@ function defaultOptions (modification = {}) { describe('stat', () => { const path = '/foo' const stats = { - hash: 'stats-hash', + cid: fileCid, size: 'stats-size', cumulativeSize: 'stats-cumulativeSize', blocks: 'stats-blocks', @@ -83,7 +84,7 @@ describe('stat', () => { hash: true }) ]) - expect(response).to.have.nested.property('result.Hash', stats.hash) + expect(response).to.have.nested.property('result.Hash', stats.cid.toString()) }) it('should stat a path and only show sizes', async () => { @@ -111,10 +112,8 @@ describe('stat', () => { expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ path, - defaultOptions({ - cidBase: 'base64' - }) + defaultOptions() ]) - expect(response).to.have.nested.property('result.Hash', stats.hash) + expect(response).to.have.nested.property('result.Hash', stats.cid.toString('base64')) }) })
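
---

For context on the API surface this patch settles on, the following is a minimal usage sketch (not part of the patch itself). It assumes an `ipfs` instance whose `files.ls`, `files.read` and `files.stat` behave as in the diffs above: `ls` and `read` return async iterables instead of pull streams, and `ls`/`stat` entries expose a `cid` (a CID instance) rather than a string `hash`. The helper names (`listSorted`, `readToBuffer`, `readAsNodeStream`) are illustrative only.

'use strict'

const all = require('it-all')
const toStream = require('it-to-stream')

// Collect and sort a directory listing, mirroring the CLI behaviour in src/cli/ls.js.
// Each entry's `cid` is a CID instance, so formatting is done with cid.toString().
async function listSorted (ipfs, path) {
  const files = await all(ipfs.files.ls(path))

  return files
    .sort((a, b) => a.name.localeCompare(b.name))
    .map(file => `${file.name}\t${file.cid.toString()}\t${file.size}`)
}

// Read a whole MFS file into one Buffer by draining the async iterable.
async function readToBuffer (ipfs, path, offset, length) {
  const chunks = await all(ipfs.files.read(path, { offset, length }))

  return Buffer.concat(chunks)
}

// Where a Node.js readable stream is still required (as in src/http/read.js),
// wrap the async iterable with it-to-stream rather than calling a
// *ReadableStream method, which no longer exists after this refactor.
function readAsNodeStream (ipfs, path) {
  return toStream.readable(ipfs.files.read(path))
}

Collecting with it-all is only appropriate for small listings or files; for anything large, iterate with `for await...of` and handle each chunk as it arrives, as the `files read` CLI command in src/cli/read.js does.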