diff --git a/README.md b/README.md
index 727fc2147..19a1ca749 100644
--- a/README.md
+++ b/README.md
@@ -8,10 +8,8 @@
-
-
diff --git a/package.json b/package.json
index 4d2d5dddc..d23c0a111 100644
--- a/package.json
+++ b/package.json
@@ -19,7 +19,7 @@
     "release-minor": "aegir release --type minor ",
     "release-major": "aegir release --type major ",
     "coverage": "aegir coverage --timeout 100000",
-    "coverage-publish": "aegir coverage --provider coveralls k--timeout 100000"
+    "coverage-publish": "aegir coverage --provider coveralls --timeout 100000"
   },
   "dependencies": {
     "async": "^2.6.0",
@@ -45,9 +45,13 @@
     "peer-info": "~0.11.1",
     "promisify-es6": "^1.0.3",
     "pump": "^1.0.3",
+    "pull-stream": "^3.6.1",
+    "pull-defer": "^0.2.2",
+    "pull-pushable": "^2.1.1",
     "qs": "^6.5.1",
     "readable-stream": "^2.3.3",
     "stream-http": "^2.7.2",
+    "stream-to-pull-stream": "^1.7.2",
     "streamifier": "^0.1.1",
     "tar-stream": "^1.5.5"
   },
@@ -65,7 +69,7 @@
     "dirty-chai": "^2.0.1",
     "eslint-plugin-react": "^7.4.0",
     "gulp": "^3.9.1",
-    "interface-ipfs-core": "~0.35.0",
+    "interface-ipfs-core": "~0.36.4",
     "hapi": "^16.6.2",
     "ipfsd-ctl": "~0.24.1",
     "pre-commit": "^1.2.2",
diff --git a/src/files/add-pull-stream.js b/src/files/add-pull-stream.js
new file mode 100644
index 000000000..8c23abc23
--- /dev/null
+++ b/src/files/add-pull-stream.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const addCmd = require('./add.js')
+const pull = require('pull-stream')
+const pushable = require('pull-pushable')
+
+module.exports = (send) => {
+  const add = addCmd(send)
+
+  return (options) => {
+    options = options || {}
+
+    const source = pushable()
+    const sink = pull.collect((err, tuples) => {
+      if (err) { return source.end(err) }
+
+      add(tuples, options, (err, filesAdded) => {
+        if (err) { return source.end(err) }
+
+        filesAdded.forEach((file) => source.push(file))
+        source.end()
+      })
+    })
+
+    return {
+      sink: sink,
+      source: source
+    }
+  }
+}
diff --git a/src/files/add-readable-stream.js b/src/files/add-readable-stream.js
new file mode 100644
index 000000000..cb4364d1d
--- /dev/null
+++ b/src/files/add-readable-stream.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const addCmd = require('./add.js')
+const Duplex = require('readable-stream').Duplex
+
+module.exports = (send) => {
+  const add = addCmd(send)
+
+  return (options) => {
+    options = options || {}
+
+    const tuples = []
+
+    const ds = new Duplex({ objectMode: true })
+    ds._read = (n) => {}
+
+    ds._write = (file, enc, next) => {
+      tuples.push(file)
+      next()
+    }
+
+    ds.end = () => add(tuples, options, (err, res) => {
+      if (err) { return ds.emit('error', err) }
+
+      res.forEach((tuple) => ds.push(tuple))
+      ds.push(null)
+    })
+
+    return ds
+  }
+}
diff --git a/src/files/cat-pull-stream.js b/src/files/cat-pull-stream.js
new file mode 100644
index 000000000..13cd06e07
--- /dev/null
+++ b/src/files/cat-pull-stream.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const cleanCID = require('../utils/clean-cid')
+const v = require('is-ipfs')
+const toPull = require('stream-to-pull-stream')
+const deferred = require('pull-defer')
+
+module.exports = (send) => {
+  return (hash, opts) => {
+    opts = opts || {}
+
+    const p = deferred.source()
+
+    try {
+      hash = cleanCID(hash)
+    } catch (err) {
+      if (!v.ipfsPath(hash)) {
+        return p.end(err)
+      }
+    }
+
+    send({ path: 'cat', args: hash, buffer: opts.buffer }, (err, stream) => {
+      if (err) { return p.end(err) }
+
+      p.resolve(toPull(stream))
+    })
+
+    return p
+  }
+}
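For context on how the new { sink, source } duplex from add-pull-stream.js is meant to be consumed (it is wired into the files API in src/files/index.js further down), here is a rough usage sketch, not part of the diff; the file path and content are made up, and a local daemon on port 5001 is assumed:

const pull = require('pull-stream')
const ipfsAPI = require('ipfs-api')

const ipfs = ipfsAPI('localhost', '5001')

// addPullStream() returns a pull-stream duplex ({ sink, source }):
// write { path, content } tuples into the sink, read the added
// file descriptors back out of the source.
pull(
  pull.values([{ path: 'hello.txt', content: Buffer.from('hello\n') }]), // hypothetical input
  ipfs.files.addPullStream(),
  pull.collect((err, filesAdded) => {
    if (err) { throw err }
    console.log(filesAdded) // [{ path, hash, size }]
  })
)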
diff --git a/src/files/cat-readable-stream.js b/src/files/cat-readable-stream.js
new file mode 100644
index 000000000..0ad80f919
--- /dev/null
+++ b/src/files/cat-readable-stream.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const cleanCID = require('../utils/clean-cid')
+const v = require('is-ipfs')
+const Stream = require('readable-stream')
+const pump = require('pump')
+
+module.exports = (send) => {
+  return (hash, opts) => {
+    opts = opts || {}
+
+    const pt = new Stream.PassThrough()
+
+    try {
+      hash = cleanCID(hash)
+    } catch (err) {
+      if (!v.ipfsPath(hash)) {
+        return pt.destroy(err)
+      }
+    }
+
+    send({ path: 'cat', args: hash, buffer: opts.buffer }, (err, stream) => {
+      if (err) { return pt.destroy(err) }
+
+      pump(stream, pt)
+    })
+
+    return pt
+  }
+}
diff --git a/src/files/cat.js b/src/files/cat.js
index 2382c2a20..65046fb9b 100644
--- a/src/files/cat.js
+++ b/src/files/cat.js
@@ -3,6 +3,7 @@
 const promisify = require('promisify-es6')
 const cleanCID = require('../utils/clean-cid')
 const v = require('is-ipfs')
+const bl = require('bl')
 
 module.exports = (send) => {
   return promisify((hash, opts, callback) => {
@@ -19,10 +20,14 @@
       }
     }
 
-    send({
-      path: 'cat',
-      args: hash,
-      buffer: opts.buffer
-    }, callback)
+    send({ path: 'cat', args: hash, buffer: opts.buffer }, (err, stream) => {
+      if (err) { return callback(err) }
+
+      stream.pipe(bl((err, data) => {
+        if (err) { return callback(err) }
+
+        callback(null, data)
+      }))
+    })
   })
 }
diff --git a/src/files/create-add-stream.js b/src/files/create-add-stream.js
deleted file mode 100644
index 47c178459..000000000
--- a/src/files/create-add-stream.js
+++ /dev/null
@@ -1,35 +0,0 @@
-'use strict'
-
-const addCmd = require('./add.js')
-const Duplex = require('stream').Duplex
-const promisify = require('promisify-es6')
-
-module.exports = (send) => {
-  const add = addCmd(send)
-
-  return promisify((options, callback) => {
-    if (typeof options === 'function') {
-      callback = options
-      options = undefined
-    }
-
-    const tuples = []
-
-    const ds = new Duplex({ objectMode: true })
-    ds._read = (n) => {}
-
-    ds._write = (file, enc, next) => {
-      tuples.push(file)
-      next()
-    }
-
-    ds.end = () => {
-      add(tuples, options, (err, res) => {
-        if (err) { return ds.emit('error', err) }
-        res.forEach((tuple) => ds.push(tuple))
-        ds.push(null)
-      })
-    }
-    callback(null, ds)
-  })
-}
diff --git a/src/files/get-pull-stream.js b/src/files/get-pull-stream.js
new file mode 100644
index 000000000..d86088e30
--- /dev/null
+++ b/src/files/get-pull-stream.js
@@ -0,0 +1,44 @@
+'use strict'
+
+const cleanCID = require('../utils/clean-cid')
+const TarStreamToObjects = require('../utils/tar-stream-to-objects')
+const v = require('is-ipfs')
+const through = require('through2')
+const pull = require('pull-stream')
+const toPull = require('stream-to-pull-stream')
+const deferred = require('pull-defer')
+
+module.exports = (send) => {
+  return (path, opts) => {
+    opts = opts || {}
+
+    const p = deferred.source()
+
+    try {
+      path = cleanCID(path)
+    } catch (err) {
+      if (!v.ipfsPath(path)) {
+        return p.end(err)
+      }
+    }
+
+    const request = { path: 'get', args: path, qs: opts }
+
+    // Convert the response stream to TarStream objects
+    send.andTransform(request, TarStreamToObjects, (err, stream) => {
+      if (err) { return p.end(err) }
+
+      const files = []
+      stream.pipe(through.obj((file, enc, next) => {
+        if (file.content) {
+          files.push({ path: file.path, content: toPull(file.content) })
+        } else {
+          files.push(file)
+        }
+        next()
+      }, () => p.resolve(pull.values(files))))
+    })
+
+    return p
+  }
+}
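Worth noting: cat.js now buffers the whole response through bl before calling back, so consumers that want bytes as they arrive should move to the new streaming variants. A minimal sketch of the readable-stream one, reusing the small-file CID from the tests below and assuming a local daemon:

const ipfsAPI = require('ipfs-api')
const ipfs = ipfsAPI('localhost', '5001')

// catReadableStream returns a Node readable stream of the file's bytes,
// so it can be piped anywhere a regular stream goes.
const stream = ipfs.files.catReadableStream('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')

stream.on('error', (err) => console.error(err))
stream.pipe(process.stdout)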
diff --git a/src/files/get-readable-stream.js b/src/files/get-readable-stream.js
new file mode 100644
index 000000000..4c74dd0a7
--- /dev/null
+++ b/src/files/get-readable-stream.js
@@ -0,0 +1,34 @@
+'use strict'
+
+const cleanCID = require('../utils/clean-cid')
+const TarStreamToObjects = require('../utils/tar-stream-to-objects')
+const v = require('is-ipfs')
+const Stream = require('readable-stream')
+const pump = require('pump')
+
+module.exports = (send) => {
+  return (path, opts) => {
+    opts = opts || {}
+
+    const pt = new Stream.PassThrough({objectMode: true})
+
+    try {
+      path = cleanCID(path)
+    } catch (err) {
+      if (!v.ipfsPath(path)) {
+        return pt.destroy(err)
+      }
+    }
+
+    const request = { path: 'get', args: path, qs: opts }
+
+    // Convert the response stream to TarStream objects
+    send.andTransform(request, TarStreamToObjects, (err, stream) => {
+      if (err) { return pt.destroy(err) }
+
+      pump(stream, pt)
+    })
+
+    return pt
+  }
+}
diff --git a/src/files/get.js b/src/files/get.js
index 056c46d01..6597db677 100644
--- a/src/files/get.js
+++ b/src/files/get.js
@@ -3,6 +3,8 @@
 const promisify = require('promisify-es6')
 const cleanCID = require('../utils/clean-cid')
 const TarStreamToObjects = require('../utils/tar-stream-to-objects')
+const concat = require('concat-stream')
+const through = require('through2')
 const v = require('is-ipfs')
 
 module.exports = (send) => {
@@ -14,8 +16,7 @@
 
     // opts is the real callback --
    // 'callback' is being injected by promisify
-    if (typeof opts === 'function' &&
-        typeof callback === 'function') {
+    if (typeof opts === 'function' && typeof callback === 'function') {
       callback = opts
       opts = {}
     }
@@ -28,13 +29,23 @@
       }
     }
 
-    const request = {
-      path: 'get',
-      args: path,
-      qs: opts
-    }
+    const request = { path: 'get', args: path, qs: opts }
 
     // Convert the response stream to TarStream objects
-    send.andTransform(request, TarStreamToObjects, callback)
+    send.andTransform(request, TarStreamToObjects, (err, stream) => {
+      if (err) { return callback(err) }
+
+      const files = []
+      stream.pipe(through.obj((file, enc, next) => {
+        if (file.content) {
+          file.content.pipe(concat((content) => {
+            files.push({ path: file.path, content: content })
+          }))
+        } else {
+          files.push(file)
+        }
+        next()
+      }, () => callback(null, files)))
+    })
   })
 }
diff --git a/src/files/index.js b/src/files/index.js
index 2c2927af4..4b9796998 100644
--- a/src/files/index.js
+++ b/src/files/index.js
@@ -7,14 +7,21 @@
 
   return {
     add: require('./add')(send),
-    createAddStream: require('./create-add-stream')(send),
-    get: require('./get')(send),
+    addReadableStream: require('./add-readable-stream')(send),
+    addPullStream: require('./add-pull-stream')(send),
     cat: require('./cat')(send),
+    catReadableStream: require('./cat-readable-stream')(send),
+    catPullStream: require('./cat-pull-stream')(send),
+    get: require('./get')(send),
+    getReadableStream: require('./get-readable-stream')(send),
+    getPullStream: require('./get-pull-stream')(send),
+
+    // Specific to MFS (for now)
     cp: require('./cp')(send),
-    ls: require('./ls')(send),
     mkdir: require('./mkdir')(send),
     stat: require('./stat')(send),
     rm: require('./rm')(send),
+    ls: require('./ls')(send),
     read: require('./read')(send),
     write: require('./write')(send),
     mv: require('./mv')(send)
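The callback form of get now resolves to a fully buffered array of { path, content } entries (see get.js above), while get-readable-stream.js keeps per-file content as a stream. A rough sketch of both, reusing the small-file CID from the tests:

const ipfsAPI = require('ipfs-api')
const ipfs = ipfsAPI('localhost', '5001')
const cid = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'

// Buffered: content is a Buffer by the time the callback fires.
ipfs.files.get(cid, (err, files) => {
  if (err) { throw err }
  files.forEach((file) => console.log(file.path, file.content.toString()))
})

// Streaming: an object-mode stream of { path, content }, where content
// is itself a readable stream (tar entries converted by TarStreamToObjects).
ipfs.files.getReadableStream(cid).on('data', (file) => {
  if (file.content) { file.content.pipe(process.stdout) }
})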
diff --git a/src/ls-pull-stream.js b/src/ls-pull-stream.js
new file mode 100644
index 000000000..eacb7e391
--- /dev/null
+++ b/src/ls-pull-stream.js
@@ -0,0 +1,64 @@
+'use strict'
+
+const moduleConfig = require('./utils/module-config')
+const pull = require('pull-stream')
+const deferred = require('pull-defer')
+
+module.exports = (arg) => {
+  const send = moduleConfig(arg)
+
+  return (args, opts, callback) => {
+    if (typeof (opts) === 'function') {
+      callback = opts
+      opts = {}
+    }
+
+    const p = deferred.source()
+
+    send({ path: 'ls', args: args, qs: opts }, (err, results) => {
+      if (err) {
+        return callback(err)
+      }
+
+      let result = results.Objects
+      if (!result) {
+        return callback(new Error('expected .Objects in results'))
+      }
+
+      result = result[0]
+      if (!result) {
+        return callback(new Error('expected one array in results.Objects'))
+      }
+
+      result = result.Links
+      if (!Array.isArray(result)) {
+        return callback(new Error('expected one array in results.Objects[0].Links'))
+      }
+
+      result = result.map((link) => ({
+        depth: 1,
+        name: link.Name,
+        path: args + '/' + link.Name,
+        size: link.Size,
+        hash: link.Hash,
+        type: typeOf(link)
+      }))
+
+      p.resolve(pull.values(result))
+    })
+
+    return p
+  }
+}
+
+function typeOf (link) {
+  switch (link.Type) {
+    case 1:
+    case 5:
+      return 'dir'
+    case 2:
+      return 'file'
+    default:
+      return 'unknown'
+  }
+}
diff --git a/src/ls-readable-stream.js b/src/ls-readable-stream.js
new file mode 100644
index 000000000..26005c5ee
--- /dev/null
+++ b/src/ls-readable-stream.js
@@ -0,0 +1,62 @@
+'use strict'
+
+const moduleConfig = require('./utils/module-config')
+const Stream = require('readable-stream')
+
+module.exports = (arg) => {
+  const send = moduleConfig(arg)
+
+  return (args, opts, callback) => {
+    if (typeof (opts) === 'function') {
+      callback = opts
+      opts = {}
+    }
+
+    const pt = new Stream.PassThrough({objectMode: true})
+
+    send({ path: 'ls', args: args, qs: opts }, (err, results) => {
+      if (err) { return callback(err) }
+
+      let result = results.Objects
+      if (!result) {
+        return callback(new Error('expected .Objects in results'))
+      }
+
+      result = result[0]
+      if (!result) {
+        return callback(new Error('expected one array in results.Objects'))
+      }
+
+      result = result.Links
+      if (!Array.isArray(result)) {
+        return callback(new Error('expected one array in results.Objects[0].Links'))
+      }
+
+      result = result.map((link) => ({
+        depth: 1,
+        name: link.Name,
+        path: args + '/' + link.Name,
+        size: link.Size,
+        hash: link.Hash,
+        type: typeOf(link)
+      }))
+
+      result.forEach((item) => pt.write(item))
+      pt.end()
+    })
+
+    return pt
+  }
+}
+
+function typeOf (link) {
+  switch (link.Type) {
+    case 1:
+    case 5:
+      return 'dir'
+    case 2:
+      return 'file'
+    default:
+      return 'unknown'
+  }
+}
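A rough sketch of the two new ls variants, which are exposed at the top level by the load-commands.js hunk that follows; the folder CID is the test-folder hash from the deleted ls.spec.js:

const pull = require('pull-stream')
const ipfsAPI = require('ipfs-api')
const ipfs = ipfsAPI('localhost', '5001')
const folder = 'QmQao3KNcpCsdXaLGpjieFGMfXzsSXgsf6Rnc5dJJA3QMh'

// Readable-stream variant: an object-mode stream of link descriptors
// ({ depth, name, path, size, hash, type }).
ipfs.lsReadableStream(folder)
  .on('data', (link) => console.log(link.name, link.hash, link.type))

// Pull-stream variant: a deferred pull source of the same objects.
pull(
  ipfs.lsPullStream(folder),
  pull.drain((link) => console.log(link.path, link.size))
)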
diff --git a/src/utils/load-commands.js b/src/utils/load-commands.js
index 860a8f7f1..ff46271c9 100644
--- a/src/utils/load-commands.js
+++ b/src/utils/load-commands.js
@@ -2,10 +2,20 @@
 
 function requireCommands () {
   const cmds = {
-    // add and createAddStream alias
+    // Files (not MFS)
    add: require('../files/add'),
+    addReadableStream: require('../files/add-readable-stream'),
+    addPullStream: require('../files/add-pull-stream'),
    cat: require('../files/cat'),
-    createAddStream: require('../files/create-add-stream'),
+    catReadableStream: require('../files/cat-readable-stream'),
+    catPullStream: require('../files/cat-pull-stream'),
+    get: require('../files/get'),
+    getReadableStream: require('../files/get-readable-stream'),
+    getPullStream: require('../files/get-pull-stream'),
+    ls: require('../ls'),
+    lsReadableStream: require('../ls-readable-stream'),
+    lsPullStream: require('../ls-pull-stream'),
+
    bitswap: require('../bitswap'),
    block: require('../block'),
    bootstrap: require('../bootstrap'),
@@ -15,9 +25,7 @@
    diag: require('../diag'),
    id: require('../id'),
    key: require('../key'),
-    get: require('../files/get'),
    log: require('../log'),
-    ls: require('../ls'),
    mount: require('../mount'),
    name: require('../name'),
    object: require('../object'),
diff --git a/test/add.spec.js b/test/add.spec.js
deleted file mode 100644
index 68f9fd529..000000000
--- a/test/add.spec.js
+++ /dev/null
@@ -1,40 +0,0 @@
-/* eslint-env mocha */
-/* eslint max-nested-callbacks: ["error", 8] */
-'use strict'
-
-const chai = require('chai')
-const dirtyChai = require('dirty-chai')
-const expect = chai.expect
-chai.use(dirtyChai)
-const isNode = require('detect-node')
-const path = require('path')
-
-const FactoryClient = require('./ipfs-factory/client')
-
-describe('.add (extra tests)', () => {
-  if (!isNode) { return }
-
-  let ipfs
-  let fc
-
-  before(function (done) {
-    this.timeout(20 * 1000) // slow CI
-    fc = new FactoryClient()
-    fc.spawnNode((err, node) => {
-      expect(err).to.not.exist()
-      ipfs = node
-      done()
-    })
-  })
-
-  after((done) => fc.dismantle(done))
-
-  it('add by path fails', (done) => {
-    const validPath = path.join(process.cwd() + '/package.json')
-
-    ipfs.files.add(validPath, (err, res) => {
-      expect(err).to.exist()
-      done()
-    })
-  })
-})
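Since load-commands.js above registers every variant at the top level as well, the same calls work without the files namespace. A minimal sketch, assuming a local daemon:

const ipfsAPI = require('ipfs-api')
const ipfs = ipfsAPI('localhost', '5001')

// Top-level aliases mirror files.*:
ipfs.add(Buffer.from('hello\n'), (err, res) => {
  if (err) { throw err }
  console.log(res[0].hash)
})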
diff --git a/test/get.spec.js b/test/get.spec.js
index f401235d1..008905bba 100644
--- a/test/get.spec.js
+++ b/test/get.spec.js
@@ -8,212 +8,105 @@
 const dirtyChai = require('dirty-chai')
 const expect = chai.expect
 chai.use(dirtyChai)
 const isNode = require('detect-node')
-const fs = require('fs')
-const concat = require('concat-stream')
-const through = require('through2')
-const streamEqual = require('stream-equal')
-const path = require('path')
+const series = require('async/series')
 const loadFixture = require('aegir/fixtures')
 
 const FactoryClient = require('./ipfs-factory/client')
 
-const testfile = loadFixture(__dirname, '/fixtures/testfile.txt')
-let testfileBig
-let tfbPath
+describe('.get (specific go-ipfs features)', function () {
+  this.timeout(20 * 1000)
 
-if (isNode) {
-  tfbPath = path.join(__dirname, '/fixtures/15mb.random')
-  testfileBig = fs.createReadStream(tfbPath, { bufferSize: 128 })
-}
+  function fixture (path) {
+    return loadFixture(__dirname, path, 'interface-ipfs-core')
+  }
 
-describe('.get', function () {
-  this.timeout(80 * 1000)
+  const smallFile = {
+    cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP',
+    data: fixture('../test/fixtures/testfile.txt')
+  }
 
   let ipfs
   let fc
 
   before((done) => {
     fc = new FactoryClient()
-    fc.spawnNode((err, node) => {
-      expect(err).to.not.exist()
-      ipfs = node
-      done()
-    })
-  })
-
-  after((done) => fc.dismantle(done))
-
-  describe('Callback API', () => {
-    this.timeout(80 * 1000)
-
-    it('add file for testing', (done) => {
-      const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'
-
-      ipfs.files.add(testfile, (err, res) => {
-        expect(err).to.not.exist()
-
-        expect(res).to.have.length(1)
-        expect(res[0].hash).to.equal(expectedMultihash)
-        expect(res[0].path).to.equal(expectedMultihash)
-        done()
-      })
-    })
-
-    it('get with no compression args', (done) => {
-      ipfs.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, res) => {
+    series([
+      (cb) => fc.spawnNode((err, node) => {
         expect(err).to.not.exist()
+        ipfs = node
+        cb()
+      }),
+      (cb) => ipfs.files.add(smallFile.data, cb)
+    ], done)
+  })
 
-        // accumulate the files and their content
-        var files = []
-
-        res.pipe(through.obj((file, enc, next) => {
-          file.content.pipe(concat((content) => {
-            files.push({
-              path: file.path,
-              content: content
-            })
-            next()
-          }))
-        }, () => {
-          expect(files).to.be.length(1)
-          expect(files[0].content.toString()).to.contain(testfile.toString())
-          done()
-        }))
-      })
-    })
+  after((done) => fc.dismantle(done))
 
-    it('get with archive true', (done) => {
-      ipfs.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {
-        archive: true
-      }, (err, res) => {
-        expect(err).to.not.exist()
+  it('no compression args', (done) => {
+    ipfs.get(smallFile.cid, (err, files) => {
+      expect(err).to.not.exist()
 
-        // accumulate the files and their content
-        var files = []
-
-        res.pipe(through.obj((file, enc, next) => {
-          file.content.pipe(concat((content) => {
-            files.push({
-              path: file.path,
-              content: content
-            })
-            next()
-          }))
-        }, () => {
-          expect(files).to.be.length(1)
-          expect(files[0].content.toString()).to.contain(testfile.toString())
-          done()
-        }))
-      })
+      expect(files).to.be.length(1)
+      expect(files[0].content.toString()).to.contain(smallFile.data.toString())
+      done()
     })
+  })
 
-    it('get err with out of range compression level', (done) => {
-      ipfs.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {
-        compress: true,
-        'compression-level': 10
-      }, (err, res) => {
-        expect(err).to.exist()
-        expect(err.toString()).to.equal('Error: Compression level must be between 1 and 9')
-        done()
-      })
-    })
+  it('archive true', (done) => {
+    ipfs.get(smallFile.cid, { archive: true }, (err, files) => {
+      expect(err).to.not.exist()
 
-    it('get with compression level', (done) => {
-      ipfs.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {
-        compress: true,
-        'compression-level': 1
-      }, (err, res) => {
-        expect(err).to.not.exist()
-        done()
-      })
+      expect(files).to.be.length(1)
+      expect(files[0].content.toString()).to.contain(smallFile.data.toString())
+      done()
    })
+  })
 
-    it('add a BIG file (for testing get)', (done) => {
-      if (!isNode) { return done() }
-
-      const bigFile = fs.readFileSync(tfbPath)
-      const expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
-
-      ipfs.files.add(bigFile, (err, res) => {
-        expect(err).to.not.exist()
-
-        expect(res).to.have.length(1)
-        expect(res[0].path).to.equal(expectedMultihash)
-        expect(res[0].hash).to.equal(expectedMultihash)
-        done()
-      })
+  it('err with out of range compression level', (done) => {
+    ipfs.get(smallFile.cid, {
+      compress: true,
+      'compression-level': 10
+    }, (err, files) => {
+      expect(err).to.exist()
+      expect(err.toString()).to.equal('Error: Compression level must be between 1 and 9')
+      done()
     })
+  })
 
-    it('get BIG file', (done) => {
-      if (!isNode) { return done() }
+  it('with compression level', (done) => {
+    ipfs.get(smallFile.cid, { compress: true, 'compression-level': 1 }, done)
+  })
 
-      ipfs.get('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', (err, files) => {
-        expect(err).to.not.exist()
+  it('add path containing "+"s (for testing get)', (done) => {
+    if (!isNode) { return done() }
 
-        files.on('data', (file) => {
-          // Do not blow out the memory of nodejs :)
-          streamEqual(file.content, testfileBig, (err, equal) => {
-            expect(err).to.not.exist()
-            expect(equal).to.equal(true)
-            done()
-          })
-        })
-      })
+    const filename = 'ti,c64x+mega++mod-pic.txt'
+    const subdir = 'tmp/c++files'
+    const expectedCid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff'
+    ipfs.add([{
+      path: subdir + '/' + filename,
+      content: Buffer.from(subdir + '/' + filename, 'utf-8')
+    }], (err, files) => {
+      expect(err).to.not.exist()
+      expect(files[2].hash).to.equal(expectedCid)
+      done()
    })
+  })
 
-    it('add path containing "+"s (for testing get)', (done) => {
-      if (!isNode) { return done() }
-      const filename = 'ti,c64x+mega++mod-pic.txt'
-      const subdir = 'tmp/c++files'
-      const expectedMultihash = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff'
-      ipfs.files.add([{
-        path: subdir + '/' + filename,
-        content: Buffer.from(subdir + '/' + filename, 'utf-8')
-      }], (err, res) => {
-        if (err) done(err)
-        expect(res[2].hash).to.equal(expectedMultihash)
-        done()
-      })
-    })
+  it('get path containing "+"s', (done) => {
+    if (!isNode) { return done() }
 
-    it('get path containing "+"s', (done) => {
-      if (!isNode) { return done() }
-      const multihash = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff'
-      let count = 0
-      ipfs.get(multihash, (err, files) => {
-        expect(err).to.not.exist()
-        files.on('data', (file) => {
-          if (file.path !== multihash) {
-            count++
-            expect(file.path).to.contain('+')
-            if (count === 2) done()
-          }
-        })
+    const cid = 'QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff'
+    let count = 0
+    ipfs.get(cid, (err, files) => {
+      expect(err).to.not.exist()
+      files.forEach((file) => {
+        if (file.path !== cid) {
+          count++
+          expect(file.path).to.contain('+')
+          if (count === 2) done()
+        }
       })
     })
   })
-
-  describe('Promise API', () => {
-    this.timeout(80 * 1000)
-
-    it('get', (done) => {
-      ipfs.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-        .then((files) => {
-          files.on('data', (file) => {
-            let buf = ''
-            file.content
-              .on('error', (err) => expect(err).to.not.exist())
-              .on('data', (data) => {
-                buf += data.toString()
-              })
-              .on('end', () => {
-                expect(buf).to.contain(testfile.toString())
-                done()
-              })
-          })
-        })
-        .catch((err) => {
-          expect(err).to.not.exist()
-        })
-    })
-  })
 })
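The rewritten tests above exercise the buffered get; large trees can use getPullStream from get-pull-stream.js instead, where each entry's content arrives as a pull source. A rough sketch, reusing the "+"-path CID from the tests:

const pull = require('pull-stream')
const ipfsAPI = require('ipfs-api')
const ipfs = ipfsAPI('localhost', '5001')

pull(
  ipfs.files.getPullStream('QmPkmARcqjo5fqK1V1o8cFsuaXxWYsnwCNLJUYS4KeZyff'),
  pull.drain((file) => {
    console.log(file.path)
    if (file.content) {
      // file.content is itself a pull source of the file's bytes
      pull(file.content, pull.collect((err, bufs) => {
        if (err) { throw err }
        console.log(Buffer.concat(bufs).toString())
      }))
    }
  })
)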
diff --git a/test/pin.spec.js b/test/interface/pin.spec.js
similarity index 85%
rename from test/pin.spec.js
rename to test/interface/pin.spec.js
index fbffe385d..b841568ac 100644
--- a/test/pin.spec.js
+++ b/test/interface/pin.spec.js
@@ -3,7 +3,7 @@
 'use strict'
 
 const test = require('interface-ipfs-core')
-const FactoryClient = require('./ipfs-factory/client')
+const FactoryClient = require('../ipfs-factory/client')
 
 let fc
 
diff --git a/test/ls.spec.js b/test/ls.spec.js
deleted file mode 100644
index 4cbf845f4..000000000
--- a/test/ls.spec.js
+++ /dev/null
@@ -1,71 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const chai = require('chai')
-const dirtyChai = require('dirty-chai')
-const expect = chai.expect
-chai.use(dirtyChai)
-const isNode = require('detect-node')
-const waterfall = require('async/waterfall')
-const path = require('path')
-
-const FactoryClient = require('./ipfs-factory/client')
-
-describe('.ls', function () {
-  this.timeout(50 * 1000)
-
-  if (!isNode) { return }
-
-  let ipfs
-  let fc
-  let folder
-
-  before((done) => {
-    fc = new FactoryClient()
-    waterfall([
-      (cb) => fc.spawnNode(cb),
-      (node, cb) => {
-        ipfs = node
-        const filesPath = path.join(__dirname, '/fixtures/test-folder')
-        ipfs.util.addFromFs(filesPath, { recursive: true }, cb)
-      },
-      (hashes, cb) => {
-        folder = hashes[hashes.length - 1].hash
-        expect(folder).to.be.eql('QmQao3KNcpCsdXaLGpjieFGMfXzsSXgsf6Rnc5dJJA3QMh')
-        cb()
-      }
-    ], done)
-  })
-
-  after((done) => fc.dismantle(done))
-
-  describe('Callback API', () => {
-    it('should correctly handle a nonexist()ing hash', function (done) {
-      ipfs.ls('surelynotavalidhashheh?', (err, res) => {
-        expect(err).to.exist()
-        expect(res).to.not.exist()
-        done()
-      })
-    })
-
-    it('should correctly handle a nonexist()ing path', function (done) {
-      ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there', (err, res) => {
-        expect(err).to.exist()
-        expect(res).to.not.exist()
-        done()
-      })
-    })
-  })
-
-  describe('Promises API', () => {
-    it('should correctly handle a nonexist()ing hash', () => {
-      return ipfs.ls('surelynotavalidhashheh?')
-        .catch((err) => expect(err).to.exist())
-    })
-
-    it('should correctly handle a nonexist()ing path', () => {
-      return ipfs.ls('QmRNjDeKStKGTQXnJ3NFqeQ9oW/folder_that_isnt_there')
-        .catch((err) => expect(err).to.exist())
-    })
-  })
-})
diff --git a/test/sub-modules.spec.js b/test/sub-modules.spec.js
index bd6d00c66..a74ac1234 100644
--- a/test/sub-modules.spec.js
+++ b/test/sub-modules.spec.js
@@ -157,9 +157,14 @@
     const files = require('../src/files')(config)
 
     expect(files.add).to.be.a('function')
-    expect(files.createAddStream).to.be.a('function')
+    expect(files.addReadableStream).to.be.a('function')
+    expect(files.addPullStream).to.be.a('function')
     expect(files.get).to.be.a('function')
+    expect(files.getReadableStream).to.be.a('function')
+    expect(files.getPullStream).to.be.a('function')
     expect(files.cat).to.be.a('function')
+    expect(files.catReadableStream).to.be.a('function')
+    expect(files.catPullStream).to.be.a('function')
     expect(files.cp).to.be.a('function')
     expect(files.ls).to.be.a('function')
     expect(files.mkdir).to.be.a('function')
diff --git a/test/util.spec.js b/test/util.spec.js
index 20743057f..e0a43d7d0 100644
--- a/test/util.spec.js
+++ b/test/util.spec.js
@@ -19,6 +19,7 @@
 
   before(function (done) {
     this.timeout(20 * 1000) // slow CI
+
    fc = new FactoryClient()
    fc.spawnNode((err, node) => {
      expect(err).to.not.exist()
@@ -29,93 +30,91 @@
 
   after((done) => fc.dismantle(done))
 
-  describe('Callback API', () => {
-    it('.streamAdd', (done) => {
-      const tfpath = path.join(__dirname, '/fixtures/testfile.txt')
-      const rs = fs.createReadStream(tfpath)
-      rs.path = '' // clean the path for testing purposes
+  it('.streamAdd', (done) => {
+    const tfpath = path.join(__dirname, '/fixtures/testfile.txt')
+    const rs = fs.createReadStream(tfpath)
+    rs.path = '' // clean the path for testing purposes
+
+    ipfs.util.addFromStream(rs, (err, result) => {
+      expect(err).to.not.exist()
+      expect(result.length).to.equal(1)
+      done()
+    })
+  })
 
-      ipfs.util.addFromStream(rs, (err, result) => {
+  describe('.fsAdd should add', () => {
+    it('a directory', (done) => {
+      const filesPath = path.join(__dirname, '/fixtures/test-folder')
+      ipfs.util.addFromFs(filesPath, { recursive: true }, (err, result) => {
         expect(err).to.not.exist()
-        expect(result.length).to.equal(1)
+        expect(result.length).to.be.above(8)
         done()
       })
     })
 
-    describe('.fsAdd should add', () => {
-      it('a directory', (done) => {
-        const filesPath = path.join(__dirname, '/fixtures/test-folder')
-        ipfs.util.addFromFs(filesPath, { recursive: true }, (err, result) => {
-          expect(err).to.not.exist()
-          expect(result.length).to.be.above(8)
-          done()
-        })
-      })
-
-      it('a directory with an odd name', (done) => {
-        const filesPath = path.join(__dirname, '/fixtures/weird name folder [v0]')
-        ipfs.util.addFromFs(filesPath, { recursive: true }, (err, result) => {
-          expect(err).to.not.exist()
-          expect(result.length).to.be.above(8)
-          done()
-        })
-      })
-
-      it('add and ignore a directory', (done) => {
-        const filesPath = path.join(__dirname, '/fixtures/test-folder')
-        ipfs.util.addFromFs(filesPath, { recursive: true, ignore: ['files/**'] }, (err, result) => {
-          expect(err).to.not.exist()
-          expect(result.length).to.be.below(9)
-          done()
-        })
-      })
-
-      it('a file', (done) => {
-        const filePath = path.join(__dirname, '/fixtures/testfile.txt')
-        ipfs.util.addFromFs(filePath, (err, result) => {
-          expect(err).to.not.exist()
-          expect(result.length).to.be.equal(1)
-          expect(result[0].path).to.be.equal('testfile.txt')
-          done()
-        })
-      })
-
-      it('a hidden file in a directory', (done) => {
-        const filesPath = path.join(__dirname, '/fixtures/test-folder')
-        ipfs.util.addFromFs(filesPath, { recursive: true, hidden: true }, (err, result) => {
-          expect(err).to.not.exist()
-          expect(result.length).to.be.above(10)
-          expect(result.map(object => object.path)).to.include('test-folder/.hiddenTest.txt')
-          expect(result.map(object => object.hash)).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt')
-          done()
-        })
+    it('a directory with an odd name', (done) => {
+      const filesPath = path.join(__dirname, '/fixtures/weird name folder [v0]')
+      ipfs.util.addFromFs(filesPath, { recursive: true }, (err, result) => {
+        expect(err).to.not.exist()
+        expect(result.length).to.be.above(8)
+        done()
       })
     })
 
-    it('.urlAdd http', (done) => {
-      ipfs.util.addFromURL('http://example.com/', (err, result) => {
+    it('add and ignore a directory', (done) => {
+      const filesPath = path.join(__dirname, '/fixtures/test-folder')
+      ipfs.util.addFromFs(filesPath, { recursive: true, ignore: ['files/**'] }, (err, result) => {
         expect(err).to.not.exist()
-        expect(result.length).to.equal(1)
+        expect(result.length).to.be.below(9)
         done()
       })
     })
 
-    it('.urlAdd https', (done) => {
-      ipfs.util.addFromURL('https://example.com/', (err, result) => {
+    it('a file', (done) => {
+      const filePath = path.join(__dirname, '/fixtures/testfile.txt')
+      ipfs.util.addFromFs(filePath, (err, result) => {
         expect(err).to.not.exist()
-        expect(result.length).to.equal(1)
+        expect(result.length).to.be.equal(1)
+        expect(result[0].path).to.be.equal('testfile.txt')
         done()
       })
     })
 
-    it('.urlAdd http with redirection', (done) => {
-      ipfs.util.addFromURL('http://covers.openlibrary.org/book/id/969165.jpg', (err, result) => {
+    it('a hidden file in a directory', (done) => {
+      const filesPath = path.join(__dirname, '/fixtures/test-folder')
+      ipfs.util.addFromFs(filesPath, { recursive: true, hidden: true }, (err, result) => {
         expect(err).to.not.exist()
-        expect(result[0].hash).to.equal('QmaL9zy7YUfvWmtD5ZXp42buP7P4xmZJWFkm78p8FJqgjg')
+        expect(result.length).to.be.above(10)
+        expect(result.map(object => object.path)).to.include('test-folder/.hiddenTest.txt')
+        expect(result.map(object => object.hash)).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt')
         done()
       })
     })
   })
 
-  describe('Promise API', () => {})
+  it('.urlAdd http', function (done) {
+    this.timeout(20 * 1000)
+
+    ipfs.util.addFromURL('http://example.com/', (err, result) => {
+      expect(err).to.not.exist()
+      expect(result.length).to.equal(1)
+      done()
+    })
+  })
+
+  it('.urlAdd https', (done) => {
+    ipfs.util.addFromURL('https://example.com/', (err, result) => {
+      expect(err).to.not.exist()
+      expect(result.length).to.equal(1)
+      done()
+    })
+  })
+
+  it('.urlAdd http with redirection', (done) => {
+    ipfs.util.addFromURL('http://covers.openlibrary.org/book/id/969165.jpg', (err, result) => {
+      expect(err).to.not.exist()
+      expect(result[0].hash).to.equal('QmaL9zy7YUfvWmtD5ZXp42buP7P4xmZJWFkm78p8FJqgjg')
+      done()
+    })
+  })
 })
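Finally, the readable-stream counterpart of add, from add-readable-stream.js earlier in this patch; a rough sketch with a hypothetical input tuple: write { path, content } objects in, read the added entries back out.

const ipfsAPI = require('ipfs-api')
const ipfs = ipfsAPI('localhost', '5001')

const stream = ipfs.files.addReadableStream()

stream.on('data', (file) => console.log('added', file.path, file.hash))
stream.on('error', (err) => console.error(err))

// hypothetical input tuple
stream.write({ path: 'hello.txt', content: Buffer.from('hello\n') })
stream.end() // triggers the actual add; results are pushed back out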