diff --git a/.aegir.js b/.aegir.js new file mode 100644 index 00000000..827b37d2 --- /dev/null +++ b/.aegir.js @@ -0,0 +1,36 @@ +'use strict' + +const parallel = require('async/parallel') +const ads = require('./test/utils/another-daemon-spawner') +const js = ads.spawnJsNode +const go = ads.spawnGoNode +const stop = ads.stopNodes + +/* + * spawns a daemon with ports numbers starting in 10 and ending in `num` + */ +function start (done) { + const base = '/ip4/127.0.0.1/tcp' + parallel([ + (cb) => go([`${base}/10027`, `${base}/20027/ws`], true, 33027, 44027, cb), + (cb) => go([`${base}/10028`, `${base}/20028/ws`], true, 33028, 44028, cb), + (cb) => go([`${base}/10031`, `${base}/20031/ws`], true, 33031, 44031, cb), + (cb) => go([`${base}/10032`, `${base}/20032/ws`], true, 33032, 44032, cb) + ], done) +} + +module.exports = { + karma: { + files: [{ + pattern: 'node_modules/interface-ipfs-core/test/fixtures/**/*', + watched: false, + served: true, + included: false, + singleRun: false + }] + }, + hooks: { + pre: start, + post: stop + } +} diff --git a/.gitignore b/.gitignore index e920c167..18ddbab6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,13 @@ +**/node_modules/ +**/*.log +test/repo-tests* +**/bundle.js +docs # Logs logs *.log -npm-debug.log* + +coverage # Runtime data pids @@ -20,14 +26,17 @@ coverage # node-waf configuration .lock-wscript -# Compiled binary addons (http://nodejs.org/api/addons.html) -build/Release +build # Dependency directory +# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git node_modules -# Optional npm cache directory -.npm +lib +dist +test/test-data/go-ipfs-repo/LOCK +test/test-data/go-ipfs-repo/LOG +test/test-data/go-ipfs-repo/LOG.old -# Optional REPL history -.node_repl_history +# while testing npm5 +package-lock.json diff --git a/.npmignore b/.npmignore new file mode 100644 index 00000000..59335fda --- /dev/null +++ b/.npmignore @@ -0,0 +1,34 @@ +**/node_modules/ +**/*.log +test/repo-tests* + +# Logs +logs +*.log + +coverage + +# Runtime data +pids +*.pid +*.seed + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# node-waf configuration +.lock-wscript + +build + +# Dependency directory +# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git +node_modules + +test diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..8253ebb9 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,26 @@ +sudo: false +language: node_js + +matrix: + include: + - node_js: 6 + env: CXX=g++-4.8 + - node_js: 8 + env: CXX=g++-4.8 + +script: + - npm run lint + - npm run test + - make test + +before_script: + - export DISPLAY=:99.0 + - sh -e /etc/init.d/xvfb start + +addons: + firefox: 'latest' + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - g++-4.8 diff --git a/README.md b/README.md index 94f7cd1c..8abfed18 100644 --- a/README.md +++ b/README.md @@ -4,11 +4,51 @@ [![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/) [![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) [![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) 
+[![circle](https://circleci.com/gh/ipfs/interop/tree/master.svg?style=svg)](https://circleci.com/gh/ipfs/interop/tree/master) +[![travis](https://travis-ci.org/ipfs/interop.svg?branch=port-tests)](https://travis-ci.org/ipfs/interop) +[![appveyor](https://ci.appveyor.com/api/projects/status/fvth3vq3h0rd9rf5/branch/master?svg=true)](https://ci.appveyor.com/project/wubalubadubdub/interop/branch/master) > Interoperability tests for IPFS Implementations This repository will be used for interop tests. Please jump into the issues if you'd like to help out setting this up! +## Usage + +### Install + +``` +> git clone git@github.com:ipfs/interop.git +> cd interop +> npm install +``` + +### Run the tests + +``` +> npm test +``` + +### Test with a not yet released version of js-ipfs + +``` +# Do the steps in the install section, then +> cd .. +> git clone git@github.com:ipfs/js-ipfs.git +> cd js-ipfs +> npm install +> npm link +> cd ../interop +> npm link ipfs +> npm test +``` + +### Test with a not yet released version of go-ipfs + +``` +# Do the steps in the install section, then +> IPFS_EXEC= npm test +``` + ## Contribute Feel free to join in. All welcome. Open an [issue](https://github.com/ipfs/ipfs-interop/issues)! @@ -19,4 +59,4 @@ This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/c ## License -MIT +[MIT](./LICENSE) diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 00000000..58aef650 --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,28 @@ +version: "{build}" + +environment: + matrix: + - nodejs_version: "6" + - nodejs_version: "8" + +matrix: + fast_finish: true + +install: + # Install Node.js + - ps: Install-Product node $env:nodejs_version + + # Upgrade npm + - npm install -g npm + + # Output our current versions for debugging + - node --version + - npm --version + + # Install our package dependencies + - npm install + +test_script: + - npm run test:nodejs + +build: off diff --git a/circle.yml b/circle.yml new file mode 100644 index 00000000..56f7efbe --- /dev/null +++ b/circle.yml @@ -0,0 +1,14 @@ +machine: + node: + version: stable + +dependencies: + pre: + - google-chrome --version + - curl -L -o google-chrome.deb https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb + - sudo dpkg -i google-chrome.deb || true + - sudo apt-get update + - sudo apt-get install -f + - sudo apt-get install --only-upgrade lsb-base + - sudo dpkg -i google-chrome.deb + - google-chrome --version diff --git a/package.json b/package.json new file mode 100644 index 00000000..9e9eddd9 --- /dev/null +++ b/package.json @@ -0,0 +1,71 @@ +{ + "name": "interop-ipfs", + "version": "0.0.0", + "description": "Interoperability Tests for IPFS", + "main": "", + "browser": { + "./src/core/components/init-assets.js": false, + "./src/core/runtime/config-nodejs.json": "./src/core/runtime/config-browser.json", + "./src/core/runtime/libp2p-nodejs.js": "./src/core/runtime/libp2p-browser.js", + "./src/core/runtime/repo-nodejs.js": "./src/core/runtime/repo-browser.js", + "./test/utils/create-repo-nodejs.js": "./test/utils/create-repo-browser.js", + "stream": "readable-stream" + }, + "engines": { + "node": ">=6.0.0", + "npm": ">=3.0.0" + }, + "scripts": { + "lint": "aegir lint", + "test": "aegir test -t node", + "test:nodejs": "aegir test -t node -f test/node.js", + "test:browser": "aegir test -t browser -f test/browser.js" + }, + "pre-commit": [ + "lint" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/ipfs/interop.git" + }, + "keywords": [ + "IPFS" + ], +
"author": "David Dias ", + "license": "MIT", + "bugs": { + "url": "https://github.com/ipfs/interop/issues" + }, + "homepage": "https://github.com/ipfs/interop#readme", + "devDependencies": { + "aegir": "^12.2.0", + "async": "^2.6.0", + "bl": "^1.2.1", + "bs58": "^4.0.1", + "buffer-loader": "0.0.1", + "chai": "^4.1.2", + "cids": "^0.5.2", + "detect-node": "^2.0.3", + "dir-compare": "^1.4.0", + "dirty-chai": "^2.0.1", + "eslint-plugin-react": "^7.5.1", + "expose-loader": "^0.7.4", + "form-data": "^2.3.1", + "hat": "0.0.3", + "ipfs": "^0.27.5", + "ipfs-api": "^17.2.4", + "ipfsd-ctl": "~0.26.0", + "left-pad": "^1.2.0", + "lodash": "^4.17.4", + "mocha": "^4.0.1", + "ncp": "^2.0.0", + "pre-commit": "^1.2.2", + "pretty-bytes": "^4.0.2", + "random-fs": "^1.0.3", + "rimraf": "^2.6.2", + "stream-to-promise": "^2.2.0", + "transform-loader": "^0.2.4" + }, + "dependencies": {}, + "contributors": [] +} diff --git a/test/.gitignore b/test/.gitignore new file mode 100644 index 00000000..ae3c1726 --- /dev/null +++ b/test/.gitignore @@ -0,0 +1 @@ +/bin/ diff --git a/test/browser.js b/test/browser.js new file mode 100644 index 00000000..40a2a297 --- /dev/null +++ b/test/browser.js @@ -0,0 +1,10 @@ +/* eslint-env mocha */ +'use strict' + +describe('browser interop tests', () => { + it('need to get written', function (done) { + this.timeout(10 * 1000) + // for teardown time + setTimeout(done, 5 * 1000) + }) +}) diff --git a/test/circuit-relay.js b/test/circuit-relay.js new file mode 100644 index 00000000..3c60bfbb --- /dev/null +++ b/test/circuit-relay.js @@ -0,0 +1,167 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const parallel = require('async/parallel') +const series = require('async/series') +const bl = require('bl') +const waterfall = require('async/waterfall') +const multiaddr = require('multiaddr') +const crypto = require('crypto') + +const ads = require('./utils/another-daemon-spawner') +const js = ads.spawnJsNode +const go = ads.spawnGoNode +const stop = ads.stopNodes + +describe.skip('circuit interop', () => { + let jsTCP + let jsTCPAddrs + let jsWS + let jsWSAddrs + let jsRelayAddrs + + let goRelayAddrs + + let goTCPAddrs + let goTCP + + let goWSAddrs + let goWS + + beforeEach((done) => { + const base = '/ip4/127.0.0.1/tcp' + + parallel([ + (cb) => js([`${base}/61454/ws`, `${base}/61453`], true, cb), + (cb) => js([`${base}/9002`], cb), + (cb) => js([`${base}/9003/ws`], cb), + (cb) => go([`${base}/0/ws`, `${base}/0`], true, cb), + (cb) => go([`${base}/0`], cb), + (cb) => go([`${base}/0/ws`], cb) + ], (err, nodes) => { + expect(err).to.not.exist() + + jsRelayAddrs = nodes[0][1].map((a) => a.toString()).filter((a) => !a.includes('/p2p-circuit')) + jsTCP = nodes[1][0] + jsTCPAddrs = nodes[1][1].map((a) => a.toString()).filter((a) => a.includes('/p2p-circuit')) + jsWS = nodes[2][0] + jsWSAddrs = nodes[2][1].map((a) => a.toString()).filter((a) => a.includes('/p2p-circuit')) + + goRelayAddrs = nodes[3][1] + goTCP = nodes[4][0].api + goTCPAddrs = nodes[4][1] + goWS = nodes[5][0].api + goWSAddrs = nodes[5][1] + done() + }) + }) + + afterEach(() => stop()) + + it('jsWS <-> jsRelay <-> jsTCP', (done) => { + const data = crypto.randomBytes(128) + series([ + (cb) => jsWS.swarm.connect(jsRelayAddrs[0], cb), + (cb) => jsTCP.swarm.connect(jsRelayAddrs[1], cb), + (cb) => setTimeout(cb, 1000), + (cb) => jsTCP.swarm.connect(jsWSAddrs[0], cb) + ], (err) 
=> { + expect(err).to.not.exist() + waterfall([ + (cb) => jsTCP.files.add(data, cb), + (res, cb) => jsWS.files.cat(res[0].hash, cb), + (stream, cb) => stream.pipe(bl(cb)) + ], done) + }) + }) + + it('goWS <-> jsRelay <-> goTCP', (done) => { + const data = crypto.randomBytes(128) + series([ + (cb) => goWS.swarm.connect(jsRelayAddrs[0], cb), + (cb) => goTCP.swarm.connect(jsRelayAddrs[1], cb), + (cb) => setTimeout(cb, 1000), + (cb) => goTCP.swarm.connect(`/p2p-circuit/ipfs/${multiaddr(goWSAddrs[0]).getPeerId()}`, cb) + ], (err) => { + expect(err).to.not.exist() + waterfall([ + (cb) => goTCP.files.add(data, cb), + (res, cb) => goWS.files.cat(res[0].hash, cb), + (stream, cb) => stream.pipe(bl(cb)) + ], done) + }) + }) + + it('jsWS <-> jsRelay <-> goTCP', (done) => { + const data = crypto.randomBytes(128) + series([ + (cb) => jsWS.swarm.connect(jsRelayAddrs[0], cb), + (cb) => goTCP.swarm.connect(jsRelayAddrs[1], cb), + (cb) => setTimeout(cb, 1000), + (cb) => goTCP.swarm.connect(jsWSAddrs[0], cb) + ], (err) => { + expect(err).to.not.exist() + waterfall([ + (cb) => goTCP.files.add(data, cb), + (res, cb) => jsWS.files.cat(res[0].hash, cb), + (stream, cb) => stream.pipe(bl(cb)) + ], done) + }) + }) + + it('jsTCP <-> goRelay <-> jsWS', (done) => { + const data = crypto.randomBytes(128) + series([ + (cb) => jsTCP.swarm.connect(goRelayAddrs[2], cb), + (cb) => jsWS.swarm.connect(goRelayAddrs[0], cb), + (cb) => setTimeout(cb, 1000), + (cb) => jsWS.swarm.connect(jsTCPAddrs[0], cb) + ], (err) => { + expect(err).to.not.exist() + waterfall([ + (cb) => jsTCP.files.add(data, cb), + (res, cb) => jsWS.files.cat(res[0].hash, cb), + (stream, cb) => stream.pipe(bl(cb)) + ], done) + }) + }) + + it('goTCP <-> goRelay <-> goWS', (done) => { + const data = crypto.randomBytes(128) + series([ + (cb) => goWS.swarm.connect(goRelayAddrs[0], cb), + (cb) => goTCP.swarm.connect(goRelayAddrs[2], cb), + (cb) => setTimeout(cb, 1000), + (cb) => goWS.swarm.connect(`/p2p-circuit/ipfs/${multiaddr(goTCPAddrs[0]).getPeerId()}`, cb) + ], (err) => { + expect(err).to.not.exist() + waterfall([ + (cb) => goTCP.files.add(data, cb), + (res, cb) => goWS.files.cat(res[0].hash, cb), + (stream, cb) => stream.pipe(bl(cb)) + ], done) + }) + }) + + it('jsWS <-> goRelay <-> goTCP', (done) => { + const data = crypto.randomBytes(128) + series([ + (cb) => jsWS.swarm.connect(goRelayAddrs[0], cb), + (cb) => goTCP.swarm.connect(goRelayAddrs[2], cb), + (cb) => setTimeout(cb, 1000), + (cb) => goTCP.swarm.connect(`/p2p-circuit/ipfs/${multiaddr(jsWSAddrs[0]).getPeerId()}`, cb) + ], (err) => { + expect(err).to.not.exist() + waterfall([ + (cb) => goTCP.files.add(data, cb), + (res, cb) => jsWS.files.cat(res[0].hash, cb), + (stream, cb) => stream.pipe(bl(cb)) + ], done) + }) + }) +}) diff --git a/test/exchange-files.js b/test/exchange-files.js new file mode 100644 index 00000000..7c1e65e8 --- /dev/null +++ b/test/exchange-files.js @@ -0,0 +1,218 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const series = require('async/series') +const parallel = require('async/parallel') +const waterfall = require('async/waterfall') +const crypto = require('crypto') +const pretty = require('pretty-bytes') +const randomFs = require('random-fs') +const promisify = require('promisify-es6') +const rimraf = require('rimraf') + +const rmDir = promisify(rimraf) + +const tmpDir = require('./utils/interop-daemon-spawner/util').tmpDir +const GoDaemon = 
require('./utils/interop-daemon-spawner/go') +const JsDaemon = require('./utils/interop-daemon-spawner/js') + +const sizes = [ + 1024, + 1024 * 62, + // starts failing with spdy + 1024 * 64, + 1024 * 512, + 1024 * 768, + 1024 * 1023, + 1024 * 1024, + 1024 * 1024 * 4, + 1024 * 1024 * 8 +] + +const dirs = [ + 5, + 10, + 50, + 100 +] + +describe('exchange files', function () { + this.timeout(20 * 1000) + + let goDaemon + let jsDaemon + let js2Daemon + + before(function (done) { + this.timeout(15 * 1000) + goDaemon = new GoDaemon() + jsDaemon = new JsDaemon({port: 1}) + js2Daemon = new JsDaemon({port: 2}) + + parallel([ + (cb) => goDaemon.start(cb), + (cb) => jsDaemon.start(cb), + (cb) => js2Daemon.start(cb) + ], done) + }) + + after((done) => { + series([ + (cb) => goDaemon.stop(cb), + (cb) => jsDaemon.stop(cb), + (cb) => js2Daemon.stop(cb) + ], done) + }) + + it('connect go <-> js', (done) => { + let jsId + let goId + + series([ + (cb) => parallel([ + (cb) => jsDaemon.api.id(cb), + (cb) => goDaemon.api.id(cb) + ], (err, ids) => { + expect(err).to.not.exist() + jsId = ids[0] + goId = ids[1] + cb() + }), + (cb) => goDaemon.api.swarm.connect(jsId.addresses[0], cb), + (cb) => jsDaemon.api.swarm.connect(goId.addresses[0], cb), + (cb) => parallel([ + (cb) => goDaemon.api.swarm.peers(cb), + (cb) => jsDaemon.api.swarm.peers(cb) + ], (err, peers) => { + expect(err).to.not.exist() + expect(peers[0].map((p) => p.peer.toB58String())).to.include(jsId.id) + expect(peers[1].map((p) => p.peer.toB58String())).to.include(goId.id) + cb() + }) + ], done) + }) + + it('connect js <-> js', (done) => { + let jsId + let js2Id + + series([ + (cb) => parallel([ + (cb) => jsDaemon.api.id(cb), + (cb) => js2Daemon.api.id(cb) + ], (err, ids) => { + expect(err).to.not.exist() + jsId = ids[0] + js2Id = ids[1] + cb() + }), + (cb) => js2Daemon.api.swarm.connect(jsId.addresses[0], cb), + (cb) => jsDaemon.api.swarm.connect(js2Id.addresses[0], cb), + (cb) => parallel([ + (cb) => js2Daemon.api.swarm.peers(cb), + (cb) => jsDaemon.api.swarm.peers(cb) + ], (err, peers) => { + expect(err).to.not.exist() + expect(peers[0].map((p) => p.peer.toB58String())).to.include(jsId.id) + expect(peers[1].map((p) => p.peer.toB58String())).to.include(js2Id.id) + cb() + }) + ], done) + }) + + describe('cat file', () => sizes.forEach((size) => { + it(`go -> js: ${pretty(size)}`, (done) => { + const data = crypto.randomBytes(size) + waterfall([ + (cb) => goDaemon.api.add(data, cb), + (res, cb) => jsDaemon.api.cat(res[0].hash, cb) + ], (err, file) => { + expect(err).to.not.exist() + expect(file).to.be.eql(data) + done() + }) + }) + + it(`js -> go: ${pretty(size)}`, (done) => { + const data = crypto.randomBytes(size) + waterfall([ + (cb) => jsDaemon.api.add(data, cb), + (res, cb) => goDaemon.api.cat(res[0].hash, cb) + ], (err, file) => { + expect(err).to.not.exist() + expect(file).to.be.eql(data) + done() + }) + }) + + it(`js -> js: ${pretty(size)}`, (done) => { + const data = crypto.randomBytes(size) + waterfall([ + (cb) => js2Daemon.api.add(data, cb), + (res, cb) => jsDaemon.api.cat(res[0].hash, cb) + ], (err, file) => { + expect(err).to.not.exist() + expect(file).to.be.eql(data) + done() + }) + }) + })) + + // TODO these tests are not fetching the full dir?? 
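The TODO above points out that the directory tests below only assert that `object.get` succeeds on the receiving daemon, which does not prove the whole directory transferred. One possible tightening, sketched here only as an illustration (it is not wired into the suite), is to fetch the root node from both daemons and compare link counts and root data. The helper name `compareRootNodes` is hypothetical; the sketch reuses `parallel` and `expect` already required at the top of `test/exchange-files.js` and assumes the DAGNode returned by `object.get` exposes `links` and `data` properties, as the dag-pb nodes returned by ipfs-api did at the time.

```js
// Hypothetical helper, not part of this diff: compare the root directory node
// as seen by the daemon that added it and the daemon that fetches it.
function compareRootNodes (srcDaemon, dstDaemon, hash, callback) {
  parallel([
    (cb) => srcDaemon.api.object.get(hash, cb),
    (cb) => dstDaemon.api.object.get(hash, cb)
  ], (err, nodes) => {
    if (err) { return callback(err) }
    // same number of links and identical root data on both sides
    expect(nodes[1].links.length).to.equal(nodes[0].links.length)
    expect(nodes[1].data).to.eql(nodes[0].data)
    callback()
  })
}
```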
+ describe('get directory', () => dirs.forEach((num) => { + it(`go -> js: depth: 5, num: ${num}`, () => { + const dir = tmpDir() + return randomFs({ + path: dir, + depth: 5, + number: num + }).then(() => { + return goDaemon.api.util.addFromFs(dir, { recursive: true }) + }).then((res) => { + const hash = res[res.length - 1].hash + return jsDaemon.api.object.get(hash) + }).then((res) => { + expect(res).to.exist() + return rmDir(dir) + }) + }) + + it(`js -> go: depth: 5, num: ${num}`, () => { + const dir = tmpDir() + return randomFs({ + path: dir, + depth: 5, + number: num + }).then(() => { + return jsDaemon.api.util.addFromFs(dir, { recursive: true }) + }).then((res) => { + const hash = res[res.length - 1].hash + return goDaemon.api.object.get(hash) + }).then((res) => { + expect(res).to.exist() + return rmDir(dir) + }) + }) + + it(`js -> js: depth: 5, num: ${num}`, () => { + const dir = tmpDir() + return randomFs({ + path: dir, + depth: 5, + number: num + }).then(() => { + return js2Daemon.api.util.addFromFs(dir, { recursive: true }) + }).then((res) => { + const hash = res[res.length - 1].hash + return jsDaemon.api.object.get(hash) + }).then((res) => { + expect(res).to.exist() + return rmDir(dir) + }) + }) + })) +}) diff --git a/test/fixtures/.gitkeep b/test/fixtures/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/test/kad-dht.js b/test/kad-dht.js new file mode 100644 index 00000000..1bd5fdba --- /dev/null +++ b/test/kad-dht.js @@ -0,0 +1,109 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const series = require('async/series') +const crypto = require('crypto') +const parallel = require('async/parallel') +const waterfall = require('async/waterfall') +const bl = require('bl') + +const GODaemon = require('./utils/interop-daemon-spawner/go') +const JSDaemon = require('./utils/interop-daemon-spawner/js') + +describe.skip('kad-dht', () => { + describe('a JS node in the land of Go', () => { + let jsD + let goD1 + let goD2 + let goD3 + + before((done) => { + goD1 = new GODaemon() + goD2 = new GODaemon() + goD3 = new GODaemon() + + jsD = new JSDaemon({ port: 40 }) + + parallel([ + (cb) => goD1.start(cb), + (cb) => goD2.start(cb), + (cb) => goD3.start(cb), + (cb) => jsD.start(cb) + ], done) + }) + + after((done) => { + series([ + (cb) => goD1.stop(cb), + (cb) => goD2.stop(cb), + (cb) => goD3.stop(cb), + (cb) => jsD.stop(cb) + ], done) + }) + + it('make connections', (done) => { + parallel([ + (cb) => jsD.api.id(cb), + (cb) => goD1.api.id(cb), + (cb) => goD2.api.id(cb), + (cb) => goD3.api.id(cb) + ], (err, ids) => { + expect(err).to.not.exist() + parallel([ + (cb) => jsD.api.swarm.connect(ids[1].addresses[0], cb), + (cb) => goD1.api.swarm.connect(ids[2].addresses[0], cb), + (cb) => goD2.api.swarm.connect(ids[3].addresses[0], cb) + ], done) + }) + }) + + it('one hop', (done) => { + const data = crypto.randomBytes(9001) + + waterfall([ + (cb) => goD1.api.add(data, cb), + (res, cb) => jsD.api.cat(res[0].hash, cb), + (stream, cb) => stream.pipe(bl(cb)) + ], (err, file) => { + expect(err).to.not.exist() + expect(file).to.be.eql(data) + done() + }) + }) + + it('two hops', (done) => { + const data = crypto.randomBytes(9001) + + waterfall([ + (cb) => goD2.api.add(data, cb), + (res, cb) => jsD.api.cat(res[0].hash, cb), + (stream, cb) => stream.pipe(bl(cb)) + ], (err, file) => { + expect(err).to.not.exist() + expect(file).to.be.eql(data) + done() + }) + }) + + it('three hops', 
(done) => { + const data = crypto.randomBytes(9001) + + waterfall([ + (cb) => goD3.api.add(data, cb), + (res, cb) => jsD.api.cat(res[0].hash, cb), + (stream, cb) => stream.pipe(bl(cb)) + ], (err, file) => { + expect(err).to.not.exist() + expect(file).to.be.eql(data) + done() + }) + }) + }) + + describe('a Go node in the land of JS', () => {}) + describe('hybrid', () => {}) +}) diff --git a/test/node.js b/test/node.js new file mode 100644 index 00000000..7e4a5134 --- /dev/null +++ b/test/node.js @@ -0,0 +1,8 @@ +/* eslint-env mocha */ +'use strict' + +require('./repo') +require('./exchange-files') +require('./circuit-relay') +require('./kad-dht') +require('./pubsub') diff --git a/test/pubsub.js b/test/pubsub.js new file mode 100644 index 00000000..a497b76e --- /dev/null +++ b/test/pubsub.js @@ -0,0 +1,346 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const series = require('async/series') +const parallel = require('async/parallel') + +const GoDaemon = require('./utils/interop-daemon-spawner/go') +const JSDaemon = require('./utils/interop-daemon-spawner/js') + +/* + * Wait for a condition to become true. When its true, callback is called. + */ +function waitFor (predicate, callback) { + const ttl = Date.now() + (2 * 1000) + const self = setInterval(() => { + if (predicate()) { + clearInterval(self) + return callback() + } + if (Date.now() > ttl) { + clearInterval(self) + return callback(new Error('waitFor time expired')) + } + }, 500) +} + +describe('pubsub', function () { + this.timeout(5 * 1000) + + let jsD + let goD + let jsId + let goId + + before(function (done) { + this.timeout(50 * 1000) + + goD = new GoDaemon({ + disposable: true, + init: true, + flags: ['--enable-pubsub-experiment'] + }) + jsD = new JSDaemon() + + parallel([ + (cb) => goD.start(cb), + (cb) => jsD.start(cb) + ], (done)) + }) + + after(function (done) { + this.timeout(50 * 1000) + + parallel([ + (cb) => goD.stop(cb), + (cb) => jsD.stop(cb) + ], done) + }) + + it('make connections', (done) => { + series([ + (cb) => jsD.api.id(cb), + (cb) => goD.api.id(cb) + ], (err, ids) => { + expect(err).to.not.exist() + + jsId = ids[0].id + goId = ids[1].id + + const jsLocalAddr = ids[0].addresses.find(a => a.includes('127.0.0.1')) + const goLocalAddr = ids[1].addresses.find(a => a.includes('127.0.0.1')) + + parallel([ + (cb) => jsD.api.swarm.connect(goLocalAddr, cb), + (cb) => goD.api.swarm.connect(jsLocalAddr, cb), + (cb) => setTimeout(() => { + cb() + }, 1000) + ], done) + }) + }) + + describe('ascii data', () => { + const data = Buffer.from('hello world') + + it('publish from Go, subscribe on Go', (done) => { + const topic = 'pubsub-go-go' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString()).to.equal(data.toString()) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', goId) + } + + series([ + (cb) => goD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => goD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + + it('publish from JS, subscribe on JS', (done) => { + const topic = 'pubsub-js-js' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString()).to.equal(data.toString()) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + 
expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', jsId) + } + + series([ + (cb) => jsD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => jsD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + + it('publish from JS, subscribe on Go', (done) => { + const topic = 'pubsub-js-go' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString()).to.equal(data.toString()) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', jsId) + } + + series([ + (cb) => goD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => setTimeout(() => { cb() }, 500), + (cb) => jsD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + + it('publish from Go, subscribe on JS', (done) => { + const topic = 'pubsub-go-js' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString()).to.equal(data.toString()) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', goId) + } + + series([ + (cb) => jsD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => setTimeout(() => { cb() }, 500), + (cb) => goD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + }) + + describe('non-ascii data', () => { + const data = Buffer.from('你好世界') + + it('publish from Go, subscribe on Go', (done) => { + const topic = 'pubsub-non-ascii-go-go' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString()).to.equal(data.toString()) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', goId) + } + + series([ + (cb) => goD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => goD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + + it('publish from JS, subscribe on JS', (done) => { + const topic = 'pubsub-non-ascii-js-js' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString()).to.equal(data.toString()) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', jsId) + } + + series([ + (cb) => jsD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => jsD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + + it('publish from JS, subscribe on Go', (done) => { + const topic = 'pubsub-non-ascii-js-go' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString()).to.equal(data.toString()) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', jsId) + } + + series([ + (cb) => goD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => setTimeout(() => { cb() }, 500), + (cb) => jsD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + + it('publish from Go, subscribe on JS', (done) => { + const topic = 'pubsub-non-ascii-go-js' + let n = 0 + + function checkMessage (msg) { + ++n + 
expect(msg.data.toString()).to.equal(data.toString()) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', goId) + } + + series([ + (cb) => jsD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => setTimeout(() => { cb() }, 500), + (cb) => goD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + }) + + describe('binary data', () => { + const data = Buffer.from('a36161636179656162830103056164a16466666666f400010203040506070809', 'hex') + + it('publish from Go, subscribe on Go', (done) => { + const topic = 'pubsub-binary-go-go' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString('hex')).to.equal(data.toString('hex')) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', goId) + } + + series([ + (cb) => goD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => setTimeout(() => { cb() }, 500), + (cb) => goD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + + it('publish from Go, subscribe on JS', (done) => { + const topic = 'pubsub-binary-go-js' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString('hex')).to.equal(data.toString('hex')) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', goId) + } + + series([ + (cb) => jsD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => setTimeout(() => { cb() }, 500), + (cb) => goD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + + it('publish from JS, subscribe on Go', (done) => { + const topic = 'pubsub-binary-js-go' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString('hex')).to.equal(data.toString('hex')) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', jsId) + } + + series([ + (cb) => goD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => setTimeout(() => { cb() }, 500), + (cb) => jsD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + + it('publish from JS, subscribe on JS', (done) => { + const topic = 'pubsub-binary-js-js' + let n = 0 + + function checkMessage (msg) { + ++n + expect(msg.data.toString('hex')).to.equal(data.toString('hex')) + expect(msg).to.have.property('seqno') + expect(Buffer.isBuffer(msg.seqno)).to.be.eql(true) + expect(msg).to.have.property('topicIDs').eql([topic]) + expect(msg).to.have.property('from', jsId) + } + + series([ + (cb) => jsD.api.pubsub.subscribe(topic, checkMessage, cb), + (cb) => setTimeout(() => { cb() }, 500), + (cb) => jsD.api.pubsub.publish(topic, data, cb), + (cb) => waitFor(() => n === 1, cb) + ], done) + }) + }) +}) diff --git a/test/repo.js b/test/repo.js new file mode 100644 index 00000000..6e348eb7 --- /dev/null +++ b/test/repo.js @@ -0,0 +1,85 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const waterfall = require('async/waterfall') +const crypto = require('crypto') +const os = require('os') +const 
path = require('path') +const hat = require('hat') + +const GoDaemon = require('./utils/interop-daemon-spawner/go') +const JsDaemon = require('./utils/interop-daemon-spawner/js') + +function catAndCheck (daemon, hash, data, callback) { + daemon.api.cat(hash, (err, fileData) => { + expect(err).to.not.exist() + expect(fileData).to.eql(data) + callback() + }) +} + +describe('repo', () => { + it('read repo: go -> js', (done) => { + const dir = path.join(os.tmpdir(), hat()) + const data = crypto.randomBytes(1024 * 5) + + const goDaemon = new GoDaemon({ + init: true, + disposable: false, + path: dir + }) + let jsDaemon + + let hash + waterfall([ + (cb) => goDaemon.start(cb), + (cb) => goDaemon.api.add(data, cb), + (res, cb) => { + hash = res[0].hash + catAndCheck(goDaemon, hash, data, cb) + }, + (cb) => goDaemon.stop(cb), + (cb) => { + jsDaemon = new JsDaemon({ + init: false, + disposable: false, + path: dir + }) + jsDaemon.start(cb) + }, + (cb) => catAndCheck(jsDaemon, hash, data, cb), + (cb) => jsDaemon.stop(cb) + ], done) + }) + + // This test is skipped due to an update on go-ipfs that changed how the + // datastore is configured + it.skip('read repo: js -> go', (done) => { + const dir = path.join(os.tmpdir(), hat()) + const data = crypto.randomBytes(1024 * 5) + + const jsDaemon = new JsDaemon({init: true, disposable: false, path: dir}) + let goDaemon + + let hash + waterfall([ + (cb) => jsDaemon.start(cb), + (cb) => jsDaemon.api.add(data, cb), + (res, cb) => { + hash = res[0].hash + catAndCheck(jsDaemon, hash, data, cb) + }, + (cb) => jsDaemon.stop(cb), + (cb) => { + goDaemon = new GoDaemon({init: false, disposable: false, path: dir}) + goDaemon.start(cb) + }, + (cb) => catAndCheck(goDaemon, hash, data, cb), + (cb) => goDaemon.stop(cb) + ], done) + }) +}) diff --git a/test/utils/another-daemon-spawner.js b/test/utils/another-daemon-spawner.js new file mode 100644 index 00000000..c8d2295c --- /dev/null +++ b/test/utils/another-daemon-spawner.js @@ -0,0 +1,124 @@ +/* eslint-env mocha */ + +'use strict' + +const waterfall = require('async/waterfall') +const series = require('async/series') + +const relayConfig = require('./ipfs-factory-daemon/default-config.json') +const Factory = require('./ipfs-factory-daemon') +const GoDaemon = require('./interop-daemon-spawner/go') + +const nodes = [] +const factory = new Factory() +exports = module.exports + +exports.spawnGoNode = (addrs, hop, api, gateway, callback) => { + if (typeof hop === 'function') { + callback = hop + hop = false + } + if (typeof api === 'function') { + callback = api + api = 0 + } + if (typeof gateway === 'function') { + callback = gateway +
gateway = 0 + } + + api = api || 0 + gateway = gateway || 0 + + callback = callback || function noop () {} + + waterfall([ + (cb) => factory.spawnNode(null, Object.assign(relayConfig, { + Addresses: { + Swarm: addrs, + API: `/ip4/0.0.0.0/tcp/${api}`, + Gateway: `/ip4/0.0.0.0/tcp/${gateway}` + }, + EXPERIMENTAL: { + Swarm: { + DisableRelay: false, + EnableRelayHop: hop + } + } + }), cb), + (node, cb) => { + relayPeer = node + relayPeer.swarm.localAddrs(cb) + }, + (addrs, cb) => { + relayAddrs = addrs + cb() + } + ], (err) => { + if (err) { + return callback(err) + } + callback(null, relayPeer, relayAddrs) + }) +} + +exports.stopNodes = (callback) => { + series([ + (cb) => factory.dismantle(cb) + ].concat(nodes.map((node) => (cb) => { + setTimeout(() => node.stop(cb), 100) + })), callback) +} diff --git a/test/utils/clean.js b/test/utils/clean.js new file mode 100644 index 00000000..13752b59 --- /dev/null +++ b/test/utils/clean.js @@ -0,0 +1,15 @@ +'use strict' + +const rimraf = require('rimraf') +const fs = require('fs') + +module.exports = (dir) => { + try { + fs.accessSync(dir) + } catch (err) { + // Does not exist so all good + return + } + + rimraf.sync(dir) +} diff --git a/test/utils/create-repo-browser.js b/test/utils/create-repo-browser.js new file mode 100644 index 00000000..18b220b7 --- /dev/null +++ b/test/utils/create-repo-browser.js @@ -0,0 +1,28 @@ +/* global self */ +'use strict' + +const IPFSRepo = require('ipfs-repo') +const hat = require('hat') + +const idb = self.indexedDB || + self.mozIndexedDB || + self.webkitIndexedDB || + self.msIndexedDB + +function createTempRepo (repoPath) { + repoPath = repoPath || '/ipfs-' + hat() + + const repo = new IPFSRepo(repoPath) + + repo.teardown = (done) => { + repo.close(() => { + idb.deleteDatabase(repoPath) + idb.deleteDatabase(repoPath + '/blocks') + done() + }) + } + + return repo +} + +module.exports = createTempRepo diff --git a/test/utils/create-repo-nodejs.js b/test/utils/create-repo-nodejs.js new file mode 100644 index 00000000..d4a6f87e --- /dev/null +++ b/test/utils/create-repo-nodejs.js @@ -0,0 +1,29 @@ +'use strict' + +const IPFSRepo = require('ipfs-repo') +const clean = require('./clean') +const os = require('os') +const path = require('path') +const hat = require('hat') +const series = require('async/series') + +function createTempRepo (repoPath) { + repoPath = repoPath || path.join(os.tmpdir(), '/ipfs-test-' + hat()) + + const repo = new IPFSRepo(repoPath) + + repo.teardown = (done) => { + series([ + // ignore err, might have been closed already + (cb) => repo.close(() => cb()), + (cb) => { + clean(repoPath) + cb() + } + ], done) + } + + return repo +} + +module.exports = createTempRepo diff --git a/test/utils/interop-daemon-spawner/go.js b/test/utils/interop-daemon-spawner/go.js new file mode 100644 index 00000000..8010724d --- /dev/null +++ b/test/utils/interop-daemon-spawner/go.js @@ -0,0 +1,73 @@ +'use strict' + +const ctl = require('ipfsd-ctl') +const waterfall = require('async/waterfall') + +class GoDaemon { + constructor (opts) { + opts = opts || { + disposable: true, + init: true + } + + this.init = opts.init + this.path = opts.path + this.disposable = opts.disposable + this.node = null + this.api = null + this.config = opts.config || {} + this.flags = opts.flags || {} + } + + start (callback) { + waterfall([ + (cb) => { + if (this.disposable) { + const config = Object.assign({ init: this.init }, this.config) + ctl.disposable(config, cb) + } else if (this.init) { + ctl.local(this.path, (err, node) => { + if (err) 
{ + return cb(err) + } + node.init((err) => cb(err, node)) + }) + } else { + ctl.local(this.path, cb) + } + }, + (node, cb) => { + this.node = node + this.node.setConfig('Bootstrap', '[]', cb) + }, + (res, cb) => this.node.startDaemon(this.flags, cb), + (api, cb) => { + this.api = api + + if (process.env.DEBUG) { + this.api.log.tail((err, stream) => { + if (err) { + return console.error(err) + } + stream.on('data', (chunk) => { + console.log('go-log: %s.%s %s (%s)', chunk.system, chunk.subsystem || '', chunk.event, chunk.error) + }) + }) + this.node._run( + ['log', 'level', 'all', 'debug'], + {env: this.node.env}, + cb + ) + } else { + cb() + } + } + ], (err) => callback(err)) + } + + stop (callback) { + this.node.stopDaemon(callback) + } +} + +module.exports = GoDaemon diff --git a/test/utils/interop-daemon-spawner/js.js b/test/utils/interop-daemon-spawner/js.js new file mode 100644 index 00000000..0a191417 --- /dev/null +++ b/test/utils/interop-daemon-spawner/js.js @@ -0,0 +1,87 @@ +'use strict' + +const EventEmitter = require('events').EventEmitter +const IPFSAPI = require('ipfs-api') +const series = require('async/series') +const rimraf = require('rimraf') +const tmpDir = require('./util').tmpDir + +const HttpApi = require('ipfs/src/http') + +function portConfig (port) { + port = port + 5 + + return { + Gateway: '/ip4/127.0.0.1/tcp/' + (9090 + port), + API: '/ip4/127.0.0.1/tcp/' + (5002 + port), + Swarm: [ + '/ip4/127.0.0.1/tcp/' + (4003 + port), + '/ip4/127.0.0.1/tcp/' + (4104 + port) + '/ws' + ] + } +} + +class JsDaemon extends EventEmitter { + constructor (opts) { + super() + opts = Object.assign({}, { + disposable: true, + init: true + }, opts || {}) + + this.path = opts.path + this.disposable = opts.disposable + this.init = opts.init + this.port = opts.port || 1 + + this.path = opts.path || tmpDir() + this._started = false + + const extras = { + enablePubsubExperiment: true + } + if (this.init) { + const p = portConfig(this.port) + this.node = new HttpApi(this.path, { + Bootstrap: [], + Addresses: p + }, extras) + } else { + this.node = new HttpApi(this.path, null, extras) + } + + this.node.start(this.init, (err) => { + if (err) { + throw err + } + this._started = true + this.api = new IPFSAPI(this.node.apiMultiaddr) + + this.emit('start') + }) + } + + start (callback) { + if (!this._started) { + return this.once('start', callback) + } + + callback() + } + + stop (callback) { + this._started = false + series([ + (cb) => this.node.stop(cb), + (cb) => { + if (this.disposable) { + rimraf(this.path, cb) + } else { + cb() + } + } + ], (err) => callback(err)) + } +} + +module.exports = JsDaemon diff --git a/test/utils/interop-daemon-spawner/util.js b/test/utils/interop-daemon-spawner/util.js new file mode 100644 index 00000000..11b47f57 --- /dev/null +++ b/test/utils/interop-daemon-spawner/util.js @@ -0,0 +1,9 @@ +'use strict' + +const os = require('os') +const path = require('path') +const hat = require('hat') + +exports.tmpDir = (prefix) => { + return path.join(os.tmpdir(), prefix || 'js-ipfs-interop', hat()) +} diff --git a/test/utils/ipfs-factory-daemon/default-config.json b/test/utils/ipfs-factory-daemon/default-config.json new file mode 100644 index 00000000..f2203594 --- /dev/null +++ b/test/utils/ipfs-factory-daemon/default-config.json @@ -0,0 +1,41 @@ +{ + "Identity": { + "PeerID": "", + "PrivKey": "" + }, + "Addresses": { + "Swarm": [ + "/ip4/127.0.0.1/tcp/0" + ], + "API": "/ip4/127.0.0.1/tcp/0", + "Gateway": "/ip4/127.0.0.1/tcp/0" + }, + "Version": { + "Current": 
"jsipfs-dev", + "Check": "error", + "CheckDate": "0001-01-01T00:00:00Z", + "CheckPeriod": "172800000000000", + "AutoUpdate": "minor" + }, + "Discovery": { + "MDNS": { + "Enabled": false, + "Interval": 10 + }, + "webRTCStar": { + "Enabled": false + } + }, + "Bootstrap": [], + "Gateway": { + "HTTPHeaders": null, + "RootRedirect": "", + "Writable": false + }, + "API": { + "HTTPHeaders": null + }, + "Swarm": { + "AddrFilters": null + } +} diff --git a/test/utils/ipfs-factory-daemon/index.js b/test/utils/ipfs-factory-daemon/index.js new file mode 100644 index 00000000..1c973789 --- /dev/null +++ b/test/utils/ipfs-factory-daemon/index.js @@ -0,0 +1,79 @@ +'use strict' + +const PeerId = require('peer-id') +const IPFSAPI = require('ipfs-api') +const clean = require('../clean') +const HttpApi = require('ipfs/src/http') +const series = require('async/series') +const eachSeries = require('async/eachSeries') +const defaultConfig = require('./default-config.json') +const os = require('os') +const hat = require('hat') + +class Factory { + constructor () { + this.daemonsSpawned = [] + } + + /* yields a new started node */ + spawnNode (repoPath, suppliedConfig, callback) { + if (typeof repoPath === 'function') { + callback = repoPath + repoPath = undefined + } + if (typeof suppliedConfig === 'function') { + callback = suppliedConfig + suppliedConfig = {} + } + + repoPath = repoPath || os.tmpdir() + '/ipfs-' + hat() + + let daemon + let ctl + let config + + series([ + (cb) => { + // prepare config for node + + config = Object.assign({}, defaultConfig, suppliedConfig) + + PeerId.create({ bits: 1024 }, (err, id) => { + if (err) { return cb(err) } + + const peerId = id.toJSON() + config.Identity.PeerID = peerId.id + config.Identity.PrivKey = peerId.privKey + cb() + }) + }, + (cb) => { + daemon = new HttpApi(repoPath, config, {enablePubsubExperiment: true}) + daemon.repoPath = repoPath + this.daemonsSpawned.push(daemon) + + daemon.start(true, cb) + }, + (cb) => { + ctl = IPFSAPI(daemon.apiMultiaddr) + ctl.repoPath = repoPath + ctl.apiMultiaddr = daemon.apiMultiaddr + cb() + } + ], (err) => callback(err, ctl)) + } + + dismantle (callback) { + eachSeries(this.daemonsSpawned, (d, cb) => { + d.stop((err) => { + clean(d.repoPath) + if (err) { + console.error('error stopping', err) + } + cb(err) + }) + }, callback) + } +} + +module.exports = Factory diff --git a/test/utils/ipfs-factory-instance/default-config.json b/test/utils/ipfs-factory-instance/default-config.json new file mode 100644 index 00000000..677c7c85 --- /dev/null +++ b/test/utils/ipfs-factory-instance/default-config.json @@ -0,0 +1,29 @@ +{ + "Addresses": { + "Swarm": [ + "/ip4/127.0.0.1/tcp/0" + ], + "API": "/ip4/127.0.0.1/tcp/0", + "Gateway": "/ip4/127.0.0.1/tcp/0" + }, + "Version": { + "Current": "jsipfs-dev", + "Check": "error", + "CheckDate": "0001-01-01T00:00:00Z", + "CheckPeriod": "172800000000000", + "AutoUpdate": "minor" + }, + "Discovery": { + "MDNS": { + "Enabled": false, + "Interval": 10 + }, + "webRTCStar": { + "Enabled": false + } + }, + "Bootstrap": [], + "API": { + "HTTPHeaders": null + } +} diff --git a/test/utils/ipfs-factory-instance/index.js b/test/utils/ipfs-factory-instance/index.js new file mode 100644 index 00000000..568a1710 --- /dev/null +++ b/test/utils/ipfs-factory-instance/index.js @@ -0,0 +1,63 @@ +'use strict' + +const series = require('async/series') +const each = require('async/each') +const hat = require('hat') +const os = require('os') +const path = require('path') + +const defaultConfig = 
require('./default-config.json') +const IPFS = require('../../../src/core') +const createTempRepo = require('../create-repo-nodejs') + +module.exports = Factory + +function Factory () { + if (!(this instanceof Factory)) { + return new Factory() + } + + const nodes = [] + + /* yields a new started node instance */ + this.spawnNode = (repoPath, suppliedConfig, callback) => { + if (typeof repoPath === 'function') { + callback = repoPath + repoPath = undefined + } + + if (typeof suppliedConfig === 'function') { + callback = suppliedConfig + suppliedConfig = {} + } + + if (!repoPath) { + repoPath = path.join(os.tmpdir(), '.ipfs-' + hat()) + } + + const config = Object.assign({}, defaultConfig, suppliedConfig) + + const repo = createTempRepo(repoPath) + const node = new IPFS({ + repo: repo, + init: { bits: 1024 }, + config: config, + EXPERIMENTAL: { + pubsub: true, + dht: true + } + }) + + node.once('ready', () => { + nodes.push({ repo: repo, ipfs: node }) + callback(null, node) + }) + } + + this.dismantle = function (callback) { + series([ + (cb) => each(nodes, (el, cb) => el.ipfs.stop(cb), cb), + (cb) => each(nodes, (el, cb) => el.repo.teardown(cb), cb) + ], callback) + } +}
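For reference, here is a minimal usage sketch of the daemon factory added in `test/utils/ipfs-factory-daemon` (the in-process factory just above exposes the same `spawnNode`/`dismantle` interface). It is illustrative only and not part of this diff: it relies only on modules this PR already depends on (`async` plus the factory itself), and it assumes the first address reported by `id()` is the dialable local swarm address from the factory's default config.

```js
'use strict'

// Illustrative only — spawn two disposable js-ipfs daemons with the factory
// added above, connect them, then tear everything down.
const series = require('async/series')
const Factory = require('./test/utils/ipfs-factory-daemon')

const factory = new Factory()
let nodeA
let nodeB

series([
  // spawnNode(repoPath, config, callback) — both leading arguments are optional
  (cb) => factory.spawnNode((err, node) => { nodeA = node; cb(err) }),
  (cb) => factory.spawnNode((err, node) => { nodeB = node; cb(err) }),
  // spawnNode yields an ipfs-api client, so the regular HTTP API is available
  (cb) => nodeA.id((err, identity) => {
    if (err) { return cb(err) }
    // assumes addresses[0] is the local swarm address from the default config
    nodeB.swarm.connect(identity.addresses[0], cb)
  })
], (err) => {
  if (err) { console.error(err) }
  // dismantle() stops every daemon spawned by this factory and cleans up repos
  factory.dismantle((stopErr) => process.exit(err || stopErr ? 1 : 0))
})
```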