This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Add failing get recursive test #736

Closed · wants to merge 3 commits
16 changes: 9 additions & 7 deletions package.json
@@ -64,6 +64,7 @@
     "aegir": "^9.4.0",
     "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
+    "concat-stream": "^1.6.0",
     "delay": "^1.3.1",
     "detect-node": "^2.0.3",
     "eslint-plugin-react": "^6.9.0",
@@ -83,6 +84,7 @@
     "qs": "^6.3.0",
     "rimraf": "^2.5.4",
     "stream-to-promise": "^2.2.0",
+    "through2": "^2.0.3",
     "transform-loader": "^0.2.3"
   },
   "dependencies": {
@@ -97,19 +99,19 @@
     "hoek": "^4.1.0",
     "idb-pull-blob-store": "^0.5.1",
     "ipfs-api": "^12.1.4",
-    "ipfs-bitswap": "^0.9.0",
+    "ipfs-bitswap": "^0.9.2",
     "ipfs-block": "^0.5.4",
-    "ipfs-block-service": "^0.8.0",
+    "ipfs-block-service": "^0.8.1",
     "ipfs-multipart": "^0.1.0",
     "ipfs-repo": "^0.11.2",
     "ipfs-unixfs": "^0.1.9",
-    "ipfs-unixfs-engine": "^0.15.0",
-    "ipld-resolver": "^0.4.1",
+    "ipfs-unixfs-engine": "^0.15.1",
+    "ipld-resolver": "^0.4.2",
     "isstream": "^0.1.2",
     "libp2p-floodsub": "0.7.1",
     "joi": "^10.2.0",
-    "libp2p-ipfs-nodejs": "^0.17.8",
-    "libp2p-ipfs-browser": "^0.17.7",
+    "libp2p-ipfs-nodejs": "^0.17.9",
+    "libp2p-ipfs-browser": "^0.17.8",
     "lodash.flatmap": "^4.5.0",
     "lodash.get": "^4.4.2",
     "lodash.has": "^4.5.2",
@@ -171,4 +173,4 @@
     "nginnever <ginneversource@gmail.com>",
     "npmcdn-to-unpkg-bot <npmcdn-to-unpkg-bot@users.noreply.github.com>"
   ]
-}
+}
8 changes: 8 additions & 0 deletions test/cli/test-files.js
@@ -89,5 +89,13 @@ describe('files', () => {
         ].join('\n'))
       })
     })
+
+    it('get recursively', () => {
+      return ipfs('files get QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU').then((out) => {
+        const directory = path.join(process.cwd(), 'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU')
+        expect(fs.readdirSync(directory).length).to.be.eql(8)
+        // TODO add assertion on content of files
+      })
+    })
   })
 })

Review comment on the new test, from a member:

@victorbjelkholm your turn, tests are passing :)

(also, make sure to unlink to delete that file, like the other tests do.)
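The TODO and the cleanup note could be addressed along these lines. This is only a sketch, not part of the PR: it slots into the same test file (so `ipfs`, `path`, `fs`, and `expect` come from the surrounding module), the content assertion stays commented out because the fixture's directory listing isn't shown here, and it leans on rimraf, which is already a devDependency in this PR.

const rimraf = require('rimraf')

it('get recursively', () => {
  const hash = 'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU'
  return ipfs('files get ' + hash).then((out) => {
    const directory = path.join(process.cwd(), hash)
    expect(fs.readdirSync(directory).length).to.be.eql(8)
    // Placeholder: 'some-file' is hypothetical; swap in a real file name
    // and its expected bytes once the fixture's listing is checked.
    // expect(fs.readFileSync(path.join(directory, 'some-file'), 'utf8')).to.be.eql('...')
    // Clean up the exported directory, as the other tests do.
    rimraf.sync(directory)
  })
})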
99 changes: 99 additions & 0 deletions test/core/files.spec.js
@@ -0,0 +1,99 @@
/* eslint-env mocha */
'use strict'

const expect = require('chai').expect
const pull = require('pull-stream')
const series = require('async/series')
const concat = require('concat-stream')
const through = require('through2')

const IPFS = require('../../src/core')
const createTempRepo = require('../utils/create-repo-node.js')

describe('files', () => {
  const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK'
  let ipfs

  before((done) => {
    const repo = createTempRepo()
    ipfs = new IPFS(repo)
    series([
      (cb) => ipfs.init({ bits: 1024 }, cb),
      (cb) => ipfs.load(cb)
    ], done)
  })

  it('can add deeply nested heterogeneous dirs', done => {
    const files = [
      { path: 'a/b/c/d/e', content: pull.values([new Buffer('banana')]) },
      { path: 'a/b/c/d/f', content: pull.values([new Buffer('strawberry')]) },
      { path: 'a/b/g', content: pull.values([new Buffer('ice')]) },
      { path: 'a/b/h', content: pull.values([new Buffer('cream')]) }
    ]

    ipfs.files.add(files, (err, res) => {
      expect(err).to.not.exist
      const root = res[res.length - 1]

      expect(root.path).to.equal('a')
      expect(root.hash).to.equal(rootHash)
      done()
    })
  })

  it('can export that dir', done => {
    ipfs.files.get(rootHash, (err, stream) => {
      expect(err).to.not.exist

      // accumulate the files and their content
      var files = []
      stream.pipe(through.obj((file, enc, next) => {
        if (file.content) {
          file.content.pipe(concat((content) => {
            files.push({
              path: file.path,
              content: content
            })
            next()
          }))
        } else {
          files.push(file)
          next()
        }
      }, () => {
        files = files.sort(byPath)
        // Check paths
        var paths = files.map((file) => file.path)
        expect(paths).to.include.members([
          'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK',
          'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b',
          'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c',
          'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d',
          'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/e',
          'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/f',
          'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/g',
          'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/h'
        ])

        // Check contents
        var contents = files.map(function (file) {
          return file.content ? file.content.toString() : null
        })

        expect(contents).to.include.members([
          'banana',
          'strawberry',
          'ice',
          'cream'
        ])
        done()
      }))
    })
  })
})

function byPath (a, b) {
  if (a.path > b.path) return 1
  if (a.path < b.path) return -1
  return 0
}
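For readers connecting the two halves of this PR: the core test above consumes the object stream from ipfs.files.get in memory, while the CLI test expects the same export written to disk. Roughly, the idea behind `ipfs files get` can be sketched as below. This is not the actual CLI implementation: mkdirp is an assumed helper, and error handling is mostly elided.

const fs = require('fs')
const path = require('path')
const mkdirp = require('mkdirp')
const through = require('through2')

// Pipe the export stream from ipfs.files.get() through this to materialize
// every entry under outDir: directory entries become directories, file
// entries are written out from their content streams.
function writeTo (outDir) {
  return through.obj((file, enc, next) => {
    const target = path.join(outDir, file.path)
    if (!file.content) {
      // a directory entry has no content stream
      mkdirp(target, (err) => next(err))
    } else {
      mkdirp(path.dirname(target), (err) => {
        if (err) return next(err)
        file.content.pipe(fs.createWriteStream(target)).on('finish', () => next())
      })
    }
  })
}

// usage:
// ipfs.files.get(rootHash, (err, stream) => stream.pipe(writeTo(process.cwd())))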