Skip to content
This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Commit

Permalink
feat: basic mfs.write command
Browse files Browse the repository at this point in the history
License: MIT
Signed-off-by: achingbrain <alex@achingbrain.net>
  • Loading branch information
achingbrain committed May 8, 2018
1 parent 723bbe9 commit ccecb1b
Show file tree
Hide file tree
Showing 16 changed files with 494 additions and 150 deletions.
18 changes: 10 additions & 8 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -38,28 +38,30 @@
"aegir": "^13.0.6",
"chai": "^4.1.2",
"dirty-chai": "^2.0.1",
"ipfs": "^0.28.2",
"ipfs": "~0.28.2",
"pre-commit": "^1.2.2",
"safe-buffer": "^5.1.1",
"tmp": "0.0.33"
"sign-commit": "~0.1.0",
"tmp": "~0.0.33"
},
"dependencies": {
"async": "^2.6.0",
"blob": "0.0.4",
"blob": "~0.0.4",
"bs58": "^4.0.1",
"cids": "~0.5.3",
"debug": "^3.1.0",
"detect-node": "^2.0.3",
"file-api": "^0.10.4",
"file-api": "~0.10.4",
"filereader-stream": "^1.0.0",
"interface-datastore": "^0.4.2",
"ipfs-unixfs": "^0.1.14",
"ipfs-unixfs-engine": "^0.29.0",
"is-pull-stream": "0.0.0",
"interface-datastore": "~0.4.2",
"ipfs-unixfs": "~0.1.14",
"ipfs-unixfs-engine": "~0.29.0",
"is-pull-stream": "~0.0.0",
"is-stream": "^1.1.0",
"promisify-es6": "^1.0.3",
"pull-cat": "^1.1.11",
"pull-paramap": "^1.2.2",
"pull-pushable": "^2.2.0",
"pull-stream": "^3.6.7",
"pull-traverse": "^1.0.3",
"stream-to-pull-stream": "^1.7.2"
Expand Down
12 changes: 1 addition & 11 deletions src/core/stat.js
Original file line number Diff line number Diff line change
Expand Up @@ -45,19 +45,9 @@ module.exports = function mfsStat (ipfs) {

const meta = unmarshal(node.data)

let size = 0

if (meta.data && meta.data.length) {
size = meta.data.length
}

if (meta.blockSizes && meta.blockSizes.length) {
size = meta.blockSizes.reduce((acc, curr) => acc + curr, 0)
}

done(null, {
hash: node.multihash,
size: size,
size: meta.fileSize(),
cumulativeSize: node.size,
childBlocks: meta.blockSizes.length,
type: meta.type
Expand Down
14 changes: 7 additions & 7 deletions src/core/utils/add-link.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ const addLink = (ipfs, options, callback) => {
options = Object.assign({}, {
parent: undefined,
child: undefined,
name: undefined,
name: '',
flush: true
}, options)

Expand All @@ -24,14 +24,14 @@ const addLink = (ipfs, options, callback) => {
return callback(new Error('No child passed to addLink'))
}

if (!options.name) {
return callback(new Error('No name passed to addLink'))
}

waterfall([
(done) => {
// Remove the old link if necessary
DAGNode.rmLink(options.parent, options.name, done)
if (options.name) {
// Remove the old link if necessary
return DAGNode.rmLink(options.parent, options.name, done)
}

done(null, options.parent)
},
(parent, done) => {
// Add the new link to the parent
Expand Down
4 changes: 3 additions & 1 deletion src/core/utils/constants.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,7 @@ const Key = require('interface-datastore').Key

module.exports = {
FILE_SEPARATOR: '/',
MFS_ROOT_KEY: new Key('/local/filesroot')
MFS_ROOT_KEY: new Key('/local/filesroot'),
MAX_CHUNK_SIZE: 262144,
MAX_LINKS: 174
}
22 changes: 22 additions & 0 deletions src/core/utils/create-node.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
'use strict'

const waterfall = require('async/waterfall')
const {
DAGNode
} = require('ipld-dag-pb')

// Builds a DAGNode from `data` and `links`, persists it through
// `ipfs.dag.put` using the codec/hash settings from `options`, then
// yields the freshly stored node to `callback(error, node)`.
const createNode = (ipfs, data, links, options, callback) => {
  waterfall([
    // Assemble the new DAGNode
    (next) => DAGNode.create(data, links, next),
    // Write it to the DAG store, passing the node itself along
    (node, next) => ipfs.dag.put(node, {
      format: options.format,
      hashAlg: options.hashAlg
    }, (error) => next(error, node))
  ], callback)
}

module.exports = createNode
9 changes: 9 additions & 0 deletions src/core/utils/end-pull-stream.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
'use strict'

// Terminates a pull-stream by calling back with `true`, the protocol's
// end-of-stream signal. The intermediate constant exists purely to
// sidestep a `standard` lint complaint about a bare `callback(true)`:
// https://github.com/standard/standard/issues/623
const endPullStream = (callback) => {
  const streamEnded = true
  return callback(streamEnded)
}

module.exports = endPullStream
8 changes: 7 additions & 1 deletion src/core/utils/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,18 @@
const constants = require('./constants')

module.exports = {
endPullStream: require('./end-pull-stream'),
validatePath: require('./validate-path'),
withMfsRoot: require('./with-mfs-root'),
updateMfsRoot: require('./update-mfs-root'),
traverseTo: require('./traverse-to'),
addLink: require('./add-link'),
updateTree: require('./update-tree'),
createNode: require('./create-node'),
limitStreamBytes: require('./limit-stream-bytes'),
FILE_SEPARATOR: constants.FILE_SEPARATOR
loadNode: require('./load-node'),
zeros: require('./zeros'),
FILE_SEPARATOR: constants.FILE_SEPARATOR,
MAX_CHUNK_SIZE: constants.MAX_CHUNK_SIZE,
MAX_LINKS: constants.MAX_LINKS
}
5 changes: 2 additions & 3 deletions src/core/utils/limit-stream-bytes.js
Original file line number Diff line number Diff line change
@@ -1,15 +1,14 @@
'use strict'

const asyncMap = require('pull-stream/throughs/async-map')
const endPullStream = require('./end-pull-stream')

const limitStreamBytes = (limit) => {
let bytesRead = 0

return asyncMap((buffer, cb) => {
if (bytesRead > limit) {
// Ugh. https://github.com/standard/standard/issues/623
const foo = true
return cb(foo)
endPullStream(cb)
}

// If we only need to return part of this buffer, slice it to make it smaller
Expand Down
25 changes: 25 additions & 0 deletions src/core/utils/load-node.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
'use strict'

const waterfall = require('async/waterfall')
const CID = require('cids')
const log = require('debug')('mfs:utils:load-node')
const bs58 = require('bs58')

// Resolves `cid` (anything exposing `.multihash` or `.hash`) to its
// DAGNode via `ipfs.dag.get`. When no usable multihash can be extracted
// the callback is invoked with no arguments instead of an error.
const loadNode = (ipfs, cid, callback) => {
  const multihash = cid && (cid.multihash || cid.hash)

  if (multihash) {
    log(`Loading DAGNode for child ${bs58.encode(multihash)}`)

    return waterfall([
      (next) => ipfs.dag.get(new CID(multihash), next),
      // Unwrap the dag-get result down to the node itself
      (result, next) => next(null, result.value)
    ], callback)
  }

  log(`No multihash passed so cannot load DAGNode`)

  callback()
}

module.exports = loadNode
2 changes: 1 addition & 1 deletion src/core/utils/traverse-to.js
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ const traverseTo = (ipfs, path, options, callback) => {
node: rootNode,
parent: null
}, (parent, {pathSegment, index}, done) => {
log(`Looking for ${pathSegment} in ${parent.name} ${bs58.encode(parent.node.multihash)}s`)
log(`Looking for ${pathSegment} in ${parent.name} ${bs58.encode(parent.node.multihash)}`)

parent.node.links.forEach(link => {
log(`${bs58.encode(link.multihash)} ${link.name}`)
Expand Down
31 changes: 31 additions & 0 deletions src/core/utils/zeros.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
'use strict'

// Pull-stream source that emits zero-filled Buffers until `max` bytes in
// total have been produced. Chunks are `increment` bytes each, except the
// final one, which is trimmed so the running total lands exactly on `max`.
const zeros = (max = Infinity, increment = 4096) => {
  let produced = 0

  return (end, cb) => {
    if (end) {
      // Downstream aborted — acknowledge only if a callback was supplied
      return cb && cb(end)
    }

    if (produced >= max) {
      // Signal end-of-stream; the named constant works around
      // https://github.com/standard/standard/issues/623
      const streamEnded = true
      return cb(streamEnded)
    }

    // Trim the final chunk when `increment` does not divide `max` evenly
    const size = produced + increment > max ? max - produced : increment

    produced += size

    cb(null, Buffer.alloc(size, 0))
  }
}

module.exports = zeros
2 changes: 1 addition & 1 deletion src/core/write/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,6 @@ module.exports = function mfsWrite (ipfs) {
(newRoot, next) => updateMfsRoot(ipfs, newRoot.node.multihash, next)
], done)
}
], (error, result) => callback(error, result))
], (error) => callback(error))
})
}
Loading

0 comments on commit ccecb1b

Please sign in to comment.