Skip to content
This repository has been archived by the owner on Aug 11, 2021. It is now read-only.

Commit

Permalink
feat: migrate to new ipfs-block and block-service api
Browse files Browse the repository at this point in the history
  • Loading branch information
daviddias committed Mar 21, 2017
1 parent 49b01ef commit c9e958b
Show file tree
Hide file tree
Showing 50 changed files with 8,687 additions and 336 deletions.
19 changes: 9 additions & 10 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,12 +33,10 @@
"license": "MIT",
"devDependencies": {
"aegir": "^11.0.0",
"buffer-loader": "0.0.1",
"chai": "^3.5.0",
"dirty-chai": "^1.2.2",
"eth-hash-to-cid": "^0.1.0",
"ethereumjs-block": "^1.5.0",
"fs-pull-blob-store": "~0.4.1",
"idb-pull-blob-store": "~0.5.1",
"lodash": "^4.17.4",
"ncp": "^2.0.0",
"pre-commit": "^1.2.2",
Expand All @@ -48,12 +46,12 @@
"dependencies": {
"async": "^2.1.5",
"cids": "~0.4.2",
"interface-pull-blob-store": "~0.6.0",
"ipfs-block": "~0.5.5",
"ipfs-block-service": "~0.8.3",
"ipfs-repo": "~0.11.3",
"ipld-dag-cbor": "~0.10.1",
"ipld-dag-pb": "~0.10.1",
"interface-datastore": "^0.1.1",
"ipfs-block": "~0.6.0",
"ipfs-block-service": "~0.9.0",
"ipfs-repo": "~0.12.0",
"ipld-dag-cbor": "~0.11.0",
"ipld-dag-pb": "~0.11.0",
"ipld-eth-block": "^2.2.1",
"ipld-eth-block-list": "^1.0.3",
"ipld-eth-state-trie": "^1.0.2",
Expand All @@ -62,6 +60,7 @@
"is-ipfs": "~0.3.0",
"lodash.flatten": "^4.4.0",
"lodash.includes": "^4.3.0",
"memdown": "^1.2.4",
"multihashes": "~0.4.4",
"pull-sort": "^1.0.0",
"pull-stream": "^3.5.0",
Expand All @@ -76,4 +75,4 @@
"kumavis <kumavis@users.noreply.github.com>",
"wanderer <mjbecze@gmail.com>"
]
}
}
138 changes: 59 additions & 79 deletions src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,14 @@ const pull = require('pull-stream')
const CID = require('cids')
const doUntil = require('async/doUntil')
const IPFSRepo = require('ipfs-repo')
const MemoryStore = require('interface-pull-blob-store')
const BlockService = require('ipfs-block-service')
const joinPath = require('path').join
const pullDeferSource = require('pull-defer').source
const pullTraverse = require('pull-traverse')
const map = require('async/map')
const series = require('async/series')
const waterfall = require('async/waterfall')
const MemoryStore = require('interface-datastore').MemoryDatastore

const dagPB = require('ipld-dag-pb')
const dagCBOR = require('ipld-dag-cbor')
Expand All @@ -25,10 +26,8 @@ function noop () {}

class IPLDResolver {
constructor (blockService) {
// nicola will love this!
if (!blockService) {
const repo = new IPFSRepo('in-memory', { stores: MemoryStore })
blockService = new BlockService(repo)
throw new Error('Missing blockservice')
}

this.bs = blockService
Expand Down Expand Up @@ -182,50 +181,24 @@ class IPLDResolver {
if (typeof options === 'function') {
return setImmediate(() => callback(new Error('no options were passed')))
}

let nodeAndCID
callback = callback || noop

if (options.cid && CID.isCID(options.cid)) {
nodeAndCID = {
node: node,
cid: options.cid
}

store.apply(this)
} else {
options.hashAlg = options.hashAlg || 'sha2-256'

const r = this.resolvers[options.format]
// TODO add support for different hash funcs in the utils of
// each format (just really needed for CBOR for now, really
// r.util.cid(node1, hashAlg, (err, cid) => {
r.util.cid(node, (err, cid) => {
if (err) {
return callback(err)
}

nodeAndCID = {
node: node,
cid: cid
}

store.apply(this)
})
return this._put(options.cid, node, callback)
}

function store () {
callback = callback || noop
options.hashAlg = options.hashAlg || 'sha2-256'
const r = this.resolvers[options.format]
// TODO add support for different hash funcs in the utils of
// each format (just really needed for CBOR for now, really
// r.util.cid(node1, hashAlg, (err, cid) => {
r.util.cid(node, (err, cid) => {
if (err) {
return callback(err)
}

pull(
pull.values([nodeAndCID]),
this._putStream((err) => {
if (err) {
return callback(err)
}
callback(null, nodeAndCID.cid)
})
)
}
this._put(cid, node, callback)
})
}

treeStream (cid, path, options) {
Expand Down Expand Up @@ -340,22 +313,11 @@ class IPLDResolver {
/* */

_get (cid, callback) {
pull(
this._getStream(cid),
pull.collect((err, res) => {
if (err) {
return callback(err)
}
callback(null, res[0])
})
)
}
const r = this.resolvers[cid.codec]

_getStream (cid) {
return pull(
this.bs.getStream(cid),
pull.asyncMap((block, cb) => {
const r = this.resolvers[cid.codec]
waterfall([
(cb) => this.bs.get(cid, cb),
(block, cb) => {
if (r) {
r.util.deserialize(block.data, (err, deserialized) => {
if (err) {
Expand All @@ -366,32 +328,50 @@ class IPLDResolver {
} else { // multicodec unknown, send back raw data
cb(null, block.data)
}
})
)
}
], callback)
}

_putStream (callback) {
_put (cid, node, callback) {
callback = callback || noop

return pull(
pull.asyncMap((nodeAndCID, cb) => {
const cid = nodeAndCID.cid
const r = this.resolvers[cid.codec]

r.util.serialize(nodeAndCID.node, (err, serialized) => {
if (err) {
return cb(err)
}
cb(null, {
block: new Block(serialized),
cid: cid
})
})
}),
this.bs.putStream(),
pull.onEnd(callback)
)
const r = this.resolvers[cid.codec]
waterfall([
(cb) => r.util.serialize(node, cb),
(buf, cb) => this.bs.put(new Block(buf, cid), cb)
], (err) => {
if (err) {
return callback(err)
}
callback(null, cid)
})
}
}

/**
 * Create an IPLDResolver backed by a throwaway in-memory repo and
 * blockservice — nothing is persisted to disk.
 *
 * @param {function(Error, IPLDResolver)} callback
 * @returns {void}
 */
IPLDResolver.inMemory = function (callback) {
  // Every store (flatfs slot, leveldb, lock) is swapped for a memory-only
  // implementation so the repo leaves no trace on the filesystem.
  const memRepo = new IPFSRepo('in-memory', {
    fs: MemoryStore,
    level: require('memdown'),
    lock: 'memory'
  })
  const svc = new BlockService(memRepo)

  // A fresh repo must be initialized before it can be opened.
  series([
    (cb) => memRepo.init({}, cb),
    (cb) => memRepo.open(cb)
  ], (err) => {
    if (err) {
      return callback(err)
    }
    callback(null, new IPLDResolver(svc))
  })
}

module.exports = IPLDResolver
13 changes: 9 additions & 4 deletions test/basics.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
/* eslint-env mocha */
'use strict'

const expect = require('chai').expect
const chai = require('chai')
const dirtyChai = require('dirty-chai')
const expect = chai.expect
chai.use(dirtyChai)
const BlockService = require('ipfs-block-service')

const IPLDResolver = require('../src')
Expand All @@ -11,12 +14,14 @@ module.exports = (repo) => {
it('creates an instance', () => {
const bs = new BlockService(repo)
const r = new IPLDResolver(bs)
expect(r.bs).to.exist // eslint-disable-line
expect(r.bs).to.exist()
})

it('creates an in memory repo if no blockService is passed', () => {
const r = new IPLDResolver()
expect(r.bs).to.exist // eslint-disable-line
IPLDResolver.inMemory((err, r) => {
expect(err).to.not.exist()
expect(r.bs).to.exist()
})
})

it.skip('add support to a new format', () => {})
Expand Down
47 changes: 16 additions & 31 deletions test/browser.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,8 @@

'use strict'

const eachSeries = require('async/eachSeries')
const Store = require('idb-pull-blob-store')
const _ = require('lodash')
const series = require('async/series')
const IPFSRepo = require('ipfs-repo')
const pull = require('pull-stream')
const repoContext = require.context('buffer!./example-repo', true)

const basePath = 'ipfs' + Math.random()

Expand All @@ -21,35 +17,24 @@ idb.deleteDatabase(basePath)
idb.deleteDatabase(basePath + '/blocks')

describe('Browser', () => {
before((done) => {
const repoData = []
repoContext.keys().forEach((key) => {
repoData.push({
key: key.replace('./', ''),
value: repoContext(key)
})
})

const mainBlob = new Store(basePath)
const blocksBlob = new Store(basePath + '/blocks')

eachSeries(repoData, (file, cb) => {
if (_.startsWith(file.key, 'datastore/')) {
return cb()
}

const blocks = _.startsWith(file.key, 'blocks/')
const blob = blocks ? blocksBlob : mainBlob
const key = blocks ? file.key.replace(/^blocks\//, '') : file.key
const repo = new IPFSRepo(basePath)

pull(
pull.values([file.value]),
blob.write(key, cb)
)
}, done)
before((done) => {
series([
(cb) => repo.init({}, cb),
(cb) => repo.open(cb)
], done)
})

const repo = new IPFSRepo(basePath, { stores: Store })
after((done) => {
  // Tear down in order: close the repo first, then drop the IndexedDB
  // databases that backed it.
  series([
    (cb) => repo.close(cb),
    (cb) => {
      idb.deleteDatabase(basePath)
      idb.deleteDatabase(basePath + '/blocks')
      // BUG FIX: the task must signal completion — without this call
      // `series` never finishes and mocha's `done` is never invoked,
      // so the suite hangs until timeout. deleteDatabase requests are
      // treated as fire-and-forget here.
      cb()
    }
  ], done)
})

require('./basics')(repo)
require('./ipld-dag-pb')(repo)
Expand Down
1 change: 1 addition & 0 deletions test/example-repo/blocks/SHARDING
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
/repo/flatfs/shard/v1/next-to-last/2
30 changes: 30 additions & 0 deletions test/example-repo/blocks/_README
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
This is a repository of IPLD objects. Each IPLD object is in a single file,
named <base32 encoding of cid>.data, where <base32 encoding of cid> is the
"base32" encoding of the CID (as specified in
https://github.com/multiformats/multibase) without the 'B' prefix.
All the object files are placed in a tree of directories, based on a
function of the CID. This is a form of sharding similar to
the objects directory in git repositories. Previously we used
prefixes; we now use the next-to-last two characters.

    func NextToLast(base32cid string) string {
        nextToLastLen := 2
        offset := len(base32cid) - nextToLastLen - 1
        return base32cid[offset : offset+nextToLastLen]
    }

For example, an object with a base58 CIDv1 of

zb2rhYSxw4ZjuzgCnWSt19Q94ERaeFhu9uSqRgjSdx9bsgM6f

has a base32 CIDv1 of

BAFKREIA22FLID5AJ2KU7URG47MDLROZIH6YF2KALU2PWEFPVI37YLKRSCA

and will be placed at

SC/AFKREIA22FLID5AJ2KU7URG47MDLROZIH6YF2KALU2PWEFPVI37YLKRSCA.data

with 'SC' being the next-to-last two characters. The 'B' at the
beginning of the CIDv1 string is the multibase prefix and is not
stored in the filename.
2 changes: 1 addition & 1 deletion test/example-repo/datastore/CURRENT
Original file line number Diff line number Diff line change
@@ -1 +1 @@
MANIFEST-000007
MANIFEST-000009
15 changes: 5 additions & 10 deletions test/example-repo/datastore/LOG
Original file line number Diff line number Diff line change
@@ -1,10 +1,5 @@
=============== Dec 10, 2015 (PST) ===============
07:50:02.056578 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
07:50:02.057231 db@open opening
07:50:02.057312 journal@recovery F·1
07:50:02.057514 journal@recovery recovering @3
07:50:02.058921 mem@flush created L0@5 N·4 S·1KiB "/ip..\xf6\xe4\xa9,v5":"/pk..\xf6\xe4\xa9,v6"
07:50:02.059983 db@janitor F·4 G·0
07:50:02.060001 db@open done T·2.755926ms
07:50:02.073183 db@close closing
07:50:02.073285 db@close done T·97.522µs
=============== Mar 17, 2017 (CET) ===============
14:09:53.401094 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed
14:09:53.401727 db@open opening
14:09:53.409913 db@janitor F·4 G·0
14:09:53.409974 db@open done T·8.211747ms
Loading

0 comments on commit c9e958b

Please sign in to comment.