Skip to content
This repository has been archived by the owner on Jul 21, 2023. It is now read-only.

Commit

Permalink
fix: replace node buffers with uint8arrays (#114)
Browse files Browse the repository at this point in the history
* fix: replace node buffers with uint8arrays

BREAKING CHANGE:

- All use of node Buffers has been replaced with Uint8Arrays

* fix: keep allocUnsafe for node for performance

Co-authored-by: Jacob Heun <jacobheun@gmail.com>
  • Loading branch information
achingbrain and jacobheun authored Aug 11, 2020
1 parent c44bf9a commit d005338
Show file tree
Hide file tree
Showing 9 changed files with 111 additions and 47 deletions.
16 changes: 16 additions & 0 deletions .aegir.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
'use strict'

// aegir build configuration.
// Opts in to webpack's node shims so that this module's dependencies —
// which still expect a node-like environment — can be bundled for the
// browser after Buffer usage was removed from this package itself.
module.exports = {
  webpack: {
    node: {
      // needed by random-bytes
      crypto: true,

      // needed by cipher-base
      stream: true,

      // needed by core-util-is
      Buffer: true
    }
  }
}
11 changes: 6 additions & 5 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@
"dist",
"src"
],
"browser": {
"./src/coder/encode.js": "./src/coder/encode.browser.js"
},
"scripts": {
"lint": "aegir lint",
"build": "aegir build",
Expand Down Expand Up @@ -39,20 +42,18 @@
},
"homepage": "https://github.com/libp2p/js-libp2p-mplex#readme",
"devDependencies": {
"aegir": "^22.0.0",
"chai": "^4.2.0",
"dirty-chai": "^2.0.1",
"aegir": "^25.0.0",
"interface-stream-muxer": "^0.8.0",
"p-defer": "^3.0.0",
"random-bytes": "^1.0.0",
"random-int": "^2.0.0",
"streaming-iterables": "^4.1.0"
"streaming-iterables": "^5.0.2",
"uint8arrays": "^1.1.0"
},
"dependencies": {
"abort-controller": "^3.0.0",
"abortable-iterator": "^3.0.0",
"bl": "^4.0.0",
"buffer": "^5.5.0",
"debug": "^4.1.1",
"it-pipe": "^1.0.1",
"it-pushable": "^1.3.1",
Expand Down
4 changes: 2 additions & 2 deletions src/coder/decode.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ class Decoder {
}

/**
* @param {Buffer|BufferList} chunk
* @param {Uint8Array|BufferList} chunk
* @returns {object[]} An array of message objects
*/
write (chunk) {
Expand Down Expand Up @@ -58,7 +58,7 @@ class Decoder {
/**
* Attempts to decode the message header from the buffer
* @private
* @param {Buffer} data
* @param {Uint8Array} data
* @returns {*} message header (id, type, offset, length)
*/
_decodeHeader (data) {
Expand Down
54 changes: 54 additions & 0 deletions src/coder/encode.browser.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
'use strict'

const varint = require('varint')
const BufferList = require('bl/BufferList')

const POOL_SIZE = 10 * 1024

/**
 * Encodes mplex message headers into a shared pre-allocated byte pool,
 * avoiding a fresh allocation per message. Browser variant — uses plain
 * Uint8Arrays instead of node Buffers.
 */
class Encoder {
  constructor () {
    this._pool = new Uint8Array(POOL_SIZE)
    this._poolOffset = 0
  }

  /**
   * Encodes the given message and returns it and its header
   * @param {*} msg The message object to encode
   * @returns {Uint8Array|Uint8Array[]} the header alone when the message
   * carries no data, otherwise `[header, data]`
   */
  write (msg) {
    const buf = this._pool
    const start = this._poolOffset
    let end = start

    // header = varint(id << 3 | type) followed by varint(data length)
    varint.encode(msg.id << 3 | msg.type, buf, end)
    end += varint.encode.bytes
    varint.encode(msg.data ? msg.data.length : 0, buf, end)
    end += varint.encode.bytes

    // a view into the pool — stays valid because the pool is swapped out
    // (never overwritten) once it is nearly exhausted
    const header = buf.subarray(start, end)

    if (POOL_SIZE - end < 100) {
      // not enough head-room left for another header: start a fresh pool
      this._pool = new Uint8Array(POOL_SIZE)
      this._poolOffset = 0
    } else {
      this._poolOffset = end
    }

    if (!msg.data) {
      return header
    }

    return [header, msg.data]
  }
}

const encoder = new Encoder()

// Encode one or more messages and yield a BufferList of encoded messages
module.exports = source => (async function * encode () {
for await (const msg of source) {
if (Array.isArray(msg)) {
yield new BufferList(msg.map(m => encoder.write(m)))
} else {
yield new BufferList(encoder.write(msg))
}
}
})()
1 change: 0 additions & 1 deletion src/coder/encode.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
'use strict'

const { Buffer } = require('buffer')
const varint = require('varint')
const BufferList = require('bl/BufferList')

Expand Down
2 changes: 1 addition & 1 deletion src/mplex.js
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ class Mplex {
* @param {object} options
* @param {number} options.id
* @param {string} options.type
* @param {Buffer|BufferList} options.data
* @param {Uint8Array|BufferList} options.data
* @returns {void}
*/
_handleIncoming ({ id, type, data }) {
Expand Down
48 changes: 23 additions & 25 deletions test/coder.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,59 +2,57 @@
/* eslint max-nested-callbacks: ["error", 5] */
'use strict'

const { Buffer } = require('buffer')
const chai = require('chai')
const dirtyChai = require('dirty-chai')
const { BufferList } = require('bl')
const { expect } = chai
chai.use(dirtyChai)
const { expect } = require('aegir/utils/chai')
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayConcat = require('uint8arrays/concat')

const coder = require('../src/coder')

describe('coder', () => {
it('should encode header', async () => {
const source = [{ id: 17, type: 0, data: Buffer.from('17') }]
const source = [{ id: 17, type: 0, data: uint8ArrayFromString('17') }]

const data = new BufferList()
for await (const chunk of coder.encode(source)) {
data.append(chunk)
}

const expectedHeader = Buffer.from('880102', 'hex')
const expectedHeader = uint8ArrayFromString('880102', 'base16')
expect(data.slice(0, expectedHeader.length)).to.be.eql(expectedHeader)
})

it('should decode header', async () => {
const source = [Buffer.from('8801023137', 'hex')]
const source = [uint8ArrayFromString('8801023137', 'base16')]
for await (const msgs of coder.decode(source)) {
expect(msgs.length).to.equal(1)
msgs[0].data = msgs[0].data.slice() // convert BufferList to Buffer
expect(msgs[0]).to.be.eql({ id: 17, type: 0, data: Buffer.from('17') })
expect(msgs[0]).to.be.eql({ id: 17, type: 0, data: uint8ArrayFromString('17') })
}
})

it('should encode several msgs into buffer', async () => {
const source = [
{ id: 17, type: 0, data: Buffer.from('17') },
{ id: 19, type: 0, data: Buffer.from('19') },
{ id: 21, type: 0, data: Buffer.from('21') }
{ id: 17, type: 0, data: uint8ArrayFromString('17') },
{ id: 19, type: 0, data: uint8ArrayFromString('19') },
{ id: 21, type: 0, data: uint8ArrayFromString('21') }
]

const data = new BufferList()
for await (const chunk of coder.encode(source)) {
data.append(chunk)
}

expect(data.slice()).to.be.eql(Buffer.from('88010231379801023139a801023231', 'hex'))
expect(data.slice()).to.be.eql(uint8ArrayFromString('88010231379801023139a801023231', 'base16'))
})

it('should encode from BufferList', async () => {
const source = [{
id: 17,
type: 0,
data: new BufferList([
Buffer.from(Math.random().toString()),
Buffer.from(Math.random().toString())
uint8ArrayFromString(Math.random().toString()),
uint8ArrayFromString(Math.random().toString())
])
}]

Expand All @@ -63,15 +61,15 @@ describe('coder', () => {
data.append(chunk)
}

expect(data.slice()).to.be.eql(Buffer.concat([
Buffer.from('8801', 'hex'),
Buffer.from([source[0].data.length]),
expect(data.slice()).to.eql(uint8ArrayConcat([
uint8ArrayFromString('8801', 'base16'),
Uint8Array.from([source[0].data.length]),
source[0].data.slice()
]))
})

it('should decode msgs from buffer', async () => {
const source = [Buffer.from('88010231379801023139a801023231', 'hex')]
const source = [uint8ArrayFromString('88010231379801023139a801023231', 'base16')]

const res = []
for await (const msgs of coder.decode(source)) {
Expand All @@ -82,9 +80,9 @@ describe('coder', () => {
}

expect(res).to.be.deep.eql([
{ id: 17, type: 0, data: Buffer.from('17') },
{ id: 19, type: 0, data: Buffer.from('19') },
{ id: 21, type: 0, data: Buffer.from('21') }
{ id: 17, type: 0, data: uint8ArrayFromString('17') },
{ id: 19, type: 0, data: uint8ArrayFromString('19') },
{ id: 21, type: 0, data: uint8ArrayFromString('21') }
])
})

Expand All @@ -96,16 +94,16 @@ describe('coder', () => {
data.append(chunk)
}

expect(data.slice()).to.be.eql(Buffer.from('880100', 'hex'))
expect(data.slice()).to.be.eql(uint8ArrayFromString('880100', 'base16'))
})

it('should decode zero length body msg', async () => {
const source = [Buffer.from('880100', 'hex')]
const source = [uint8ArrayFromString('880100', 'base16')]

for await (const msgs of coder.decode(source)) {
expect(msgs.length).to.equal(1)
msgs[0].data = msgs[0].data.slice() // convert BufferList to Buffer
expect(msgs[0]).to.be.eql({ id: 17, type: 0, data: Buffer.alloc(0) })
expect(msgs[0]).to.be.eql({ id: 17, type: 0, data: new Uint8Array(0) })
}
})
})
5 changes: 1 addition & 4 deletions test/restrict-size.spec.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,7 @@
/* eslint-env mocha */
'use strict'

const chai = require('chai')
const dirtyChai = require('dirty-chai')
const { expect } = chai
chai.use(dirtyChai)
const { expect } = require('aegir/utils/chai')
const pipe = require('it-pipe')
const randomBytes = require('random-bytes')
const { tap, consume, collect } = require('streaming-iterables')
Expand Down
17 changes: 8 additions & 9 deletions test/stream.spec.js
Original file line number Diff line number Diff line change
@@ -1,16 +1,15 @@
/* eslint-env mocha */
'use strict'

const { Buffer } = require('buffer')
const chai = require('chai')
const dirtyChai = require('dirty-chai')
const { expect } = chai
chai.use(dirtyChai)
const { expect } = require('aegir/utils/chai')
const pipe = require('it-pipe')
const randomBytes = require('random-bytes')
const randomInt = require('random-int')
const { tap, take, collect, consume, map } = require('streaming-iterables')
const defer = require('p-defer')
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayToString = require('uint8arrays/to-string')
const uint8ArrayConcat = require('uint8arrays/concat')

const createStream = require('../src/stream')
const { MessageTypes, MessageTypeNames } = require('../src/message-types')
Expand All @@ -31,13 +30,13 @@ const infiniteRandom = {
}
}

const msgToBuffer = msg => Buffer.from(JSON.stringify(msg))
const msgToBuffer = msg => uint8ArrayFromString(JSON.stringify(msg))

const bufferToMessage = buf => {
const msg = JSON.parse(buf)
const msg = JSON.parse(uint8ArrayToString(buf))
// JSON.stringify(Buffer) encodes as {"type":"Buffer","data":[1,2,3]}
if (msg.data && msg.data.type === 'Buffer') {
msg.data = Buffer.from(msg.data.data)
msg.data = new Uint8Array(msg.data.data)
}
return msg
}
Expand Down Expand Up @@ -565,6 +564,6 @@ describe('stream', () => {
expect(dataMessages[0].data.length).to.equal(maxMsgSize)
expect(dataMessages[1].data.length).to.equal(maxMsgSize)
expect(dataMessages[2].data.length).to.equal(2)
expect(Buffer.concat(dataMessages.map(m => m.data.slice()))).to.deep.equal(bigMessage)
expect(uint8ArrayConcat(dataMessages.map(m => m.data.slice()))).to.deep.equal(bigMessage)
})
})

0 comments on commit d005338

Please sign in to comment.