zlib: use .bytesWritten instead of .bytesRead
The introduction of `.bytesRead` to zlib streams was unfortunate,
because other streams in Node.js core use the exact opposite naming
of `.bytesRead` and `.bytesWritten`.

While one could see how the original naming makes sense in
a `Transform` stream context, we should try to work towards more
consistent APIs in core for these things.

This introduces `zlib.bytesWritten` and documentation-only deprecates
`zlib.bytesRead`.
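
For example, a minimal sketch of the renamed property (illustrative only; the input string and logged values are assumptions, but the property names are the ones this commit documents):

```js
const zlib = require('zlib');

const gzip = zlib.createGzip();
gzip.on('data', () => {});        // Drain the output so 'end' can fire.
gzip.on('end', () => {
  // Number of input bytes written into the compressor.
  console.log(gzip.bytesWritten); // 11
  // Deprecated alias; reports the same value.
  console.log(gzip.bytesRead);    // 11
});
gzip.end('hello world');          // 11 bytes of input
```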

PR-URL: #19414
Refs: #8874
Refs: #13088
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: Сковорода Никита Андреевич <chalkerx@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Sakthipriyan Vairamani <thechargingvolcano@gmail.com>
Reviewed-By: Ruben Bridgewater <ruben@bridgewater.de>
addaleax authored and targos committed Apr 12, 2018
1 parent b476102 commit 307ce80
Showing 4 changed files with 59 additions and 15 deletions.
11 changes: 11 additions & 0 deletions doc/api/deprecations.md
@@ -796,6 +796,16 @@ Use [`asyncResource.runInAsyncScope()`][] API instead which provides a much
safer, and more convenient, alternative. See
https://github.com/nodejs/node/pull/18513 for more details.
<a id="DEP0108"></a>
### DEP0108: zlib.bytesRead
Type: Documentation-only
Deprecated alias for [`zlib.bytesWritten`][]. This original name was chosen
because it also made sense to interpret the value as the number of bytes
read by the engine, but is inconsistent with other streams in Node.js that
expose values under these names.
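
A hypothetical migration sketch (the `deflate` stream and input string are illustrative): while the alias remains, both names report the same value, so callers can switch to `bytesWritten` incrementally:

```js
const zlib = require('zlib');

const deflate = zlib.createDeflate();
deflate.on('data', () => {});
deflate.on('end', () => {
  // Preferred after this change:
  console.log(deflate.bytesWritten);                       // 13
  // Deprecated spelling, kept as an alias:
  console.log(deflate.bytesRead === deflate.bytesWritten); // true
});
deflate.end('example input');                              // 13 bytes
```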
[`Buffer.allocUnsafeSlow(size)`]: buffer.html#buffer_class_method_buffer_allocunsafeslow_size
[`Buffer.from(array)`]: buffer.html#buffer_class_method_buffer_from_array
[`Buffer.from(buffer)`]: buffer.html#buffer_class_method_buffer_from_buffer
@@ -857,6 +867,7 @@ https://github.com/nodejs/node/pull/18513 for more details.
[`util.puts()`]: util.html#util_util_puts_strings
[`util`]: util.html
[`worker.exitedAfterDisconnect`]: cluster.html#cluster_worker_exitedafterdisconnect
[`zlib.bytesWritten`]: zlib.html#zlib_zlib_byteswritten
[alloc]: buffer.html#buffer_class_method_buffer_alloc_size_fill_encoding
[alloc_unsafe_size]: buffer.html#buffer_class_method_buffer_allocunsafe_size
[from_arraybuffer]: buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length
24 changes: 20 additions & 4 deletions doc/api/zlib.md
@@ -400,13 +400,28 @@ class of the compressor/decompressor classes.
### zlib.bytesRead
<!-- YAML
added: v8.1.0
deprecated: REPLACEME
-->

> Stability: 0 - Deprecated: Use [`zlib.bytesWritten`][] instead.
* {number}

The `zlib.bytesRead` property specifies the number of bytes read by the engine
before the bytes are processed (compressed or decompressed, as appropriate for
the derived class).
Deprecated alias for [`zlib.bytesWritten`][]. This original name was chosen
because it also made sense to interpret the value as the number of bytes
read by the engine, but is inconsistent with other streams in Node.js that
expose values under these names.

### zlib.bytesWritten
<!-- YAML
added: REPLACEME
-->

* {number}

The `zlib.bytesWritten` property specifies the number of bytes written to
the engine, before the bytes are processed (compressed or decompressed,
as appropriate for the derived class).
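
For illustration (a sketch with an assumed 1 MiB input; the exact compressed size will vary), `bytesWritten` counts the bytes fed into the engine, not the bytes it produces:

```js
const zlib = require('zlib');

const input = Buffer.alloc(1024 * 1024, 'a'); // 1 MiB of very compressible data
const gzip = zlib.createGzip();

let outputBytes = 0;
gzip.on('data', (chunk) => { outputBytes += chunk.length; });
gzip.on('end', () => {
  console.log(gzip.bytesWritten); // 1048576 -- input written to the engine
  console.log(outputBytes);       // far smaller -- the compressed output
});
gzip.end(input);
```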

### zlib.close([callback])
<!-- YAML
@@ -763,7 +778,8 @@ Decompress a chunk of data with [Unzip][].
[InflateRaw]: #zlib_class_zlib_inflateraw
[Inflate]: #zlib_class_zlib_inflate
[Memory Usage Tuning]: #zlib_memory_usage_tuning
[options]: #zlib_class_options
[Unzip]: #zlib_class_zlib_unzip
[`UV_THREADPOOL_SIZE`]: cli.html#cli_uv_threadpool_size_size
[options]: #zlib_class_options
[`zlib.bytesWritten`]: #zlib_zlib_byteswritten
[zlib documentation]: https://zlib.net/manual.html#Constants
23 changes: 19 additions & 4 deletions lib/zlib.js
@@ -270,7 +270,7 @@ function Zlib(opts, mode) {
}
}
Transform.call(this, opts);
this.bytesRead = 0;
this.bytesWritten = 0;
this._handle = new binding.Zlib(mode);
this._handle.jsref = this; // Used by processCallback() and zlibOnError()
this._handle.onerror = zlibOnError;
@@ -309,6 +309,21 @@ Object.defineProperty(Zlib.prototype, '_closed', {
}
});

// `bytesRead` made sense as a name when looking from the zlib engine's
// perspective, but it is inconsistent with all other streams exposed by Node.js
// that have this concept, where it stands for the number of bytes read
// *from* the stream (that is, net.Socket/tls.Socket & file system streams).
Object.defineProperty(Zlib.prototype, 'bytesRead', {
configurable: true,
enumerable: true,
get() {
return this.bytesWritten;
},
set(value) {
this.bytesWritten = value;
}
});
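
The accessor above keeps the legacy name working in both directions; a small illustrative sketch of the resulting behaviour (the literal `42` is arbitrary):

```js
const zlib = require('zlib');

const gunzip = zlib.createGunzip();
console.log(gunzip.bytesWritten); // 0
console.log(gunzip.bytesRead);    // 0 -- getter forwards to bytesWritten

gunzip.bytesRead = 42;            // setter writes through as well
console.log(gunzip.bytesWritten); // 42
```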

Zlib.prototype.params = function params(level, strategy, callback) {
if (level < Z_MIN_LEVEL || level > Z_MAX_LEVEL)
throw new errors.RangeError('ERR_INVALID_ARG_VALUE', 'level', level);
@@ -489,7 +504,7 @@ function processChunkSync(self, chunk, flushFlag) {
}
}

self.bytesRead = inputRead;
self.bytesWritten = inputRead;

if (nread >= kMaxLength) {
_close(self);
@@ -546,8 +561,8 @@ function processCallback() {
var availOutAfter = state[0];
var availInAfter = state[1];

var inDelta = (handle.availInBefore - availInAfter);
self.bytesRead += inDelta;
const inDelta = handle.availInBefore - availInAfter;
self.bytesWritten += inDelta;

var have = handle.availOutBefore - availOutAfter;
if (have > 0) {
16 changes: 9 additions & 7 deletions test/parallel/test-zlib-bytes-read.js
@@ -33,13 +33,13 @@ for (const method of [
const comp = zlib[method[0]]();
comp.on('data', function(d) {
compData = Buffer.concat([compData, d]);
assert.strictEqual(this.bytesRead, compWriter.size,
assert.strictEqual(this.bytesWritten, compWriter.size,
`Should get write size on ${method[0]} data.`);
});
comp.on('end', common.mustCall(function() {
assert.strictEqual(this.bytesRead, compWriter.size,
assert.strictEqual(this.bytesWritten, compWriter.size,
`Should get write size on ${method[0]} end.`);
assert.strictEqual(this.bytesRead, expectStr.length,
assert.strictEqual(this.bytesWritten, expectStr.length,
`Should get data size on ${method[0]} end.`);

{
@@ -49,12 +49,12 @@ for (const method of [
const decomp = zlib[method[1]]();
decomp.on('data', function(d) {
decompData = Buffer.concat([decompData, d]);
assert.strictEqual(this.bytesRead, decompWriter.size,
assert.strictEqual(this.bytesWritten, decompWriter.size,
`Should get write size on ${method[0]}/` +
`${method[1]} data.`);
});
decomp.on('end', common.mustCall(function() {
assert.strictEqual(this.bytesRead, compData.length,
assert.strictEqual(this.bytesWritten, compData.length,
`Should get compressed size on ${method[0]}/` +
`${method[1]} end.`);
assert.strictEqual(decompData.toString(), expectStr,
Expand All @@ -74,14 +74,16 @@ for (const method of [
const decomp = zlib[method[1]]();
decomp.on('data', function(d) {
decompData = Buffer.concat([decompData, d]);
assert.strictEqual(this.bytesRead, decompWriter.size,
assert.strictEqual(this.bytesWritten, decompWriter.size,
`Should get write size on ${method[0]}/` +
`${method[1]} data.`);
});
decomp.on('end', common.mustCall(function() {
assert.strictEqual(this.bytesRead, compData.length,
assert.strictEqual(this.bytesWritten, compData.length,
`Should get compressed size on ${method[0]}/` +
`${method[1]} end.`);
// Checking legacy name.
assert.strictEqual(this.bytesWritten, this.bytesRead);
assert.strictEqual(decompData.toString(), expectStr,
`Should get original string on ${method[0]}/` +
`${method[1]} end.`);
