Revert "stream: add bytesRead property for readable"

This reverts commit bfb2cd0bfddd716366f1c89637cca9fc1234e592.

The bytesRead property, as implemented, tracks characters
instead of bytes when using an identity encoding.
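
For context, a minimal sketch (not part of the original commit message) of why
character counting diverges from byte counting once multi-byte UTF-8 is involved:

```js
// `chunk.length` on a string counts characters; bytesRead is meant to
// count bytes. The two disagree for any non-ASCII data.
const chunk = 'snowman: \u2603';

console.log(chunk.length);              // 10 characters
console.log(Buffer.byteLength(chunk));  // 12 bytes (U+2603 is 3 bytes in UTF-8)
```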

Refs: https://github.com/nodejs/node/pull/4372
PR-URL: https://github.com/nodejs/node/pull/4746
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: James M Snell <jasnell@gmail.com>
Author: cjihrig
Date:   2016-01-18 12:35:52 -05:00
Parent: ff64a4c395
Commit: eee9dc7e9d

4 changed files with 5 additions and 127 deletions

doc/api/stream.markdown

@@ -250,11 +250,6 @@ readable: null
 end
 ```
 
-#### readable.bytesRead
-
-The amount of read bytes. If `objectMode` is `true`, the value is 0 always.
-
 #### readable.isPaused()
 
 * Return: `Boolean`

lib/_stream_readable.js

@@ -83,8 +83,6 @@ function Readable(options) {
 
   this._readableState = new ReadableState(options, this);
 
-  this.bytesRead = 0;
-
   // legacy
   this.readable = true;
@@ -137,7 +135,6 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
       var e = new Error('stream.unshift() after end event');
       stream.emit('error', e);
     } else {
-      stream.bytesRead += state.objectMode ? 0 : chunk.length;
       if (state.decoder && !addToFront && !encoding)
         chunk = state.decoder.write(chunk);

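The removed line above charged `chunk.length` to `bytesRead` before any
decoding, so a chunk pushed as a string contributed its character count. A
hedged sketch of a byte-accurate alternative (the helper name is illustrative,
not the fix that later landed in core):

```js
// Size a chunk in bytes whether it arrives as a Buffer or a string.
// `encoding` is the string chunk's encoding; streams default to 'utf8'.
function byteLengthOfChunk(chunk, encoding) {
  if (typeof chunk === 'string')
    return Buffer.byteLength(chunk, encoding || 'utf8');
  return chunk.length;  // Buffer#length is already a byte count
}
```
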
lib/net.js

@@ -91,6 +91,7 @@ exports._normalizeConnectArgs = normalizeConnectArgs;
 
 // called when creating new Socket, or when re-using a closed Socket
 function initSocketHandle(self) {
   self.destroyed = false;
+  self.bytesRead = 0;
   self._bytesDispatched = 0;
   self._sockname = null;
@@ -514,6 +515,10 @@ function onread(nread, buffer) {
     // will prevent this from being called again until _read() gets
     // called again.
 
+    // if it's not enough data, we'll just call handle.readStart()
+    // again right away.
+    self.bytesRead += nread;
+
     // Optimization: emit the original buffer with end points
     var ret = self.push(buffer);

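These hunks restore the counter to `net.Socket`, where `nread` comes straight
from the handle and is always a byte count, so `socket.bytesRead` stays
accurate even when the socket decodes incoming data to strings. A usage sketch
(host and port are illustrative):

```js
const net = require('net');

const socket = net.connect(8124, 'localhost', function() {
  socket.setEncoding('utf8');  // 'data' now emits strings
});

socket.on('data', function(chunk) {
  // chunk.length counts characters; socket.bytesRead counts wire bytes
  console.log(chunk.length + ' chars, ' + socket.bytesRead + ' bytes total');
});
```
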
test/parallel/test-stream-bytesRead.js

@@ -1,119 +0,0 @@
-'use strict';
-require('../common');
-const assert = require('assert');
-
-const Readable = require('stream').Readable;
-const Duplex = require('stream').Duplex;
-const Transform = require('stream').Transform;
-
-(function() {
-  const readable = new Readable({
-    read: function(n) {
-      var i = this._index++;
-      if (i > this._max)
-        this.push(null);
-      else
-        this.push(new Buffer('a'));
-    }
-  });
-
-  readable._max = 1000;
-  readable._index = 1;
-
-  var total = 0;
-  readable.on('data', function(chunk) {
-    total += chunk.length;
-  });
-
-  readable.on('end', function() {
-    assert.equal(total, readable.bytesRead);
-  });
-})();
-
-(function() {
-  const readable = new Readable({
-    read: function(n) {
-      var i = this._index++;
-      if (i > this._max)
-        this.push(null);
-      else
-        this.push(new Buffer('a'));
-    }
-  });
-
-  readable._max = 1000;
-  readable._index = 1;
-
-  var total = 0;
-  readable.setEncoding('utf8');
-  readable.on('data', function(chunk) {
-    total += Buffer.byteLength(chunk);
-  });
-
-  readable.on('end', function() {
-    assert.equal(total, readable.bytesRead);
-  });
-})();
-
-(function() {
-  const duplex = new Duplex({
-    read: function(n) {
-      var i = this._index++;
-      if (i > this._max)
-        this.push(null);
-      else
-        this.push(new Buffer('a'));
-    },
-    write: function(chunk, encoding, next) {
-      next();
-    }
-  });
-
-  duplex._max = 1000;
-  duplex._index = 1;
-
-  var total = 0;
-  duplex.setEncoding('utf8');
-  duplex.on('data', function(chunk) {
-    total += Buffer.byteLength(chunk);
-  });
-
-  duplex.on('end', function() {
-    assert.equal(total, duplex.bytesRead);
-  });
-})();
-
-(function() {
-  const readable = new Readable({
-    read: function(n) {
-      var i = this._index++;
-      if (i > this._max)
-        this.push(null);
-      else
-        this.push(new Buffer('{"key":"value"}'));
-    }
-  });
-
-  readable._max = 1000;
-  readable._index = 1;
-
-  const transform = new Transform({
-    readableObjectMode : true,
-    transform: function(chunk, encoding, next) {
-      next(null, JSON.parse(chunk));
-    },
-    flush: function(done) {
-      done();
-    }
-  });
-
-  var total = 0;
-  readable.on('data', function(chunk) {
-    total += chunk.length;
-  });
-
-  transform.on('end', function() {
-    assert.equal(0, transform.bytesRead);
-    assert.equal(total, readable.bytesRead);
-  });
-
-  readable.pipe(transform);
-})();