stream: do not unconditionally call _read() on resume()

`readable.resume()` calls `.read(0)`, which in turn previously set
`needReadable = true`, and so a subsequent `.read()` call would
call `_read()` even though enough data was already available.

This can lead to elevated memory usage, because calling `_read()`
when enough data is in the readable buffer means that backpressure
is not being honoured.

Fixes: https://github.com/nodejs/node/issues/26957

PR-URL: https://github.com/nodejs/node/pull/26965
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
Reviewed-By: Ruben Bridgewater <ruben@bridgewater.de>
This commit is contained in:
Anna Henningsen 2019-03-28 20:46:39 +01:00
parent 86a29356f4
commit 20c3ac2556
No known key found for this signature in database
GPG Key ID: 9C63F3A6CD2AD8F9
2 changed files with 22 additions and 1 deletions

View File

@ -475,7 +475,7 @@ Readable.prototype.read = function(n) {
ret = null;
if (ret === null) {
state.needReadable = true;
state.needReadable = state.length <= state.highWaterMark;
n = 0;
} else {
state.length -= n;

View File

@ -0,0 +1,21 @@
'use strict';
const common = require('../common');
const { Readable } = require('stream');

// Verify that readable.resume() does not schedule a ._read() call while the
// internal buffer already holds more data than the high water mark.
const readable = new Readable({
  read: common.mustNotCall(),
  highWaterMark: 100
});

// Push enough chunks that the buffered amount is well above the HWM.
let i = 0;
while (i < 10) {
  readable.push('a'.repeat(200));
  i++;
}

// Start flowing, then pause as soon as the first chunk is emitted.
// Pausing keeps the buffer from draining completely; an empty buffer would
// be a legitimate reason for a ._read() call, which we want to rule out.
readable.resume();
readable.once('data', common.mustCall(() => readable.pause()));