http_parser: do not dealloc during kOnExecute

`freeParser` deallocates `Parser` instances early if they do not fit
into the free list. This does not play well with the recent socket
consumption change, because it may deallocate the parser while the
parser is still executing on its own stack.

Regression was introduced in: 1bc4468

Fix: https://github.com/nodejs/node/issues/2928
PR-URL: https://github.com/nodejs/node/pull/2956
Reviewed-by: Trevor Norris <trev.norris@gmail.com>
This commit is contained in:
Fedor Indutny 2015-09-18 12:15:39 -07:00
parent 4fb4c14b61
commit 229a03f1bb
2 changed files with 75 additions and 1 deletions

View File

@ -366,7 +366,9 @@ class Parser : public BaseObject {
// Frees the Parser backing the JS handle.
// NOTE(review): this is a diff rendering — the unconditional `delete parser;`
// immediately below is the removed (pre-fix) line; the refcounted form that
// follows is what replaces it.
static void Close(const FunctionCallbackInfo<Value>& args) {
Parser* parser = Unwrap<Parser>(args.Holder());
delete parser;
// Only deallocate once the last reference is dropped; a ScopedRetainParser
// on the kOnExecute stack may still hold one (see refcount_ below).
if (--parser->refcount_ == 0)
delete parser;
}
@ -504,6 +506,22 @@ class Parser : public BaseObject {
}
protected:
// RAII guard that holds an extra reference on a Parser for the duration of
// a scope, so the parser cannot be deleted (e.g. via Close()/freeParser())
// while callbacks that run on its stack are still executing.
class ScopedRetainParser {
public:
explicit ScopedRetainParser(Parser* p) : p_(p) {
// The caller must already own a live reference.
CHECK_GT(p_->refcount_, 0);
p_->refcount_++;
}
~ScopedRetainParser() {
// Drop our reference; free the parser if we were the last holder.
if (0 == --p_->refcount_)
delete p_;
}
private:
Parser* const p_;
};
static const size_t kAllocBufferSize = 64 * 1024;
static void OnAllocImpl(size_t suggested_size, uv_buf_t* buf, void* ctx) {
@ -540,6 +558,8 @@ class Parser : public BaseObject {
if (nread == 0)
return;
ScopedRetainParser retain(parser);
parser->current_buffer_.Clear();
Local<Value> ret = parser->Execute(buf->base, nread);
@ -668,7 +688,10 @@ class Parser : public BaseObject {
char* current_buffer_data_;
StreamResource::Callback<StreamResource::AllocCb> prev_alloc_cb_;
StreamResource::Callback<StreamResource::ReadCb> prev_read_cb_;
int refcount_ = 1;
static const struct http_parser_settings settings;
friend class ScopedRetainParser;
};

View File

@ -0,0 +1,51 @@
'use strict';
// Regression test: freeParser() must not deallocate a Parser while the
// parser is still executing (kOnExecute) on its own stack.
// Ref: https://github.com/nodejs/node/issues/2928
const common = require('../common');
const assert = require('assert');
const httpCommon = require('_http_common');
const HTTPParser = process.binding('http_parser').HTTPParser;
const net = require('net');

const PARALLEL = 30;
// Allocate more parsers than the free list can hold so that freeParser()
// is forced down the deallocation path for the overflow.
const COUNT = httpCommon.parsers.max + 100;

const parsers = [];
while (parsers.length < COUNT)
  parsers.push(httpCommon.parsers.alloc());

var gotRequests = 0;
var gotResponses = 0;
// Take one parser from the pool, point it at a fresh connection via
// consume(), then free it from inside the onIncoming callback — i.e. while
// kOnExecute is still on the native stack.
function execAndClose() {
  process.stdout.write('.');
  if (parsers.length === 0)
    return;

  const parser = parsers.pop();
  parser.reinitialize(HTTPParser.RESPONSE);

  const client = net.connect(common.PORT);
  parser.consume(client._handle._externalStream);

  parser.onIncoming = function onIncoming() {
    process.stdout.write('+');
    gotResponses++;
    parser.unconsume(client._handle._externalStream);
    // The call under test: it may deallocate `parser` while the C++
    // Execute() frame is still live.
    httpCommon.freeParser(parser);
    client.destroy();
    setImmediate(execAndClose);
  };
}
// Answer every request with a bare 200 and close the connection; shut the
// server down once all COUNT requests have been observed.
var server = net.createServer(function(conn) {
  gotRequests++;
  if (gotRequests === COUNT)
    server.close();
  conn.end('HTTP/1.1 200 OK\r\n\r\n', function() {
    conn.destroySoon();
  });
}).listen(common.PORT, function() {
  // Kick off PARALLEL independent execAndClose() chains.
  var i;
  for (i = 0; i < PARALLEL; i++)
    execAndClose();
});
// Verify every response was observed. Use strictEqual: assert.equal's
// coercing == comparison could mask a type mismatch in the counter.
process.on('exit', function() {
  assert.strictEqual(gotResponses, COUNT);
});