http2: add Http2Stream.bufferSize
This commit adds `bufferSize` for `Http2Stream`.

Refs: https://github.com/nodejs/node/issues/21631
PR-URL: https://github.com/nodejs/node/pull/23711
Reviewed-By: James M Snell <jasnell@gmail.com>

commit 33fbb93d2f (parent 9a20a12830)
doc/api/http2.md

@@ -1012,6 +1012,15 @@ added: v8.4.0
Set to `true` if the `Http2Stream` instance was aborted abnormally. When set,
the `'aborted'` event will have been emitted.

#### http2stream.bufferSize
<!-- YAML
added: REPLACEME
-->

* {number}

This property shows the number of characters currently buffered to be written.
See [`net.Socket.bufferSize`][] for details.

#### http2stream.close(code[, callback])
<!-- YAML
added: v8.4.0

@@ -3415,6 +3424,7 @@ following additional properties:
[`http2stream.pushStream()`]: #http2_http2stream_pushstream_headers_options_callback
[`net.Server.close()`]: net.html#net_server_close_callback
[`net.Socket`]: net.html#net_class_net_socket
[`net.Socket.bufferSize`]: net.html#net_socket_buffersize
[`net.Socket.prototype.ref()`]: net.html#net_socket_ref
[`net.Socket.prototype.unref()`]: net.html#net_socket_unref
[`net.connect()`]: net.html#net_net_connect
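For illustration of the documented property, a minimal usage sketch follows (not part of the commit; the plaintext HTTP/2 setup and the 1024-byte payload are assumptions chosen for the example):

```js
// Minimal sketch: observing `bufferSize` on a client-side Http2Stream.
const http2 = require('http2');

const server = http2.createServer();
server.on('stream', (stream) => {
  stream.on('data', () => {});
  stream.on('end', () => stream.respond({ ':status': 200 }, { endStream: true }));
});

server.listen(0, () => {
  const client = http2.connect(`http://localhost:${server.address().port}`);
  const req = client.request({ ':method': 'POST' });

  req.write(Buffer.alloc(1024));
  // The chunk written above has not been flushed to the session yet, so
  // `bufferSize` should reflect it here (1024 in this sketch).
  console.log(req.bufferSize);

  req.end();
  req.resume();
  req.on('close', () => {
    client.close();
    server.close();
  });
});
```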
lib/internal/http2/core.js

@@ -1689,6 +1689,12 @@ class Http2Stream extends Duplex {
    return `Http2Stream ${util.format(obj)}`;
  }

  get bufferSize() {
    // `bufferSize` properties of `net.Socket` are `undefined` when
    // their `_handle` are falsy. Here we avoid the behavior.
    return this[kState].writeQueueSize + this.writableLength;
  }

  get endAfterHeaders() {
    return this[kState].endAfterHeaders;
  }
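The comment in the getter refers to the fact that `net.Socket`'s `bufferSize` returns `undefined` once the socket's `_handle` has been released; because the `Http2Stream` getter sums `this[kState].writeQueueSize` and `this.writableLength` instead of reading a handle, it avoids that edge case. A rough sketch of the `net.Socket` behavior being avoided (illustrative only, not part of the commit):

```js
// Rough sketch: `net.Socket#bufferSize` is a number while the handle exists,
// and `undefined` after the socket is destroyed and its handle released.
const net = require('net');

const server = net.createServer((conn) => conn.end());
server.listen(0, () => {
  const socket = net.connect(server.address().port, () => {
    console.log(typeof socket.bufferSize); // 'number'
    socket.destroy();
  });
  socket.on('close', () => {
    console.log(socket.bufferSize); // undefined: the `_handle` is gone
    server.close();
  });
});
```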
test/parallel/test-http2-buffersize.js (new file, 51 lines)

@@ -0,0 +1,51 @@
'use strict';

const { mustCall, hasCrypto, skip } = require('../common');
if (!hasCrypto)
  skip('missing crypto');
const assert = require('assert');
const { createServer, connect } = require('http2');
const Countdown = require('../common/countdown');

// This test ensures that `bufferSize` of Http2Session and Http2Stream work
// as expected.
{
  const SOCKETS = 2;
  const TIMES = 10;
  const BUFFER_SIZE = 30;
  const server = createServer();

  // Other `bufferSize` tests for net module and tls module
  // don't assert `bufferSize` of server-side sockets.
  server.on('stream', mustCall((stream) => {
    stream.on('data', mustCall());
    stream.on('end', mustCall());
  }, SOCKETS));

  server.listen(0, mustCall(() => {
    const authority = `http://localhost:${server.address().port}`;
    const client = connect(authority);
    const countdown = new Countdown(SOCKETS, () => {
      client.close();
      server.close();
    });

    client.once('connect', mustCall());

    for (let j = 0; j < SOCKETS; j += 1) {
      const stream = client.request({ ':method': 'POST' });
      stream.on('data', () => {});
      stream.on('close', mustCall(() => {
        countdown.dec();
      }));

      for (let i = 0; i < TIMES; i += 1) {
        stream.write(Buffer.allocUnsafe(BUFFER_SIZE), mustCall());
        const expectedSocketBufferSize = BUFFER_SIZE * (i + 1);
        assert.strictEqual(stream.bufferSize, expectedSocketBufferSize);
      }
      stream.end();
      stream.close();
    }
  }));
}
test/parallel/test-tls-streamwrap-buffersize.js (new file, 72 lines)

@@ -0,0 +1,72 @@
'use strict';
const common = require('../common');
if (!common.hasCrypto)
  common.skip('missing crypto');
const assert = require('assert');
const fixtures = require('../common/fixtures');
const makeDuplexPair = require('../common/duplexpair');
const tls = require('tls');
const net = require('net');

// This test ensures that `bufferSize` also works for those tlsSockets
// created from `socket` of `Duplex`, with which, TLSSocket will wrap
// sockets in `StreamWrap`.
{
  const iter = 10;

  function createDuplex(port) {
    const { clientSide, serverSide } = makeDuplexPair();

    return new Promise((resolve, reject) => {
      const socket = net.connect({
        port,
      }, common.mustCall(() => {
        clientSide.pipe(socket);
        socket.pipe(clientSide);
        clientSide.on('close', common.mustCall(() => socket.destroy()));
        socket.on('close', common.mustCall(() => clientSide.destroy()));

        resolve(serverSide);
      }));
    });
  }

  const server = tls.createServer({
    key: fixtures.readKey('agent2-key.pem'),
    cert: fixtures.readKey('agent2-cert.pem')
  }, common.mustCall((socket) => {
    let str = '';
    socket.setEncoding('utf-8');
    socket.on('data', (chunk) => { str += chunk; });

    socket.on('end', common.mustCall(() => {
      assert.strictEqual(str, 'a'.repeat(iter - 1));
      server.close();
    }));
  }));

  server.listen(0, common.mustCall(() => {
    const { port } = server.address();
    createDuplex(port).then((socket) => {
      const client = tls.connect({
        socket,
        rejectUnauthorized: false,
      }, common.mustCall(() => {
        assert.strictEqual(client.bufferSize, 0);

        for (let i = 1; i < iter; i++) {
          client.write('a');
          assert.strictEqual(client.bufferSize, i + 1);
        }

        // It seems that tlsSockets created from sockets of `Duplex` emit no
        // "finish" events. We use "end" event instead.
        client.on('end', common.mustCall(() => {
          assert.strictEqual(client.bufferSize, undefined);
        }));

        client.end();
      }));
    });
  }));
}