test: relax chunk count expectations
In parallel/test-fs-read-stream-concurrent-reads.js, the number of data
chunks used is asserted while a few concurrent reads are in flight. The
chunk count can fluctuate with the number of concurrent reads as well as
with how much data each read returns in one shot. Accommodate these
variations in the test.

Fixes: https://github.com/nodejs/node/issues/22339
PR-URL: https://github.com/nodejs/node/pull/25415
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
commit cc26957cc3
parent c1ac578881
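Why an exact chunk count is unreliable: a stream's 'data' events map to
however many bytes each underlying read returns, so the same file can arrive
in a different number of chunks from run to run. A minimal standalone sketch
of this (not part of the commit; reading the script's own file is just an
arbitrary choice of a small file):

'use strict';
const fs = require('fs');

// Count how many 'data' chunks a small file arrives in. The result depends
// on how much data each underlying read() call returns, so it can differ
// between runs, platforms, and stream options such as highWaterMark.
let chunkCount = 0;
fs.createReadStream(__filename)
  .on('data', () => { chunkCount++; })
  .on('end', () => {
    console.log(`file delivered in ${chunkCount} chunk(s)`);
  });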
@@ -13,7 +13,7 @@ const fs = require('fs');
 const filename = fixtures.path('loop.js'); // Some small non-homogeneous file.
 const content = fs.readFileSync(filename);
 
-const N = 1000;
+const N = 2000;
 let started = 0;
 let done = 0;
 
@@ -26,10 +26,10 @@ function startRead() {
     .on('data', (chunk) => {
       chunks.push(chunk);
       arrayBuffers.add(chunk.buffer);
+      if (started < N)
+        startRead();
     })
     .on('end', common.mustCall(() => {
-      if (started < N)
-        startRead();
       assert.deepStrictEqual(Buffer.concat(chunks), content);
       if (++done === N) {
         const retainedMemory =
@@ -43,5 +43,5 @@ function startRead() {
 
 // Don’t start the reads all at once – that way we would have to allocate
 // a large amount of memory upfront.
-for (let i = 0; i < 4; ++i)
+for (let i = 0; i < 6; ++i)
   startRead();
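The second hunk moves the chained startRead() call from the 'end' handler
into the 'data' handler, so the next read starts while the current one is
still mid-flight, and the final hunk seeds six reads instead of four. A
simplified standalone sketch of that self-scheduling pattern (names mirror
the test; N and the seed count here are illustrative, and the per-read
assertions are omitted):

'use strict';
const fs = require('fs');

const N = 20;      // total reads; the test uses 2000
let started = 0;

function startRead() {
  ++started;
  fs.createReadStream(__filename)
    .on('data', () => {
      // Kick off the next read while this one is still receiving data, so
      // reads overlap instead of running strictly back-to-back.
      if (started < N)
        startRead();
    })
    .on('end', () => {
      // The real test verifies the reassembled contents here and, once all
      // N reads are done, checks how much ArrayBuffer memory is retained.
    });
}

// Seed a few reads rather than all N at once, to avoid allocating a large
// amount of memory upfront (the test seeds six).
for (let i = 0; i < 6; ++i)
  startRead();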