zlib: report premature ends earlier
Report end-of-stream when decompressing as soon as we detect it, and do
not wait until the writable side of a zlib stream is closed as well.

Refs: https://github.com/nodejs/node/issues/26332
PR-URL: https://github.com/nodejs/node/pull/26363
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Ruben Bridgewater <ruben@bridgewater.de>
Reviewed-By: James M Snell <jasnell@gmail.com>
commit 28db96f31c
parent a0778a97e1
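A minimal sketch of the behavior this patch produces, assuming plain Node.js with the change applied (this snippet is not part of the commit): once the decompressor sees unconsumed input past the end of the deflate data, it reports end-of-stream immediately, without the writable side ever being ended.

'use strict';
const zlib = require('zlib');

const compressed = zlib.deflateSync('hello');
const inflater = zlib.createInflate();

inflater.on('data', (chunk) => process.stdout.write(chunk));
inflater.on('end', () => console.log(' <- end-of-stream reported'));

// The deflate data ends partway through this chunk; the trailing bytes are
// never consumed. With this change, the leftover input marks the stream as
// ended and 'end' fires even though inflater.end() is never called.
inflater.write(Buffer.concat([compressed, Buffer.from('trailing junk')]));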
lib/zlib.js | 10 ++++++++++
@@ -546,6 +546,16 @@ function processCallback() {
     return;
   }
 
+  if (availInAfter > 0) {
+    // If we have more input that should be written, but we also have output
+    // space available, that means that the compression library was not
+    // interested in receiving more data, and in particular that the input
+    // stream has ended early.
+    // This applies to streams where we don't check data past the end of
+    // what was consumed; that is, everything except Gunzip/Unzip.
+    self.push(null);
+  }
+
   // finished with the chunk.
   this.buffer = null;
   this.cb();
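The Gunzip/Unzip carve-out in the comment above exists because gzip input may consist of several concatenated members, so unconsumed input after one member is not proof of a premature end. A small sketch of that case (not part of this diff):

'use strict';
const zlib = require('zlib');

// Two complete gzip members back to back; the data after the first member
// is itself valid gzip, so Gunzip must keep checking past that point.
const member = zlib.gzipSync('abc');
console.log(zlib.gunzipSync(Buffer.concat([member, member])).toString());
// -> 'abcabc'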
test/parallel/test-zlib-premature-end.js (new file) | 33 +++++++++++++++++++++++++++++++++
@@ -0,0 +1,33 @@
+'use strict';
+const common = require('../common');
+const zlib = require('zlib');
+const assert = require('assert');
+
+const input = '0123456789'.repeat(4);
+
+for (const [ compress, decompressor ] of [
+  [ zlib.deflateRawSync, zlib.createInflateRaw ],
+  [ zlib.deflateSync, zlib.createInflate ],
+  [ zlib.brotliCompressSync, zlib.createBrotliDecompress ]
+]) {
+  const compressed = compress(input);
+  const trailingData = Buffer.from('not valid compressed data');
+
+  for (const variant of [
+    (stream) => { stream.end(compressed); },
+    (stream) => { stream.write(compressed); stream.write(trailingData); },
+    (stream) => { stream.write(compressed); stream.end(trailingData); },
+    (stream) => { stream.write(Buffer.concat([compressed, trailingData])); },
+    (stream) => { stream.end(Buffer.concat([compressed, trailingData])); }
+  ]) {
+    let output = '';
+    const stream = decompressor();
+    stream.setEncoding('utf8');
+    stream.on('data', (chunk) => output += chunk);
+    stream.on('end', common.mustCall(() => {
+      assert.strictEqual(output, input);
+      assert.strictEqual(stream.bytesWritten, compressed.length);
+    }));
+    variant(stream);
+  }
+}