stream.unshift - TypeError: Argument must be a buffer
- Version: v10.15.3
- Platform: Ubuntu 18.04
- Subsystem:
The `stream.unshift` documentation states:

> `chunk` `<Buffer>` | `<Uint8Array>` | `<string>` | `<any>`
> Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any JavaScript value other than `null`.
The issue occurs when the stream's encoding is neither passed as an option nor set using `setEncoding()`. In that case `state.decoder` is not set, and the `fromList` function may use `state.buffer.concat()` (when `state.buffer.length > 1`), which internally uses `copyBuffer()` and therefore requires both the source and the target to be a `Buffer` or `Uint8Array`.
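For reference, the relevant branch looks roughly like this (a paraphrased sketch of `fromList()` in `lib/_stream_readable.js` on v10.x, not the exact source):

```js
// Paraphrased sketch of fromList() in lib/_stream_readable.js (v10.x);
// not the exact source.
function fromList(n, state) {
  if (state.length === 0)
    return null;

  let ret;
  if (state.objectMode) {
    ret = state.buffer.shift();
  } else if (!n || n >= state.length) {
    if (state.decoder)
      ret = state.buffer.join('');              // decoder set: chunks are strings
    else if (state.buffer.length === 1)
      ret = state.buffer.first();               // a lone chunk is returned as-is
    else
      ret = state.buffer.concat(state.length);  // assumes every chunk is a Buffer
    state.buffer.clear();
  } else {
    ret = state.buffer.consume(n, state.decoder);
  }
  return ret;
}
```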
When `stream.unshift` is called with a string (which the documentation states is a valid argument), then in cases where the read queue holds more than one chunk and `state.buffer.concat()` is triggered, the following error is thrown:

```
buffer.js:636
    return _copy(this, target, targetStart, sourceStart, sourceEnd);
           ^

TypeError: argument must be a buffer
```
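The failure doesn't require the full example below; a minimal sketch along these lines should hit the same path on v10.x (the chunk contents here are arbitrary):

```js
// Minimal sketch (assumed repro, not from the original report): a string
// unshifted next to a Buffer chunk makes state.buffer.concat() throw.
const { Readable } = require('stream');

const r = new Readable({ read() {} });
r.push(Buffer.from('body'));  // a Buffer chunk sits in the read queue
r.unshift('head');            // a string chunk joins the same queue
// Reading across both queued chunks forces state.buffer.concat(),
// which copies each chunk as if it were a Buffer:
r.read(8);                    // TypeError: argument must be a buffer
```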
Here's a script to reproduce the error. It's the `parseHeader` example from the documentation, except that instead of converting `remaining` to a `Buffer`, I pass it directly to `.unshift()`:
```js
const { Readable } = require('stream');
const { StringDecoder } = require('string_decoder');

class ArrayReader extends Readable {
  constructor(opt) {
    super(opt);
    const numbers = new Array(16384 * 2).fill(0).map(String);
    this.buffer = ['header', '\n\n', ...numbers];
  }

  _read(size) {
    while (this.buffer.length) {
      const chunk = this.buffer.shift();
      if (!this.buffer.length) {
        this.push(chunk);
        this.push(null);
        return;
      }
      if (!this.push(chunk))
        return;
    }
  }
}

// Pull off a header delimited by \n\n.
// Use unshift() if we get too much.
// Call the callback with (error, header, stream).
function parseHeader(stream, callback) {
  stream.on('error', callback);
  stream.on('readable', onReadable);
  const decoder = new StringDecoder('utf8');
  let header = '';
  function onReadable() {
    let chunk;
    while (null !== (chunk = stream.read())) {
      const str = decoder.write(chunk);
      if (str.match(/\n\n/)) {
        // Found the header boundary.
        const split = str.split(/\n\n/);
        header += split.shift();
        const remaining = split.join('\n\n');
        // const buf = Buffer.from(remaining, 'utf8');
        stream.removeListener('error', callback);
        stream.removeListener('readable', onReadable);
        if (remaining.length)
          stream.unshift(remaining); // passing the string directly
        // Now the body of the message can be read from the stream.
        callback(null, header, stream);
      } else {
        // Still reading the header.
        header += str;
      }
    }
  }
}

parseHeader(new ArrayReader(), (err, header, stream) => {
  stream.once('data', (chunk) => {
    // console.log(stream._readableState.buffer);
  });
});
```
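For completeness, uncommenting the `Buffer.from` line restores the documented behavior and the error goes away, since every queued chunk then stays a `Buffer`:

```js
// Workaround: convert to a Buffer before unshifting, as the documentation's
// original example does.
const buf = Buffer.from(remaining, 'utf8');
stream.unshift(buf);
```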
This can be fixed either by converting the chunk to a `Buffer` in `.unshift`, similar to how it's done in `.push` (perhaps adding an `encoding` argument too), or by changing the documentation to state that a string can only be passed when the stream's encoding is set. The first option could look something like this:
```js
Readable.prototype.unshift = function(chunk, encoding) {
  const state = this._readableState;
  let skipChunkCheck = false;
  // To avoid repetition between `.push` & `.unshift`, this could be checked
  // inside readableAddChunk instead.
  if (!state.objectMode) {
    if (typeof chunk === 'string') {
      encoding = encoding || state.defaultEncoding;
      if (encoding !== state.encoding) {
        chunk = Buffer.from(chunk, encoding);
        encoding = '';
      }
      skipChunkCheck = true;
    }
  } else {
    skipChunkCheck = true;
  }
  return readableAddChunk(this, chunk, encoding, true, skipChunkCheck);
};
```
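With a change along these lines the repro above would work unmodified, and callers could also pass an explicit encoding, mirroring `.push()` (hypothetical usage, assuming the patch above is applied):

```js
// Hypothetical usage, assuming the patched unshift() above:
stream.unshift(remaining);          // converted using state.defaultEncoding
stream.unshift(remaining, 'utf8');  // converted using the given encoding
```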