A writable stream that concatenates all data chunks and calls a callback with the complete result.
```
npx @tessl/cli install tessl/npm-concat-stream@2.0.0
```

A Node.js writable stream that concatenates all data chunks written to it and invokes a callback with the complete result when the stream ends. It supports multiple data types, including Buffers, strings, arrays of byte integers, and Uint8Arrays, with automatic type detection and configurable output encoding.
```
npm install concat-stream
```

```js
const concat = require('concat-stream');
```

For ES modules:

```js
import concat from 'concat-stream';
```

```js
const concat = require('concat-stream');
const fs = require('fs');

// Basic usage with a callback
const readStream = fs.createReadStream('data.txt');
const concatStream = concat(function (data) {
  // data contains all the concatenated data as a Buffer
  console.log(data.toString());
});
readStream.pipe(concatStream);

// Usage with the encoding option
const stringStream = concat({ encoding: 'string' }, function (data) {
  // data is automatically converted to a string
  console.log(data);
});

// Usage without a callback (listen for the 'finish' event)
const stream = concat({ encoding: 'buffer' });
stream.on('finish', function () {
  const result = stream.getBody();
  console.log(result);
});

// You can write data to the stream directly
stream.write(Buffer.from('Hello '));
stream.write(Buffer.from('World'));
stream.end();
```

Creates a new concat-stream instance with optional configuration.
```js
/**
 * Creates a writable stream that concatenates data chunks
 * @param {Object} [opts] - Configuration options
 * @param {string} [opts.encoding] - Output encoding: 'string', 'buffer', 'array', 'uint8array', 'u8', 'uint8', 'object'
 * @param {Function} [cb] - Callback invoked with the concatenated result
 * @returns {ConcatStream} - Writable stream instance
 */
function concat(opts, cb);
function concat(cb); // opts is optional
```

The main constructor function implementing the concatenation functionality.
```js
/**
 * ConcatStream constructor - extends the Node.js Writable stream.
 * Can be called with or without the 'new' keyword.
 * @param {Object} [opts] - Configuration options
 * @param {string} [opts.encoding] - Output encoding type
 * @param {Function} [cb] - Optional callback invoked on completion
 * @returns {ConcatStream} - Writable stream instance
 */
function ConcatStream(opts, cb);
```
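The default export of concat-stream is this constructor, so both call styles below produce an equivalent instance (when called without `new`, the constructor constructs the instance itself). A minimal sketch:

```js
const ConcatStream = require('concat-stream');

// With `new`:
const a = new ConcatStream({ encoding: 'string' }, function (data) {
  console.log('a:', data); // "hi there"
});
a.end('hi there');

// Without `new`:
const b = ConcatStream({ encoding: 'string' }, function (data) {
  console.log('b:', data); // "hi there"
});
b.end('hi there');
```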
```js
// Instance methods available on ConcatStream instances:

/**
 * Get the concatenated data in the specified format
 * @returns {Buffer|string|Array|Uint8Array|Object[]} Concatenated data
 */
ConcatStream.prototype.getBody = function();

/**
 * Automatically detect the encoding from the first data chunk
 * @param {*} [buff] - Optional chunk to analyze (defaults to the first chunk written)
 * @returns {string} Detected encoding type
 */
ConcatStream.prototype.inferEncoding = function(buff);
```
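Because `inferEncoding` accepts an explicit chunk, it can be called directly to preview what type a given chunk would be detected as. A small sketch:

```js
const concat = require('concat-stream');

const stream = concat(function () {});

// Pass a sample chunk explicitly to see the detected encoding
console.log(stream.inferEncoding('hello'));           // 'string'
console.log(stream.inferEncoding(Buffer.from('hi'))); // 'buffer'
console.log(stream.inferEncoding([1, 2, 3]));         // 'array'
console.log(stream.inferEncoding({ a: 1 }));          // 'object'
```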
```js
/**
 * Standard Node.js Writable stream events:
 * 'finish' - emitted when all data has been written and the stream has ended
 * 'error'  - emitted on write/processing errors (inherited from Writable)
 */

// Event: 'finish' - fired when stream.end() is called and all writes are complete
// Event: 'error'  - fired on stream errors
```
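A sketch combining both events when no completion callback is passed:

```js
const concat = require('concat-stream');

const stream = concat({ encoding: 'string' });

stream.on('finish', function () {
  // All writes are done; fetch the result explicitly
  console.log('Done:', stream.getBody());
});

stream.on('error', function (err) {
  console.error('Stream error:', err);
});

stream.write('partial ');
stream.write('data');
stream.end();
```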
```js
/**
 * Standard Writable stream method (inherited): write a chunk to the stream
 * @param {*} chunk - Data to write
 * @param {string} [encoding] - Encoding (ignored; input types are handled automatically)
 * @param {Function} [callback] - Optional callback invoked when the write completes
 */
ConcatStream.prototype.write = function(chunk, encoding, callback);

/**
 * End the stream, optionally writing a final chunk
 * @param {*} [chunk] - Optional final chunk to write before ending
 * @param {string} [encoding] - Encoding (ignored; input types are handled automatically)
 * @param {Function} [callback] - Optional callback invoked when the stream ends
 */
ConcatStream.prototype.end = function(chunk, encoding, callback);
```

The stream accepts multiple input data types: Buffers, strings, arrays of byte integers, Uint8Arrays, and plain JavaScript objects.
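For example, Uint8Array chunks are accepted as-is; with no encoding configured, the output type is inferred from the first chunk. A minimal sketch:

```js
const concat = require('concat-stream');

const u8Stream = concat(function (data) {
  console.log(data instanceof Uint8Array); // true
  console.log(data); // Uint8Array(4) [ 1, 2, 3, 4 ]
});
u8Stream.write(new Uint8Array([1, 2]));
u8Stream.write(new Uint8Array([3, 4]));
u8Stream.end();
```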
Configure the output format using the `encoding` option. When no encoding is given, the output type is detected from the first chunk, in this priority order:

```js
// Encoding detection priority (from inferEncoding method):
// 1. Buffer.isBuffer(chunk) -> 'buffer'
// 2. typeof Uint8Array !== 'undefined' && chunk instanceof Uint8Array -> 'uint8array'
// 3. Array.isArray(chunk) -> 'array'
// 4. typeof chunk === 'string' -> 'string'
// 5. Object.prototype.toString.call(chunk) === "[object Object]" -> 'object'
// 6. Default fallback -> 'buffer'
```

Buffer output (the default):

```js
const concat = require('concat-stream');

const bufferStream = concat(function (data) {
  // data is a Buffer containing all the concatenated data
  console.log('Total bytes:', data.length);
  console.log('Content:', data.toString('utf8'));
});
bufferStream.write(Buffer.from('Hello '));
bufferStream.write(Buffer.from('World'));
bufferStream.end();
```

String output:

```js
const concat = require('concat-stream');
const stringStream = concat({ encoding: 'string' }, function (data) {
  // data is a string
  console.log('Concatenated:', data); // "Hello World"
});
stringStream.write('Hello ');
stringStream.write('World');
stringStream.end();
```

Array output:

```js
const concat = require('concat-stream');
const arrayStream = concat({ encoding: 'array' }, function (data) {
  // data is a flattened array
  console.log('Array:', data); // [1, 2, 3, 4, 5, 6]
});
arrayStream.write([1, 2, 3]);
arrayStream.write([4, 5, 6]);
arrayStream.end();
```

```js
const concat = require('concat-stream');
// When mixing input types, the output format is inferred from the first chunk
const mixedStream = concat(function (data) {
  // The first chunk was a string, so the output will be a string
  console.log('Result:', data); // "Hello123"
});
mixedStream.write('Hello');
mixedStream.write(Buffer.from('123'));
mixedStream.end();
```

When no data is written to the stream, the result depends on the encoding:
```js
const concat = require('concat-stream');

// No encoding set, no data written -> returns an empty array
const emptyStream1 = concat(function (data) {
  console.log(data); // []
});
emptyStream1.end();

// String encoding, no data written -> returns an empty string
const emptyStream2 = concat({ encoding: 'string' }, function (data) {
  console.log(data); // ""
});
emptyStream2.end();

// Buffer encoding, no data written -> returns an empty Buffer
const emptyStream3 = concat({ encoding: 'buffer' }, function (data) {
  console.log(data); // <Buffer >
  console.log(data.length); // 0
});
emptyStream3.end();
```

Object mode:

```js
const concat = require('concat-stream');
// Using the 'objects' encoding
const objectStream = concat({ encoding: 'objects' }, function (data) {
  // data is an array containing all the written objects
  console.log('Objects:', data); // [{ name: 'Alice' }, { name: 'Bob' }]
});
objectStream.write({ name: 'Alice' });
objectStream.write({ name: 'Bob' });
objectStream.end();

// Auto-detection also works for objects
const autoStream = concat(function (data) {
  console.log('Auto-detected objects:', data); // [{ id: 1 }, { id: 2 }]
});
autoStream.write({ id: 1 });
autoStream.write({ id: 2 });
autoStream.end();
```

Buffering an entire file into memory:

```js
const concat = require('concat-stream');
const fs = require('fs');

const readStream = fs.createReadStream('image.png');
const concatStream = concat(function (imageBuffer) {
  // imageBuffer contains the entire image as a Buffer
  console.log('Image size:', imageBuffer.length, 'bytes');
  // Process the complete image data
  processImage(imageBuffer);
});
readStream.on('error', handleError);
readStream.pipe(concatStream);

function handleError(err) {
  console.error('Error reading file:', err);
  process.exit(1);
}
```

The options object has the following shape:

```ts
interface ConcatOptions {
  /**
   * Output encoding format:
   * - 'buffer': return a Node.js Buffer (default for Buffer inputs)
   * - 'string': return a UTF-8 string (default for string inputs)
   * - 'array': return a flattened array of byte integers (default for array inputs)
   * - 'uint8array' / 'u8' / 'uint8': return a Uint8Array (default for Uint8Array inputs)
   * - 'object' or 'objects': return an array of objects (default for object inputs)
   * - undefined: auto-detect from the first chunk
   */
  encoding?: 'buffer' | 'string' | 'array' | 'uint8array' | 'u8' | 'uint8' | 'object' | 'objects';
}
```

Before processing, encoding values are normalized to standard forms:
```
// Input encoding values that get normalized:
'u8'      → 'uint8array'
'uint8'   → 'uint8array'
'object'  → 'object'  (no change)
'objects' → 'objects' (no change; both behave the same)

// All encoding values are lowercased first:
'BUFFER' → 'buffer'
'String' → 'string'
```
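A quick sketch of these rules in practice: the alias is lowercased, then mapped to its canonical form, so the result below arrives as a Uint8Array:

```js
const concat = require('concat-stream');

// 'U8' is lowercased to 'u8' and normalized to 'uint8array'
const aliasStream = concat({ encoding: 'U8' }, function (data) {
  console.log(data instanceof Uint8Array); // true
  console.log(data); // Uint8Array(4) [ 1, 2, 3, 4 ]
});
aliasStream.write(Buffer.from([1, 2]));
aliasStream.write(Buffer.from([3, 4]));
aliasStream.end();
```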
When no encoding is specified, the stream automatically detects the appropriate output format based on the first data chunk:

```js
// These examples show automatic type detection
const concat = require('concat-stream');
const callback = (data) => console.log(data); // example completion callback

// First chunk is a Buffer -> output will be a Buffer
concat(callback).write(Buffer.from('data'));

// First chunk is a string -> output will be a string
concat(callback).write('data');

// First chunk is an array -> output will be an array
concat(callback).write([1, 2, 3]);
```

concat-stream does not handle errors internally. You must handle errors on the source streams that pipe into it:
```js
const concat = require('concat-stream');
const fs = require('fs');

const readStream = fs.createReadStream('file.txt');
const concatStream = concat(function (data) {
  console.log('File contents:', data.toString());
});

// Important: always handle errors on source streams
readStream.on('error', function (err) {
  console.error('Read error:', err);
  // Handle the error appropriately
});

readStream.pipe(concatStream);
```

Recommended error handling libraries:
- end-of-stream - detect stream completion and errors
- pump - pipe streams together with proper error handling (see the sketch below)

concat-stream works in browsers when bundled with tools like browserify or webpack. It includes polyfills for older environments and has been tested across major browsers, including IE8+.
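As an illustration of the pump recommendation above, a minimal sketch using pump's `pump(source, destination, callback)` form:

```js
const concat = require('concat-stream');
const fs = require('fs');
const pump = require('pump');

const source = fs.createReadStream('file.txt');
const sink = concat(function (data) {
  console.log('File contents:', data.toString());
});

// pump wires up error handling and cleanup for the whole pipeline;
// the callback fires once, with an error if any stream failed.
pump(source, sink, function (err) {
  if (err) console.error('Pipeline failed:', err);
});
```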