Simple line-by-line stream reader for Node.js applications
npx @tessl/cli install tessl/npm-byline@5.0.0

Byline is a simple line-by-line stream reader for Node.js that wraps any readable stream to emit individual lines instead of chunks of data. It supports both UNIX and Windows line endings as well as Unicode UTS #18 line boundaries, and implements the Node.js streams2 API for performance and compatibility.
npm install byline

const byline = require('byline');

Direct access to the LineStream class:
const { LineStream } = require('byline');

Note: This package uses CommonJS exports only. ES module imports are not natively supported.
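If you are working from an ES module, one conservative workaround (a sketch; the .mjs file name is illustrative) is Node's createRequire interop:

// app.mjs - loading the CommonJS-only package from an ES module
import { createRequire } from 'module';

const require = createRequire(import.meta.url);
const byline = require('byline');

Depending on the Node version, a default import (import byline from 'byline') may also work through Node's CommonJS interop, but createRequire is the safe choice.

Basic usage: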
const fs = require('fs');
const byline = require('byline');

// Convenience API - wrap a readable stream
const stream = byline(fs.createReadStream('sample.txt', { encoding: 'utf8' }));

stream.on('data', function(line) {
  console.log(line);
});

stream.on('end', function() {
  console.log('Finished reading file');
});

Byline is built around several key components:
- byline(stream) for quick stream wrapping
- createStream() method for more control
- LineStream class implementing the Node.js streams2 Transform interface

byline(stream) is the primary API for quickly wrapping readable streams to emit lines instead of chunks.
/**
* Wraps a readable stream to emit individual lines
* @param {ReadableStream} readStream - The readable stream to wrap
* @param {Object} [options] - Optional configuration options
* @param {boolean} [options.keepEmptyLines=false] - Whether to emit empty lines
* @returns {LineStream} LineStream instance that emits line data events
*/
function byline(readStream, options) {}

createStream() is an explicit method for creating line streams, with optional stream wrapping.
/**
* Creates a LineStream, optionally wrapping an existing stream
* @param {ReadableStream} [readStream] - Optional readable stream to wrap
* @param {Object} [options] - Optional configuration options
* @param {boolean} [options.keepEmptyLines=false] - Whether to emit empty lines
* @returns {LineStream} LineStream instance for line processing
*/
function createStream(readStream, options) {}
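For illustration, a short sketch of the wrapping form (the file name is a placeholder); createStream() accepts the same arguments as byline():

const fs = require('fs');
const byline = require('byline');

// Wrap an existing readable stream and keep empty lines
const lines = byline.createStream(fs.createReadStream('file.txt', { encoding: 'utf8' }), {
  keepEmptyLines: true
});

lines.on('data', function(line) {
  console.log(line);
});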
Direct access to the transform stream class is provided for advanced usage patterns.

/**
* Transform stream class that processes chunks into lines
* Inherits from stream.Transform and implements streams2 API
* @param {Object} [options] - Transform stream options plus byline-specific configuration
* @param {boolean} [options.keepEmptyLines=false] - Whether to emit empty lines
* @constructor
*/
function LineStream(options) {}

createLineStream() is a legacy method for creating line streams - use createStream() instead.
/**
* @deprecated Use createStream() instead
* Creates a line stream from a readable stream
* @param {ReadableStream} readStream - The readable stream to wrap
* @returns {LineStream} LineStream instance
*/
function createLineStream(readStream) {}

Note: This method logs a deprecation warning when used and will be removed in future versions.
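Migration is a one-line change; a sketch, assuming the deprecated helper is exposed as byline.createLineStream (the file name is a placeholder):

const fs = require('fs');
const byline = require('byline');

// Before (deprecated, logs a warning):
// const stream = byline.createLineStream(fs.createReadStream('file.txt'));

// After:
const stream = byline.createStream(fs.createReadStream('file.txt'));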
Create standalone transform streams that can be piped between other streams.
const fs = require('fs');
const byline = require('byline');
// Create a through-stream without wrapping a specific input
const lineStream = byline.createStream();
// Use in a pipeline
fs.createReadStream('input.txt')
  .pipe(lineStream)
  .pipe(fs.createWriteStream('output.txt'));

Pause and Resume with Large Files:
const fs = require('fs');
const byline = require('byline');
const stream = byline(fs.createReadStream('large-file.txt'));
stream.setEncoding('utf8');
stream.on('data', function(line) {
  console.log('Processing:', line);

  // Pause processing briefly
  stream.pause();
  setTimeout(function() {
    stream.resume();
  }, 10);
});

stream.on('end', function() {
  console.log('Finished processing file');
});

Handling Empty Lines:
const fs = require('fs');
const byline = require('byline');
// Skip empty lines (default behavior)
const streamSkipEmpty = byline(fs.createReadStream('file.txt', { encoding: 'utf8' }));

// Keep empty lines
const streamKeepEmpty = byline(fs.createReadStream('file.txt', { encoding: 'utf8' }), {
  keepEmptyLines: true
});

streamKeepEmpty.on('data', function(line) {
  if (line === '') {
    console.log('Found empty line');
  } else {
    console.log('Line:', line);
  }
});

Direct LineStream Construction:
const fs = require('fs');
const { LineStream } = require('byline');
const input = fs.createReadStream('sample.txt');
const output = fs.createWriteStream('processed.txt');
const lineStream = new LineStream({ keepEmptyLines: true });
input.pipe(lineStream);
lineStream.pipe(output);
lineStream.on('data', function(line) {
  console.log('Line length:', line.length);
});

Byline provides full support for Node.js streams2 readable/flowing modes and backpressure handling.
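For instance, backpressure can be exercised by piping lines into a deliberately slow writable (a sketch; the file name and delay are arbitrary):

const fs = require('fs');
const { Writable } = require('stream');
const byline = require('byline');

const lines = byline(fs.createReadStream('large-file.txt', { encoding: 'utf8' }));

// A slow consumer; pipe() propagates its backpressure to the line stream
const slowWriter = new Writable({
  objectMode: true,
  write(line, encoding, callback) {
    setTimeout(function() {
      console.log('wrote:', line);
      callback();
    }, 5);
  }
});

lines.pipe(slowWriter);

Non-flowing mode with manual read() calls is also supported: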
const fs = require('fs');
const byline = require('byline');
const stream = byline(fs.createReadStream('sample.txt'));
// Non-flowing mode with manual reading
stream.on('readable', function() {
  let line;
  while (null !== (line = stream.read())) {
    console.log(line);
  }
});

Byline offers comprehensive support for different line ending formats and Unicode line boundaries.
Supported line endings:
- \n (UNIX)
- \r\n (Windows)
- \r (legacy Mac)
- \v, \f, \x85, \u2028, \u2029 (additional Unicode UTS #18 line boundaries)

Edge case handling: a \r\n sequence that is split across two incoming chunks is still treated as a single line break - byline tracks whether the previous chunk ended with \r.
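As an illustrative sketch (the input string is made up), feeding mixed line endings through byline yields one 'data' event per line:

const { Readable } = require('stream');
const byline = require('byline');

// A source whose single chunk mixes UNIX, Windows, and legacy Mac endings
const source = Readable.from(['first\nsecond\r\nthird\rfourth']);
const lines = byline(source);

lines.on('data', function(line) {
  // Without setEncoding(), each line arrives as a Buffer
  console.log(line.toString()); // first, second, third, fourth - one per event
});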
Byline automatically detects and handles character encodings from source streams.
/**
* Set the encoding for line output
* Inherited from Node.js Readable stream
* @param {string} encoding - Character encoding (e.g., 'utf8', 'ascii', 'base64')
* @returns {LineStream} This stream for chaining
*/
LineStream.prototype.setEncoding = function(encoding) {}

Encoding behavior: if the source stream has an encoding set, or setEncoding() is called on the line stream, lines are emitted as strings in that encoding; otherwise lines are emitted as Buffers.
Usage example:
const fs = require('fs');
const byline = require('byline');
const stream = byline(fs.createReadStream('file.txt'));
stream.setEncoding('utf8'); // Ensure string output
stream.on('data', function(line) {
  console.log(typeof line); // 'string'
});

/**
* Configuration options for byline streams
* @typedef {Object} StreamOptions
* @property {boolean} [keepEmptyLines=false] - Whether to emit empty lines. Default: false (empty lines are skipped)
*/

Usage with options:
const fs = require('fs');
const byline = require('byline');
// Keep empty lines in output
const stream = byline(fs.createReadStream('file.txt', { encoding: 'utf8' }), {
  keepEmptyLines: true
});

stream.on('data', function(line) {
  console.log(`Line: "${line}"`); // Will include empty strings
});

As LineStream extends Node.js Transform, it inherits all standard stream methods:
/**
* Pause the stream - stops emitting 'data' events
* @returns {LineStream} This stream for chaining
*/
LineStream.prototype.pause = function() {}
/**
* Resume a paused stream
* @returns {LineStream} This stream for chaining
*/
LineStream.prototype.resume = function() {}
/**
* Pipe this stream to a writable destination
* @param {WritableStream} destination - The destination stream
* @param {Object} [options] - Pipe options
* @returns {WritableStream} The destination stream
*/
LineStream.prototype.pipe = function(destination, options) {}
/**
* Read data from the stream in non-flowing mode
* @param {number} [size] - Number of bytes to read
* @returns {string|Buffer|null} Line data or null if no data available
*/
LineStream.prototype.read = function(size) {}

Stream Events (inherited):
- 'data' - Emitted for each line
- 'end' - Emitted when no more data
- 'error' - Emitted on errors
- 'readable' - Emitted when data is available to read
- 'close' - Emitted when the stream is closed

Byline throws standard JavaScript Error objects for validation failures.
Common error conditions:
- Error('expected readStream') - When readStream is not provided to internal functions
- Error('readStream must be readable') - When the provided stream is not readable

Error handling example:
const fs = require('fs');
const byline = require('byline');
try {
  const stream = byline(fs.createReadStream('nonexistent.txt'));

  stream.on('error', function(err) {
    console.error('Stream error:', err.message);
  });

  stream.on('data', function(line) {
    console.log(line);
  });
} catch (err) {
  console.error('Setup error:', err.message);
}
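The validation errors listed above are thrown synchronously. A minimal sketch, assuming byline rejects values that are not readable streams as documented:

const byline = require('byline');

try {
  // Not a readable stream, so wrapping it should throw a validation error
  byline({ readable: false });
} catch (err) {
  console.error('Validation error:', err.message); // e.g. 'readStream must be readable'
}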
/**
* LineStream extends Node.js Transform stream
* Emits 'data' events with individual lines as strings or Buffers
* Supports all standard Transform stream events and methods
* @typedef {Object} LineStream
* @property {string} [encoding] - Current encoding for output lines
* @property {string[]} _lineBuffer - Internal line buffer array
* @property {boolean} _keepEmptyLines - Configuration flag for empty line handling
* @property {boolean} _lastChunkEndedWithCR - State tracking for CRLF boundary handling
* @property {string} _chunkEncoding - Current chunk encoding being processed
*/
/**
* Standard Node.js Transform stream options plus byline-specific configuration
* @typedef {Object} LineStreamOptions
* @property {boolean} [keepEmptyLines=false] - Whether to emit empty lines
*/
/**
* Options for byline convenience functions
* @typedef {Object} StreamOptions
* @property {boolean} [keepEmptyLines=false] - Whether to emit empty lines
*/