Simple streaming readline module for Node.js that enables memory-efficient line-by-line file reading
npx @tessl/cli install tessl/npm-linebyline@1.3.0

LineByLine is a simple streaming readline module for Node.js that enables memory-efficient line-by-line file reading. It creates a readable stream from a file and emits line events for each line of text, making it ideal for processing large files without loading them entirely into memory.
npm install linebyline

const readLine = require('linebyline');
// Read a file line by line
const rl = readLine('./somefile.txt');
rl.on('line', function(line, lineCount, byteCount) {
console.log('Line ' + lineCount + ': ' + line);
})
.on('error', function(e) {
console.error('Error reading file:', e);
})
.on('end', function() {
console.log('Finished reading file');
});

LineByLine is built around a simple streaming architecture:
Creates a new ReadLine instance for reading files line by line.
/**
* Creates a new ReadLine instance for reading files line by line
* @param {string|Stream} file - File path string or readable stream object
* @param {Object} [opts] - Configuration options
* @param {number} [opts.maxLineLength=4096] - Maximum line length in bytes
* @param {boolean} [opts.retainBuffer=false] - If true, emits raw Buffer objects instead of strings
* @returns {ReadLine} ReadLine instance (EventEmitter)
*/
function readLine(file, opts);

Usage Examples:
// Basic usage with file path
const rl = readLine('./data.txt');
// With options
const rl = readLine('./large-file.txt', {
maxLineLength: 8192, // 8K buffer
retainBuffer: false // Convert to strings
});
// With existing stream
const fs = require('fs');
const stream = fs.createReadStream('./file.txt');
const rl = readLine(stream);
// Can be called with or without 'new'
const rl1 = readLine('./file.txt');
const rl2 = new readLine('./file.txt');

The ReadLine instance emits several events during file processing.
Emitted for each line read from the file.
/**
* Emitted for each line read from the file
* @param {string|Buffer} line - Line content (string by default, Buffer if retainBuffer=true)
* @param {number} lineCount - Current line number (1-based)
* @param {number} byteCount - Total bytes processed so far
*/
rl.on('line', function(line, lineCount, byteCount) {
// Process line
});

Usage Examples:
// Standard string processing
rl.on('line', function(line, lineCount, byteCount) {
console.log(`Line ${lineCount} (${byteCount} bytes): ${line}`);
});
// Custom encoding with retainBuffer
const iconv = require('iconv-lite');
const rl = readLine('./file-in-win1251.txt', { retainBuffer: true });
rl.on('line', function(buffer, lineCount, byteCount) {
const line = iconv.decode(buffer, 'win1251');
console.log(`Decoded line ${lineCount}: ${line}`);
});

Emitted when the underlying file stream opens.
/**
* Emitted when the underlying file stream opens
* @param {number} fd - File descriptor
*/
rl.on('open', function(fd) {
// File opened successfully
});

Emitted when an error occurs during reading or processing.
/**
* Emitted when an error occurs during reading or processing
* @param {Error} error - Error object
*/
rl.on('error', function(error) {
// Handle error
});

Usage Examples:
rl.on('error', function(err) {
if (err.code === 'ENOENT') {
console.error('File not found:', err.path);
} else {
console.error('Reading error:', err.message);
}
});

Emitted when the file has been completely read.
/**
* Emitted when the file has been completely read
*/
rl.on('end', function() {
// File reading completed
});

Emitted when the underlying stream closes.
/**
* Emitted when the underlying stream closes
*/
rl.on('close', function() {
// Stream closed
});

Controls the maximum buffer size for a single line.
/**
* Maximum buffer size for a single line
* @type {number}
* @default 4096
*/
opts.maxLineLength

Lines longer than this limit will not be read properly and may cause data loss.
Controls whether to emit raw Buffer objects instead of converted strings.
/**
* When true, the 'line' event receives raw Buffer objects instead of strings
* @type {boolean}
* @default false
*/
opts.retainBuffer

Useful for custom encoding scenarios where you need to decode the buffer manually.
Usage Examples:
// High precision for large lines
const rl = readLine('./big-data.txt', {
maxLineLength: 16384 // 16K buffer
});
// Custom encoding handling
const rl = readLine('./encoded-file.txt', {
retainBuffer: true
});
rl.on('line', function(buffer, lineCount, byteCount) {
// Custom decoding logic here
const line = buffer.toString('utf16le');
console.log(line);
});

/**
* ReadLine class - extends EventEmitter
*/
class ReadLine extends EventEmitter {
constructor(file, opts);
/** The underlying readable stream */
input: ReadableStream;
}
/**
* Configuration options for ReadLine constructor
*/
interface ReadLineOptions {
/** Maximum line length in bytes (default: 4096) */
maxLineLength?: number;
/** If true, emits raw Buffer objects instead of strings (default: false) */
retainBuffer?: boolean;
}

LineByLine provides comprehensive error handling through the event system:
Lines exceeding maxLineLength may cause data loss but do not trigger an error event.

Best Practices:
const rl = readLine('./file.txt');
// Always handle errors
rl.on('error', function(err) {
console.error('Error:', err.message);
// Handle error appropriately
});
// Ensure proper cleanup
rl.on('end', function() {
console.log('Processing completed');
});
rl.on('close', function() {
console.log('Stream closed');
});

LineByLine handles different line ending formats:
maxLineLength option