Advanced streaming capabilities for processing large CSV files with support for multiple input sources including files, streams, and strings. Built on Node.js Transform streams for memory-efficient processing.
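Because the converter is itself a Node.js Transform stream, CSV input can be piped through it and the resulting JSON piped onward without holding the full result in memory. A minimal sketch, assuming the library's default line-by-line JSON output and placeholder file paths:

import csvtojson from "csvtojson";
import { createReadStream, createWriteStream } from "fs";

// Rows flow through the converter chunk by chunk; by default each
// parsed row is emitted as a JSON string on its own line.
createReadStream('./data.csv')
  .pipe(csvtojson())
  .pipe(createWriteStream('./data.ndjson'));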
Parse CSV data directly from file paths with optional read stream configuration.
/**
* Parse CSV from a file path
* @param filePath - Path to the CSV file
* @param options - Optional file read stream configuration
* @returns Converter instance for chaining
*/
fromFile(filePath: string, options?: CreateReadStreamOption): Converter;
interface CreateReadStreamOption {
  flags?: string;
  encoding?: string;
  fd?: number;
  mode?: number;
  autoClose?: boolean;
  start?: number;
  end?: number;
  highWaterMark?: number;
}

Usage Examples:
import csvtojson from "csvtojson";

// Basic file parsing
const rows = await csvtojson().fromFile('./data.csv');

// With custom read options
const chunkedRows = await csvtojson()
  .fromFile('./large-file.csv', {
    encoding: 'utf8',
    highWaterMark: 64 * 1024 // read in 64KB chunks
  });

// Parse a specific byte range of the file (note that skipping
// bytes also skips the header row)
const partialRows = await csvtojson()
  .fromFile('./data.csv', {
    start: 1000, // skip the first 1000 bytes
    end: 5000    // stop at byte offset 5000
  });

Parse CSV data from any Node.js Readable stream for maximum flexibility.
/**
* Parse CSV from a readable stream
* @param readStream - Node.js Readable stream containing CSV data
* @returns Converter instance for chaining
*/
fromStream(readStream: Readable): Converter;

Usage Examples:
import csvtojson from "csvtojson";
import { createReadStream } from "fs";
import { createGunzip } from "zlib";
import { Readable } from "stream";

// From a file stream
const fileStream = createReadStream('./data.csv');
const fileRows = await csvtojson().fromStream(fileStream);

// From a compressed file
const gzipStream = createReadStream('./data.csv.gz')
  .pipe(createGunzip());
const gzipRows = await csvtojson().fromStream(gzipStream);

// From an HTTP response: fetch returns a web ReadableStream, so
// convert it to a Node.js Readable first (Node 18+)
const response = await fetch('https://example.com/data.csv');
const httpRows = await csvtojson()
  .fromStream(Readable.fromWeb(response.body));
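Note that Node's pipe() does not forward errors from the source stream, so it is worth attaching an error handler to the source in addition to handling the parse rejection. A brief sketch with a placeholder path:

import csvtojson from "csvtojson";
import { createReadStream } from "fs";

// Source-stream errors (e.g. a missing file) are reported on the
// source itself, not through the converter's promise.
const source = createReadStream('./maybe-missing.csv');
source.on('error', err => console.error('Read failed:', err));

csvtojson()
  .fromStream(source)
  .then(
    rows => console.log(`Parsed ${rows.length} rows`),
    err => console.error('Parse failed:', err)
  );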
Parse CSV data directly from strings for in-memory processing.

/**
* Parse CSV from a string
* @param csvString - String containing CSV data
* @returns Converter instance for chaining
*/
fromString(csvString: string): Converter;

Usage Examples:
import csvtojson from "csvtojson";

// Basic string parsing
const csvData = `name,age,city
Alice,25,New York
Bob,30,London`;

const rows = await csvtojson().fromString(csvData);
// Result: [
//   { name: "Alice", age: "25", city: "New York" },
//   { name: "Bob", age: "30", city: "London" }
// ]

// With custom parsing options
const typedRows = await csvtojson({
  checkType: true, // infer number/boolean types from cell values
  trim: true       // trim whitespace around cells
}).fromString(csvData);
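Strings without a header row can be parsed as well; a short sketch, assuming the library's noheader and headers parsing parameters:

import csvtojson from "csvtojson";

const headerless = `Alice,25,New York
Bob,30,London`;

// Supply column names when the input has no header row
const namedRows = await csvtojson({
  noheader: true,
  headers: ['name', 'age', 'city']
}).fromString(headerless);
// [{ name: "Alice", age: "25", city: "New York" }, ...]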
Promise-like interface for async/await and .then() usage patterns.

/**
* Promise-like interface for handling async CSV parsing results
* @param onfulfilled - Success callback receiving parsed JSON array
* @param onrejected - Error callback receiving error information
* @returns Promise-like object for chaining
*/
then<TResult1 = any[], TResult2 = never>(
onfulfilled?: (value: any[]) => TResult1 | PromiseLike<TResult1>,
onrejected?: (reason: any) => TResult2 | PromiseLike<TResult2>
): PromiseLike<TResult1 | TResult2>;

Usage Examples:
import csvtojson from "csvtojson";

// Async/await pattern
async function parseCSV() {
  try {
    const jsonArray = await csvtojson().fromFile('./data.csv');
    return jsonArray;
  } catch (error) {
    console.error('Parsing failed:', error);
  }
}

// Promise .then() pattern (the documented signature returns a
// PromiseLike, so errors are handled via the onrejected parameter)
csvtojson()
  .fromFile('./data.csv')
  .then(
    jsonArray => {
      console.log('Parsed data:', jsonArray);
    },
    error => {
      console.error('Error:', error);
    }
  );

// Transform results
const processedData = await csvtojson()
  .fromFile('./data.csv')
  .then(jsonArray =>
    jsonArray.map(row => ({
      ...row,
      processed: true
    }))
  );

Access to stream properties and parsing state information.
/**
* Get current parsing parameters
*/
readonly parseParam: CSVParseParam;
/**
* Get parsing runtime state
*/
readonly parseRuntime: ParseRuntime;
/**
* Get number of parsed lines
*/
readonly parsedLineNumber: number;
interface ParseRuntime {
  /** Whether ignore-column entries given as names still need to be mapped to column indexes */
  needProcessIgnoreColumn: boolean;
  /** Whether include-column entries given as names still need to be mapped to column indexes */
  needProcessIncludeColumn: boolean;
  /** Indexes of columns to keep; undefined keeps all, [] hides all */
  selectedColumns?: number[];
  /** Whether parsing has ended */
  ended: boolean;
  /** Whether an error has occurred during parsing */
  hasError: boolean;
  /** Error object if hasError is true */
  error?: Error;
  /** Delimiter inferred from the CSV data */
  delimiter: string | string[];
  /** End-of-line sequence inferred from the CSV data */
  eol?: string;
  /** Converter functions for columns, populated at runtime */
  columnConv: (CellParser | null)[];
  /** Header type information */
  headerType: any[];
  /** Header title strings */
  headerTitle: string[];
  /** Header flag information */
  headerFlag: any[];
  /** Headers inferred from the CSV data */
  headers?: any[];
  /** Internal buffer for CSV line processing */
  csvLineBuffer?: Buffer;
  /** Whether parsing has started (set after the first chunk is processed) */
  started: boolean;
  /** Hook for preprocessing raw CSV data */
  preRawDataHook?: PreRawDataCallback;
  /** Hook for preprocessing individual file lines */
  preFileLineHook?: PreFileLineCallback;
  /** Current number of parsed lines */
  parsedLineNumber: number;
  /** Column value setter functions */
  columnValueSetter: Function[];
  /** Subscription handlers for stream events */
  subscribe?: {
    onNext?: (data: any, lineNumber: number) => void | PromiseLike<void>;
    onError?: (err: CSVError) => void;
    onCompleted?: () => void;
  };
  /** Promise handlers for the then() method */
  then?: {
    onfulfilled: (value: any[]) => any;
    onrejected: (err: Error) => any;
  };
}

Usage Examples:
import csvtojson from "csvtojson";

const converter = csvtojson({
  delimiter: ";",
  trim: true
});

// Check current parameters
console.log(converter.parseParam.delimiter); // ";"
console.log(converter.parseParam.trim);      // true

// Monitor parsing progress
converter.fromFile('./large-file.csv')
  .subscribe((jsonObj, lineNumber) => {
    if (lineNumber % 1000 === 0) {
      console.log(`Processed ${converter.parsedLineNumber} lines`);
    }
  });
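The runtime state is also useful after parsing completes, for instance to check which delimiter and line ending were inferred. A minimal sketch using the ParseRuntime fields above, with delimiter detection enabled via the library's 'auto' setting:

import csvtojson from "csvtojson";

const converter = csvtojson({ delimiter: 'auto' }); // let the parser infer it
await converter.fromFile('./data.csv');

console.log(converter.parseRuntime.delimiter); // e.g. ","
console.log(converter.parseRuntime.eol);       // e.g. "\n"
console.log(converter.parseRuntime.ended);     // true after completion
console.log(converter.parseRuntime.hasError);  // false on success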
Configure stream options for optimal memory usage with large files.

/**
* Create converter with custom Transform stream options
* @param param - Parsing parameters
* @param options - Node.js Transform stream options
*/
constructor(param?: Partial<CSVParseParam>, options?: TransformOptions);
interface TransformOptions {
  objectMode?: boolean;
  highWaterMark?: number;
  encoding?: string;
  // ... additional stream options
}

Usage Examples:
import csvtojson from "csvtojson";

// Optimize for large files
const converter = csvtojson({
  delimiter: ",",
  maxRowLength: 10000
}, {
  highWaterMark: 128 * 1024, // 128KB internal buffer
  objectMode: false
});

// Process a very large file
const jsonArray = await converter.fromFile('./very-large-file.csv');
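When even the aggregated result array would be too large, the converter can be consumed purely as a stream by listening for 'data' events instead of awaiting it. A sketch, assuming each emitted chunk is a Buffer containing one JSON-encoded row (the library's default downstream format):

import csvtojson from "csvtojson";

const streaming = csvtojson();
streaming.fromFile('./very-large-file.csv');

streaming.on('data', chunk => {
  // One parsed row per chunk; process it and let it be garbage collected
  const row = JSON.parse(chunk.toString('utf8'));
  console.log(row);
});
streaming.on('error', err => console.error('Parse failed:', err));
streaming.on('end', () => console.log('All rows processed'));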