JSONStream is a streaming JSON parser and stringifier for Node.js that enables processing of large JSON data structures without loading the entire content into memory. It provides JSONPath-style selectors for extracting specific values from JSON streams and supports both parsing and stringifying operations with customizable formatting.
npm install JSONStream

CommonJS require:

const JSONStream = require('JSONStream');

ESM import:
import JSONStream from 'JSONStream';

Quick start:

const JSONStream = require('JSONStream');
const fs = require('fs');
// Parse JSON stream and extract specific data
fs.createReadStream('large-data.json')
.pipe(JSONStream.parse('rows.*'))
.on('data', function(row) {
console.log('Row:', row);
});
// Stringify objects to JSON array format
const stringify = JSONStream.stringify();
stringify.pipe(process.stdout);
stringify.write({name: 'Alice', age: 25});
stringify.write({name: 'Bob', age: 30});
stringify.end();

JSONStream is built around the Node.js streaming architecture: every parser and stringifier is a stream that can be piped to and from other streams.
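As an illustration, a read stream, a parser, a transform step, and a stringifier can be chained with pipe() into a single pipeline. A minimal sketch (the file names, the 'rows.*' pattern, and the price field are hypothetical):

const fs = require('fs');
const { Transform } = require('stream');
const JSONStream = require('JSONStream');

fs.createReadStream('large-data.json')
  .pipe(JSONStream.parse('rows.*'))          // emit each row as it is parsed
  .pipe(new Transform({
    objectMode: true,
    transform(row, enc, done) {
      row.price = Number(row.price);         // example per-row processing
      done(null, row);
    }
  }))
  .pipe(JSONStream.stringify())              // re-serialize as a JSON array
  .pipe(fs.createWriteStream('processed.json'));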
Parse streaming JSON data and emit values that match specified patterns.
/**
* Creates a transform stream that parses JSON and emits matching values
* @param {string|Array|null} path - JSONPath pattern to match values (optional)
* @param {function} map - Optional mapping/filtering function
* @returns {Stream} Transform stream
*/
JSONStream.parse(path, map)

Path Pattern Types:
- 'rows.*.doc' - dot notation JSONPath
- ['rows', true, 'doc'] - programmatic pattern specification
- 'docs..value' or ['docs', {recurse: true}, 'value'] - recursive descent
- 'obj.$*' or ['obj', {emitKey: true}] - emit keys along with values
- ['obj', {emitPath: true}] - emit the full path along with values

Pattern Elements:
- true or *: Matches any key
- {emitKey: true}: Include key in emitted data
- {emitPath: true}: Include full path in emitted data
- {recurse: true}: Recursive descent (equivalent to ..)

Usage Examples:
const JSONStream = require('JSONStream');
// Basic pattern matching
const parser = JSONStream.parse('users.*');
parser.on('data', function(user) {
console.log('User:', user);
});
// With mapping function
const filtered = JSONStream.parse('items.*', function(item) {
return item.active ? item : null; // Filter inactive items
});
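// The map function can also reshape each value rather than only filter it
// (a sketch; the name and email fields are hypothetical):
const projected = JSONStream.parse('items.*', function(item) {
  return {name: item.name, email: item.email}; // Emit a trimmed-down object per item
});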
// Key emission
const withKeys = JSONStream.parse('config.$*');
withKeys.on('data', function(data) {
console.log('Key:', data.key, 'Value:', data.value);
});
// Path emission
const withPaths = JSONStream.parse(['data', {emitPath: true}]);
withPaths.on('data', function(data) {
console.log('Path:', data.path, 'Value:', data.value);
});
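// Array patterns may also mix strings with other matcher elements, such as a
// RegExp that matches any key satisfying the expression (a sketch; the input is
// assumed to have rows whose entries contain doc and value keys):
const regexParser = JSONStream.parse(['rows', true, /^(doc|value)$/]);
regexParser.on('data', function(matched) {
  console.log('Matched doc or value:', matched);
});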
// Recursive descent
const recursive = JSONStream.parse('docs..title');
recursive.on('data', function(title) {
console.log('Found title:', title);
});

Create a writable stream that formats objects as a JSON array.
/**
* Creates a writable stream for JSON array stringification
* @param {string} open - Opening delimiter (default: '[\n')
* @param {string} sep - Item separator (default: '\n,\n')
* @param {string} close - Closing delimiter (default: '\n]\n')
* @param {number} indent - JSON indentation level (default: 0)
* @returns {Stream} Writable stream
*/
JSONStream.stringify(open, sep, close, indent)

Special Parameters:
- false (as the open argument): Creates newline-separated output without array brackets
- null/undefined: Uses default JSON array formatting

Usage Examples:
const JSONStream = require('JSONStream');
// Default JSON array format
const defaultStringify = JSONStream.stringify();
defaultStringify.pipe(process.stdout);
defaultStringify.write({id: 1, name: 'Alice'});
defaultStringify.write({id: 2, name: 'Bob'});
defaultStringify.end();
// Output: [\n{"id":1,"name":"Alice"}\n,\n{"id":2,"name":"Bob"}\n]\n
// Newline-separated format
const newlineStringify = JSONStream.stringify(false);
newlineStringify.pipe(process.stdout);
newlineStringify.write({id: 1});
newlineStringify.write({id: 2});
newlineStringify.end();
// Output: {"id":1}\n{"id":2}\n
// Custom formatting
const customStringify = JSONStream.stringify('[\n ', ',\n ', '\n]', 2);
customStringify.pipe(process.stdout);
customStringify.write({name: 'test'});
customStringify.end();

Create a writable stream that formats key-value pairs as a JSON object.
/**
* Creates a writable stream for JSON object stringification
* @param {string} open - Opening delimiter (default: '{\n')
* @param {string} sep - Property separator (default: '\n,\n')
* @param {string} close - Closing delimiter (default: '\n}\n')
* @param {number} indent - JSON indentation level (default: 0)
* @returns {Stream} Writable stream
*/
JSONStream.stringifyObject(open, sep, close, indent)

Input Format: Expects arrays in the format [key, value] when writing to the stream.
Usage Examples:
const JSONStream = require('JSONStream');
// Default JSON object format
const objStringify = JSONStream.stringifyObject();
objStringify.pipe(process.stdout);
objStringify.write(['name', 'Alice']);
objStringify.write(['age', 25]);
objStringify.write(['active', true]);
objStringify.end();
// Output: {\n"name":"Alice"\n,\n"age":25\n,\n"active":true\n}\n
// Compact format
const compact = JSONStream.stringifyObject('{', ',', '}');
compact.pipe(process.stdout);
compact.write(['id', 123]);
compact.write(['type', 'user']);
compact.end();
// Output: {"id":123,"type":"user"}All JSONStream functions return Node.js streams that emit standard events plus additional events:
// Standard stream events
stream.on('data', function(chunk) { /* Data received */ });
stream.on('end', function() { /* Stream ended */ });
stream.on('error', function(err) { /* Error occurred */ });
// JSONStream-specific events (parse only)
stream.on('header', function(data) { /* Content before first match */ });
stream.on('footer', function(data) { /* Content after last match */ });

Event Details:
- data: Emitted for each matching value (parse) or for each value written (stringify)
- header: Content from the JSON that appears before the first pattern match
- footer: Content from the JSON that appears after the last pattern match
- error: JSON parsing errors, with enhanced position information

JSONStream includes a command-line tool for processing JSON from stdin:
# Basic usage
echo '{"users":[{"name":"Alice"},{"name":"Bob"}]}' | JSONStream 'users.*'
# From file
cat data.json | JSONStream 'items.*.title'
# From HTTP request (with curl)
curl -s https://api.example.com/data | JSONStream 'results.*'

CLI Behavior:
- Reads JSON from stdin, parses it with the given pattern, and writes the matched values to stdout as a JSON array (delimiters [\n, \n,\n, \n]\n)

Type Definitions:

// Stream interfaces (Node.js built-in)
interface ReadableStream {
on(event: string, callback: Function): this;
pipe(destination: WritableStream): WritableStream;
}
interface WritableStream {
write(chunk: any): boolean;
end(chunk?: any): void;
}
interface TransformStream extends ReadableStream, WritableStream {}
// Pattern matching types
type JSONPathPattern = string | Array<JSONPathElement>;
type JSONPathElement =
| string // Exact key match
| boolean // true matches any key
| RegExp // Pattern match
| Function // Custom matcher: (key) => boolean
| {emitKey: boolean} // Include key in output
| {emitPath: boolean} // Include path in output
| {recurse: boolean}; // Recursive descent
// Map function type
type MapFunction = (value: any, path: string[]) => any;
// Event data types
interface KeyValueData {
key: string;
value: any;
}
interface PathValueData {
path: string[];
value: any;
}

JSONStream provides enhanced error handling for JSON parsing:
const parser = JSONStream.parse('data.*');
parser.on('error', function(err) {
// Enhanced error messages include position information
console.error('JSON Error:', err.message);
// Example: "Invalid JSON (Unexpected token } at position 42)"
});
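// Writing malformed input surfaces as an 'error' event on the stream rather
// than a thrown exception (a sketch; the input string is contrived):
parser.write('{"data": [1, 2, oops]}');  // 'oops' is not valid JSON, so the
                                          // handler above receives the error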
// Stringify errors
const stringify = JSONStream.stringify();
stringify.on('error', function(err) {
console.error('Stringify Error:', err.message);
});

Common Error Scenarios: