Simple, fast, powerful parser toolkit for JavaScript using the Earley parsing algorithm
—
Best practices: Pending — does it follow best practices?
Impact: Pending — no eval scenarios have been run.
Risk: Pending — the risk profile of this skill.
Node.js stream integration for parsing large inputs or continuous data streams.
Writable stream wrapper that feeds data to a nearley parser as it arrives.
/**
 * Writable stream adapter that pipes incoming data into a nearley parser.
 * @param parser - the nearley Parser instance that receives each chunk
 */
class StreamWrapper extends require('stream').Writable {
  constructor(parser: Parser);
}
/**
 * Instance members of StreamWrapper
 */
interface StreamWrapper {
  /** Parser instance being fed by this stream */
  _parser: Parser;
}
Usage Examples:
const fs = require("fs");
const nearley = require("nearley");
const StreamWrapper = require("nearley/lib/stream");
// Create parser
const grammar = nearley.Grammar.fromCompiled(require("./grammar.js"));
const parser = new nearley.Parser(grammar);
// Create stream wrapper
const stream = new StreamWrapper(parser);
// Handle stream events
stream.on('finish', () => {
console.log("Parsing complete");
console.log("Results:", parser.results);
});
stream.on('error', (error) => {
console.error("Stream error:", error.message);
});
// Pipe file through parser
fs.createReadStream('input.txt', 'utf8').pipe(stream);The internal write method that processes chunks.
/**
 * Internal Writable#_write hook: feeds each incoming chunk to the wrapped parser.
 * @param chunk - raw data to feed
 * @param encoding - encoding of the chunk
 * @param callback - invoked once the chunk has been consumed
 */
_write(chunk: Buffer, encoding: string, callback: Function): void;
Usage Examples:
const StreamWrapper = require("nearley/lib/stream");
// Manual stream writing
const stream = new StreamWrapper(parser);
// Write data chunks manually
stream.write("first chunk");
stream.write(" second chunk");
stream.end(); // Signal end of input
// Or use with any readable stream
process.stdin.pipe(stream);Handle large files that don't fit in memory.
Usage Examples:
const fs = require("fs");
const readline = require("readline");
const nearley = require("nearley");
const StreamWrapper = require("nearley/lib/stream");
// Example: Parse line-by-line for very large files
async function parseHugeFile(filename) {
const grammar = nearley.Grammar.fromCompiled(require("./line-grammar.js"));
const parser = new nearley.Parser(grammar);
const stream = new StreamWrapper(parser);
const fileStream = fs.createReadStream(filename);
const rl = readline.createInterface({
input: fileStream,
crlfDelay: Infinity
});
// Process line by line
for await (const line of rl) {
stream.write(line + '\n');
}
stream.end();
return parser.results;
}
// Usage
parseHugeFile('huge-data.txt')
.then(results => console.log("Parsed results:", results))
.catch(error => console.error("Parse error:", error));Handle continuous data streams like network connections.
Usage Examples:
const net = require("net");
const nearley = require("nearley");
const StreamWrapper = require("nearley/lib/stream");
// Example: TCP server that parses incoming data
const server = net.createServer((socket) => {
console.log("Client connected");
// Create parser for this connection
const grammar = nearley.Grammar.fromCompiled(require("./protocol-grammar.js"));
const parser = new nearley.Parser(grammar);
const stream = new StreamWrapper(parser);
// Handle complete messages
stream.on('finish', () => {
if (parser.results.length > 0) {
const message = parser.results[0];
console.log("Received message:", message);
// Send response
socket.write("Message received\n");
}
});
// Pipe socket data through parser
socket.pipe(stream);
socket.on('end', () => {
console.log("Client disconnected");
});
});
server.listen(3000, () => {
console.log("Parser server listening on port 3000");
});Handle parsing errors in streaming contexts.
Usage Examples:
const fs = require("fs");
const nearley = require("nearley");
const StreamWrapper = require("nearley/lib/stream");
function parseFileStream(filename) {
return new Promise((resolve, reject) => {
const grammar = nearley.Grammar.fromCompiled(require("./grammar.js"));
const parser = new nearley.Parser(grammar);
const stream = new StreamWrapper(parser);
// Handle successful completion
stream.on('finish', () => {
if (parser.results.length === 0) {
reject(new Error("No valid parse found"));
} else {
resolve(parser.results);
}
});
// Handle stream errors
stream.on('error', (error) => {
reject(new Error(`Stream error: ${error.message}`));
});
// Create file stream with error handling
const fileStream = fs.createReadStream(filename, 'utf8');
fileStream.on('error', (error) => {
reject(new Error(`File error: ${error.message}`));
});
// Pipe with error propagation
fileStream.pipe(stream);
});
}
// Usage with error handling
parseFileStream('data.txt')
.then(results => {
console.log("Parse successful:", results);
})
.catch(error => {
console.error("Parse failed:", error.message);
});Configure stream behavior for different use cases.
Usage Examples:
const nearley = require("nearley");
const StreamWrapper = require("nearley/lib/stream");
// Parser with history for stream error recovery
const parser = new nearley.Parser(grammar, {
keepHistory: true,
lexer: customLexer
});
const stream = new StreamWrapper(parser);
// Configure stream options
stream.setDefaultEncoding('utf8');
// Handle backpressure for high-volume streams
let backpressure = false;
stream.on('drain', () => {
backpressure = false;
console.log("Stream ready for more data");
});
function writeToStream(data) {
if (!backpressure) {
backpressure = !stream.write(data);
if (backpressure) {
console.log("Stream backpressure detected");
}
}
}/**
* Default streaming lexer for character-by-character parsing
*/
class StreamLexer {
  constructor();
  /** Load (or append) input text, optionally restoring a saved state */
  reset(data: string, state?: object): void;
  /** Return the next single-character token, or undefined at end of input */
  next(): {value: string} | undefined;
  /** Snapshot the current line/column position */
  save(): {line: number, col: number};
  /** Build a human-readable error message for a token */
  formatError(token: object, message: string): string;
}
Usage Examples:
const nearley = require("nearley");
const StreamWrapper = require("nearley/lib/stream");

// Using the built-in StreamLexer with streams
const grammar = nearley.Grammar.fromCompiled(require("./char-grammar.js"));
const parser = new nearley.Parser(grammar); // StreamLexer is the default lexer
const wrapper = new StreamWrapper(parser);

// Input is consumed one character at a time, and the lexer tracks
// line/column positions for error reporting
wrapper.write("input text with\nmultiple lines");
wrapper.end();

// Error messages will include line/column info:
// "Syntax error at line 2 col 5: ..."