Comprehensive stream-processing classes for handling character data and tokens from a variety of input sources, including strings, files, and buffers. These classes provide the input/output infrastructure for ANTLR4 lexers and parsers.
Character streams handle raw text input from various sources.
/**
* Basic character input stream
*/
class InputStream {
/**
* Create input stream from string data
* @param data - String data to create stream from
*/
constructor(data: string);
/**
* Create input stream with Unicode decoding option
* @param data - String data to create stream from
* @param decodeToUnicodeCodePoints - Whether to decode to Unicode code points
*/
constructor(data: string, decodeToUnicodeCodePoints: boolean);
}
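Constructing an InputStream directly (a minimal sketch; the sample text is illustrative, and the CharStreams factory shown later is usually the more convenient entry point):
import { InputStream } from "antlr4";
// Plain string input
const basic = new InputStream("grammar Hello;");
// Decode to Unicode code points so characters outside the BMP occupy one position
const unicode = new InputStream("price: 💰42", true);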
/**
* Character stream with navigation and marking capabilities
*/
class CharStream {
/** Current position in stream */
index: number;
/** Total size of stream */
size: number;
/**
* Create character stream from string data
* @param data - String data to create stream from
*/
constructor(data: string);
/**
* Create character stream with Unicode decoding option
* @param data - String data to create stream from
* @param decodeToUnicodeCodePoints - Whether to decode to Unicode code points
*/
constructor(data: string, decodeToUnicodeCodePoints: boolean);
/** Reset stream to beginning */
reset(): void;
/** Consume one character and advance position */
consume(): void;
/**
* Look ahead at character without consuming
* @param offset - Offset from current position (1-based)
* @returns Character code at offset position
*/
LA(offset: number): number;
/**
* Look ahead at character (alias for LA)
* @param offset - Offset from current position
* @returns Character code at offset position
*/
LT(offset: number): number;
/**
* Mark current position for later return
* @returns Mark identifier
*/
mark(): number;
/**
* Release a previously created mark
* @param marker - Mark identifier to release
*/
release(marker: number): void;
/**
* Seek to specific position in stream
* @param index - Position to seek to
*/
seek(index: number): void;
/**
* Get text between two positions
* @param start - Start position
* @param stop - Stop position
* @returns Text between positions
*/
getText(start: number, stop: number): string;
/**
* Get string representation of stream
* @returns String representation
*/
toString(): string;
}
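A sketch of the decodeToUnicodeCodePoints option, assuming the runtime counts code points rather than UTF-16 code units when it is enabled:
import { CharStream } from "antlr4";
// Without code-point decoding, size reflects UTF-16 code units
const units = new CharStream("a😀b");
// With code-point decoding, the emoji is expected to occupy a single position
const codePoints = new CharStream("a😀b", true);
console.log(units.size, codePoints.size); // expected 4 vs. 3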
/**
* Character stream that reads from files
*/
class FileStream extends CharStream {
/** Name of the file */
fileName: string;
/**
* Create file stream
* @param fileName - Path to file to read
* @param encoding - Character encoding (optional)
* @param decodeToUnicodeCodePoints - Unicode decoding option (optional)
*/
constructor(fileName: string, encoding?: string, decodeToUnicodeCodePoints?: boolean);
}
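Reading a file into a character stream with FileStream (Node.js only; the path is an assumption):
import { FileStream } from "antlr4";
// Read the entire file synchronously into the stream
const stream = new FileStream("./input.txt", "utf8");
console.log(stream.fileName, stream.size);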
/**
* Factory class for creating character streams from various sources
*/
class CharStreams {
/**
* Create character stream from string
* @param data - String data
* @param decodeToUnicodeCodePoints - Unicode decoding option (optional)
* @returns Character stream
*/
static fromString(data: string, decodeToUnicodeCodePoints?: boolean): CharStream;
/**
* Create character stream from Buffer (Node.js)
* @param buffer - Buffer containing data
* @param encoding - Character encoding (optional)
* @returns Character stream
*/
static fromBuffer(buffer: Buffer, encoding?: string): CharStream;
/**
* Create character stream from Blob (Browser)
* @param blob - Blob containing data
* @param encoding - Character encoding
* @param onLoad - Success callback receiving stream
* @param onError - Error callback receiving error
*/
static fromBlob(blob: Blob, encoding: string, onLoad: (stream: CharStream) => void, onError: (error: Error) => void): void;
/**
* Create character stream from file path (asynchronous)
* @param path - File path to read
* @param encoding - Character encoding
* @param callback - Callback receiving error and stream
*/
static fromPath(path: string, encoding: string, callback: (err: Error, stream: CharStream) => void): void;
/**
* Create character stream from file path (synchronous)
* @param path - File path to read
* @param encoding - Character encoding
* @returns Character stream
*/
static fromPathSync(path: string, encoding: string): CharStream;
}
Token streams handle sequences of tokens produced by lexers.
/**
* Abstract token stream interface
*/
abstract class TokenStream {
/** Current position in token stream */
index: number;
/** Total number of tokens */
size: number;
/**
* Look ahead at token type without consuming
* @param i - Position offset (1-based)
* @returns Token type at offset
*/
LA(i: number): number;
/**
* Look ahead at token without consuming
* @param k - Position offset (1-based)
* @returns Token at offset
*/
LT(k: number): Token;
/**
* Get text for specified interval
* @param interval - Token interval (optional, defaults to entire stream)
* @returns Text representation
*/
getText(interval?: Interval): string;
/**
* Get hidden tokens to the left of specified token
* @param tokenIndex - Target token index
* @param channelIndex - Channel to search (optional)
* @returns Array of hidden tokens
*/
getHiddenTokensToLeft(tokenIndex: number, channelIndex?: number): Token[];
/**
* Get hidden tokens to the right of specified token
* @param tokenIndex - Target token index
* @param channelIndex - Channel to search (optional)
* @returns Array of hidden tokens
*/
getHiddenTokensToRight(tokenIndex: number, channelIndex?: number): Token[];
/**
* Get token at specific index
* @param idx - Token index
* @returns Token at index
*/
get(idx: number): Token;
}
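Recovering comments that a grammar routes to the hidden channel (a sketch; MyLexer and its comment rule are assumptions):
import { CharStreams, CommonTokenStream } from "antlr4";
import MyLexer from "./MyLexer.js";
const lexer = new MyLexer(CharStreams.fromString("x = 1; // trailing note"));
const tokens = new CommonTokenStream(lexer);
tokens.fill();
// Hidden-channel tokens immediately to the left of the EOF token (may be null if there are none)
const hidden = tokens.getHiddenTokensToLeft(tokens.size - 1);
console.log((hidden ?? []).map(t => t.text)); // e.g. ["// trailing note"]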
/**
* Buffered implementation of token stream
*/
class BufferedTokenStream extends TokenStream {
/** Token source (lexer) */
tokenSource: Lexer;
}
/**
* Standard implementation of buffered token stream
*/
class CommonTokenStream extends BufferedTokenStream {
/** Array of tokens in stream */
tokens: Token[];
/**
* Create token stream from lexer
* @param lexer - Lexer to generate tokens
*/
constructor(lexer: Lexer);
/**
* Create token stream from lexer with specific channel
* @param lexer - Lexer to generate tokens
* @param channel - Channel to filter tokens (default: 0)
*/
constructor(lexer: Lexer, channel: number);
/** Fill the token buffer by reading from lexer */
fill(): void;
}
Utility for modifying token streams without changing the original tokens.
/**
* Utility for rewriting token streams
*/
class TokenStreamRewriter {
/** Default program name constant */
static readonly DEFAULT_PROGRAM_NAME: string;
/**
* Create rewriter for token stream
* @param tokens - Token stream to rewrite
*/
constructor(tokens: CommonTokenStream);
/**
* Get the underlying token stream
* @returns Token stream being rewritten
*/
getTokenStream(): CommonTokenStream;
/**
* Insert text after specified token
* @param token - Token to insert after
* @param text - Text to insert
* @param programName - Program name (optional)
*/
insertAfter(token: Token, text: Text, programName?: string): void;
/**
* Insert text after specified index
* @param index - Index to insert after
* @param text - Text to insert
* @param programName - Program name (optional)
*/
insertAfter(index: number, text: Text, programName?: string): void;
/**
* Insert text before specified token
* @param token - Token to insert before
* @param text - Text to insert
* @param programName - Program name (optional)
*/
insertBefore(token: Token, text: Text, programName?: string): void;
/**
* Insert text before specified index
* @param index - Index to insert before
* @param text - Text to insert
* @param programName - Program name (optional)
*/
insertBefore(index: number, text: Text, programName?: string): void;
/**
* Replace single token with text
* @param token - Token to replace
* @param text - Replacement text
* @param programName - Program name (optional)
*/
replaceSingle(token: Token, text: Text, programName?: string): void;
/**
* Replace single token at index with text
* @param index - Token index to replace
* @param text - Replacement text
* @param programName - Program name (optional)
*/
replaceSingle(index: number, text: Text, programName?: string): void;
/**
* Replace range of tokens with text
* @param from - Start token or index
* @param to - End token or index
* @param text - Replacement text
* @param programName - Program name (optional)
*/
replace(from: Token | number, to: Token | number, text: Text, programName?: string): void;
/**
* Delete range of tokens
* @param from - Start token or index
* @param to - End token or index
* @param programName - Program name (optional)
*/
delete(from: number | Token, to: number | Token, programName?: string): void;
/**
* Get rewrite program by name
* @param name - Program name
* @returns Rewrite operations
*/
getProgram(name: string): Rewrites;
/**
* Initialize a new rewrite program
* @param name - Program name
* @returns Rewrite operations
*/
initializeProgram(name: string): Rewrites;
/**
* Get final rewritten text
* @returns Rewritten text
*/
getText(): string;
/**
* Get rewritten text for specific program
* @param program - Program name
* @returns Rewritten text
*/
getText(program: string): string;
/**
* Get rewritten text for interval
* @param interval - Token interval
* @param programName - Program name (optional)
* @returns Rewritten text
*/
getText(interval: Interval, programName?: string): string;
/**
* Reduce operations to single operation per index
* @param rewrites - Rewrite operations
* @returns Reduced operations map
*/
reduceToSingleOperationPerIndex(rewrites: Rewrites): Map<number, RewriteOperation>;
/**
* Concatenate operation texts
* @param a - First text
* @param b - Second text
* @returns Concatenated text
*/
catOpText(a: Text, b: Text): string;
/**
* Get operations of specific kind before index
* @param rewrites - Rewrite operations
* @param kind - Operation kind
* @param before - Index limit
* @returns Filtered operations
*/
getKindOfOps(rewrites: Rewrites, kind: any, before: number): RewriteOperation[];
}
/**
* Represents a text interval
*/
class Interval {
/** Start position */
start: number;
/** Stop position */
stop: number;
/**
* Create interval from numbers
* @param start - Start position
* @param stop - Stop position
*/
constructor(start: number, stop: number);
/**
* Create interval from tokens
* @param start - Start token
* @param stop - Stop token (optional)
*/
constructor(start: Token, stop: Token | undefined);
}
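Using an Interval to pull the text spanned by a range of tokens (a sketch; MyLexer and the indices are illustrative):
import { CharStreams, CommonTokenStream, Interval } from "antlr4";
import MyLexer from "./MyLexer.js";
const tokens = new CommonTokenStream(new MyLexer(CharStreams.fromString("var x = 42;")));
tokens.fill();
// Text covered by tokens 2 through 4, inclusive
console.log(tokens.getText(new Interval(2, 4)));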
/**
* Text type for rewrite operations
*/
type Text = string;
/**
* Map of rewrite operations
*/
type Rewrites = Map<number, RewriteOperation>;
/**
* Base class for rewrite operations
*/
interface RewriteOperation {
// Implementation details vary by operation type
}
Creating character streams from various sources:
import { CharStreams } from "antlr4";
// From string
const stringStream = CharStreams.fromString("hello world");
// From file (synchronous)
const fileStream = CharStreams.fromPathSync("./input.txt", "utf8");
// From file (asynchronous)
CharStreams.fromPath("./input.txt", "utf8", (err, stream) => {
  if (err) {
    console.error("Failed to read file:", err);
  } else {
    console.log("Stream created from file");
  }
});
// From buffer (Node.js)
const buffer = Buffer.from("hello world", "utf8");
const bufferStream = CharStreams.fromBuffer(buffer);
Working with token streams:
import { CommonTokenStream, CharStreams } from "antlr4";
import MyLexer from "./MyLexer.js";
// Create lexer
const input = CharStreams.fromString("var x = 42;");
const lexer = new MyLexer(input);
// Create token stream
const tokens = new CommonTokenStream(lexer);
// Fill token buffer
tokens.fill();
// Access tokens
console.log("Token count:", tokens.size);
console.log("First token:", tokens.get(0));
// Look ahead
console.log("Next token type:", tokens.LA(1));
console.log("Next token:", tokens.LT(1));
// Get text
console.log("All text:", tokens.getText());Using token stream rewriter:
import { TokenStreamRewriter, CommonTokenStream } from "antlr4";
// Assume lexer is an existing lexer instance
const tokens = new CommonTokenStream(lexer);
tokens.fill(); // populate the token buffer before rewriting
const rewriter = new TokenStreamRewriter(tokens);
// Insert text after first token
rewriter.insertAfter(0, " /* inserted */");
// Replace a token
rewriter.replaceSingle(2, "newValue");
// Delete a range
rewriter.delete(5, 7);
// Get rewritten text
console.log("Original:", tokens.getText());
console.log("Rewritten:", rewriter.getText());Navigating character streams:
import { CharStream } from "antlr4";
const stream = new CharStream("hello");
console.log("Size:", stream.size); // 5
console.log("Current char:", String.fromCharCode(stream.LA(1))); // 'h'
// Remember the current position (seek() expects a stream index, not a mark identifier)
const saved = stream.index;
// Consume characters
stream.consume(); // advance to 'e'
stream.consume(); // advance to 'l'
console.log("Current position:", stream.index); // 2
console.log("Current char:", String.fromCharCode(stream.LA(1))); // 'l'
// Seek back to the saved position
stream.seek(saved);
console.log("Back to:", String.fromCharCode(stream.LA(1))); // 'h'
// Get text range
console.log("Text 0-2:", stream.getText(0, 2)); // "hel"