Node.js-specific functionality for reading and writing Avro container files on the filesystem, with streaming support and header extraction.
Create readable stream of records from a local Avro file.
/**
* Create readable stream of records from a local Avro file
* @param path - Path to Avro container file
* @param opts - Decoder options
* @returns Readable stream of decoded records
*/
function createFileDecoder(path, opts);

Usage Examples:
const avsc = require('avsc');
// Basic file decoding
const decoder = avsc.createFileDecoder('./data.avro');
decoder.on('data', (record) => {
console.log('Record:', record);
});
decoder.on('metadata', (type, codec, header) => {
console.log('Schema:', type.schema());
console.log('Codec:', codec);
});
decoder.on('end', () => {
console.log('Finished reading');
});
// With options
const decoder = avsc.createFileDecoder('./data.avro', {
  noDecode: false,
  codecs: {
    'custom-compression': customCodec
  }
});

Create writable stream of records to a local Avro file.
/**
* Create writable stream of records to a local Avro file
* @param path - Output file path
* @param schema - Avro schema for records
* @param opts - Encoder options
* @returns Writable stream for encoding records
*/
function createFileEncoder(path, schema, opts);

Usage Examples:
const avsc = require('avsc');
// Create encoder with schema
const schema = {
type: 'record',
name: 'User',
fields: [
{name: 'name', type: 'string'},
{name: 'age', type: 'int'}
]
};
const encoder = avsc.createFileEncoder('./output.avro', schema);
// Write records
encoder.write({name: 'Alice', age: 30});
encoder.write({name: 'Bob', age: 25});
encoder.end();
// With compression and options
const encoder = avsc.createFileEncoder('./compressed.avro', schema, {
  codec: 'deflate',
  blockSize: 65536,
  syncMarker: Buffer.alloc(16) // sync markers are exactly 16 bytes per the Avro spec
});

Extract a container file's header synchronously.
/**
* Extract a container file's header synchronously
* @param path - Path to Avro container file
* @param opts - Extraction options
* @returns Header object or null if invalid file
*/
function extractFileHeader(path, opts);

Usage Examples:
const avsc = require('avsc');
// Extract header information
const header = avsc.extractFileHeader('./data.avro');
if (header) {
console.log('Schema:', header.schema);
console.log('Codec:', header.codec);
console.log('Sync marker:', header.sync);
} else {
console.log('Invalid Avro file');
}
// With options
const header = avsc.extractFileHeader('./data.avro', {
decode: true, // Decode header values (default: true)
size: 8192 // Buffer size for reading (default: 4096)
});

/** Options accepted by createFileDecoder and record-decoding streams. */
interface DecoderOptions {
  /** If true, skip decoding and emit raw buffers instead of records. */
  noDecode?: boolean;
  /** Reader schema used for schema evolution against the file's writer schema. */
  readerSchema?: string | object | Type;
  /** Additional decompression codecs, keyed by codec name. */
  codecs?: {[name: string]: Codec};
  /** Hook for custom schema parsing; receives the raw schema and returns a Type. */
  parseHook?: (schema: any) => Type;
}
/** Options accepted by createFileEncoder and record-encoding streams. */
interface EncoderOptions {
  /** Bytes buffered per block before it is flushed (default: 65536). */
  blockSize?: number;
  /** Name of the compression codec applied to each block. */
  codec?: string;
  /** Additional compression codecs, keyed by codec name. */
  codecs?: {[name: string]: Codec};
  /** Whether (or when) the container file header is written. */
  writeHeader?: boolean | 'always' | 'never' | 'auto';
  /** Custom synchronization marker (NOTE(review): Avro markers are 16 bytes — confirm length is enforced). */
  syncMarker?: Buffer;
}
/** Options accepted by extractFileHeader. */
interface HeaderExtractionOptions {
  /** Decode header values instead of returning raw bytes (default: true). */
  decode?: boolean;
  /** Number of bytes to read when scanning for the header (default: 4096). */
  size?: number;
}
/** Decoded header of an Avro container file, as returned by extractFileHeader. */
interface FileHeader {
  /** The file's Avro schema, as a plain schema object. */
  schema: any;
  /** Name of the compression codec used for the file's blocks. */
  codec: string;
  /** The file's synchronization marker. */
  sync: Buffer;
  /** Raw header metadata entries, as undecoded buffers. */
  meta: {[key: string]: Buffer};
}
/**
 * Compression/decompression codec: transforms a block buffer and reports the
 * transformed buffer (or an error) through a Node-style callback.
 */
type Codec = (
  buffer: Buffer,
  callback: (err: Error | null, result?: Buffer) => void
) => void;