AVSC ships browser-optimized builds with blob support, offered in several bundle sizes to cover use cases ranging from types-only up to full RPC functionality.
AVSC provides several browser builds optimized for different use cases:
The full build provides complete functionality, including blob support, streaming, and RPC services.
// Available at: avsc/etc/browser/avsc.js
const avsc = require('avsc/etc/browser/avsc');
/**
 * Create a readable stream of decoded records from a browser Blob.
 *
 * @param blob - Blob containing Avro object-container data
 * @param opts - Decoder options (optional; see BlobDecoderOptions below)
 * @returns Readable stream emitting one decoded record per 'data' event
 */
function createBlobDecoder(blob, opts);
/**
* Create duplex stream for encoding records to blobs
* @param schema - Avro schema for encoding
* @param opts - Encoder options
* @returns Duplex stream that outputs blobs
*/
function createBlobEncoder(schema, opts);Smaller build with only type system functionality.
// Available at: avsc/etc/browser/avsc-types.js
const avsc = require('avsc/etc/browser/avsc-types');
// Includes: parse, Type, types namespace
// Excludes: file operations, services, streamingBuild with RPC services support but no file operations.
// Available at: avsc/etc/browser/avsc-services.js
const avsc = require('avsc/etc/browser/avsc-services');
// Includes: parse, Type, Service, streaming, services
// Excludes: file operations, blob operationsCreate readable stream of records from browser Blob objects.
/**
* Create readable stream of records from a blob
* @param blob - Blob or File containing Avro container data
* @param opts - Decoder options
* @returns Readable stream of decoded records
*/
function createBlobDecoder(blob, opts);Usage Examples:
// Handle file upload
const fileInput = document.getElementById('avro-file');
fileInput.addEventListener('change', (event) => {
const file = event.target.files[0];
if (file) {
const decoder = avsc.createBlobDecoder(file);
decoder.on('data', (record) => {
console.log('Record:', record);
});
decoder.on('end', () => {
console.log('Finished reading file');
});
}
});
// With options
const decoder = avsc.createBlobDecoder(blob, {
noDecode: false,
codecs: {
'custom': customDecompressionCodec
}
});Create duplex stream for encoding records to Blob objects.
/**
* Create duplex stream for encoding records to blobs
* @param schema - Avro schema for encoding
* @param opts - Encoder options
* @returns Duplex stream that outputs Blob objects
*/
function createBlobEncoder(schema, opts);Usage Examples:
const schema = {
type: 'record',
name: 'LogEntry',
fields: [
{name: 'timestamp', type: 'long'},
{name: 'message', type: 'string'}
]
};
const encoder = avsc.createBlobEncoder(schema);
encoder.on('data', (blob) => {
// Create download link
const url = URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = url;
link.download = 'data.avro';
link.click();
URL.revokeObjectURL(url);
});
// Write records
encoder.write({timestamp: Date.now(), message: 'User logged in'});
encoder.write({timestamp: Date.now(), message: 'Page viewed'});
encoder.end();Seamless integration with browser File API for handling user uploads.
// Handle dropped files
function handleDrop(event) {
event.preventDefault();
const files = event.dataTransfer.files;
for (const file of files) {
if (file.name.endsWith('.avro')) {
const decoder = avsc.createBlobDecoder(file);
decoder.on('metadata', (type, codec) => {
console.log('File schema:', type.schema());
console.log('Compression:', codec);
});
decoder.on('data', (record) => {
// Process record
});
}
}
}AVSC can be used in Web Workers for background processing.
// In main thread
const worker = new Worker('avro-worker.js');
worker.postMessage({schema, records});
// In avro-worker.js
importScripts('node_modules/avsc/etc/browser/avsc.js');
self.onmessage = function(event) {
const {schema, records} = event.data;
const type = avsc.parse(schema);
const encoded = records.map(record => {
return type.toBuffer(record);
});
self.postMessage({encoded});
};Process streaming Avro data from network requests.
async function processAvroStream(url) {
const response = await fetch(url);
const reader = response.body.getReader();
const decoder = new avsc.streams.BlockDecoder();
decoder.on('data', (record) => {
console.log('Streamed record:', record);
});
// Pump response stream to decoder
const pump = async () => {
while (true) {
const {done, value} = await reader.read();
if (done) {
decoder.end();
break;
}
decoder.write(value);
}
};
pump().catch(console.error);
}Choose the right build for your use case:
const avsc = require('avsc/etc/browser/avsc-types');
// Available: parse, Type, types
const type = avsc.parse(schema);
const isValid = type.isValid(data);const avsc = require('avsc/etc/browser/avsc-services');
// Available: parse, Type, Service, streams
const service = avsc.Service.forProtocol(protocol);
const client = service.createClient();const avsc = require('avsc/etc/browser/avsc');
// Available: All features including blob operations
const decoder = avsc.createBlobDecoder(file);

/** Options accepted by createBlobDecoder. */
interface BlobDecoderOptions {
  /** Skip decoding, return raw buffers */
  noDecode?: boolean;
  /** Reader schema for evolution */
  readerSchema?: any;
  /** Custom compression codecs */
  codecs?: {[name: string]: Codec};
  /** Hook for custom schema parsing */
  parseHook?: (schema: any) => Type;
}

/** Options accepted by createBlobEncoder. */
interface BlobEncoderOptions {
  /** Block size for compression */
  blockSize?: number;
  /** Compression codec */
  codec?: string;
  /** Custom compression codecs */
  codecs?: {[name: string]: Codec};
  /** Whether to write container header */
  writeHeader?: boolean;
  /** Custom sync marker */
  syncMarker?: Buffer;
}

// Browser-specific types

/** File augmented with an explicit blob() accessor. */
interface FileWithBlob extends File {
  /** File as blob */
  blob(): Promise<Blob>;
}

/** Blob carrying optional Avro container metadata. */
interface AvroBlob extends Blob {
  /** Avro-specific metadata */
  metadata?: {
    schema: any;
    codec: string;
  };
}

/** Node-style async codec operating on ArrayBuffers in the browser. */
type BrowserCodec = (buffer: ArrayBuffer, callback: (err: Error | null, result?: ArrayBuffer) => void) => void;