Web Streams API compatible CSV generation for browser environments and for modern Node.js applications that support the ReadableStream interface.
Creates a ReadableStream for CSV generation compatible with the Web Streams standard.
/**
* Creates a ReadableStream for CSV generation using Web Streams API
* @param options - Configuration options for generation
* @returns ReadableStream<Buffer> compatible with Web Streams standard
*/
function generate(options?: Options): ReadableStream<Buffer>;

Usage Examples:
import { generate } from "csv-generate/stream";
// Basic web stream usage
const stream = generate({
  columns: 3,
  length: 10,
  seed: 1
});
// Using async iteration (modern browsers/Node.js)
for await (const chunk of stream) {
  console.log(chunk.toString()); // Buffer converted to string
}
// Using a stream reader
// Note: a ReadableStream can only be consumed once, so use either async
// iteration or a reader on a given stream, not both
const reader = stream.getReader();
try {
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value); // Buffer containing CSV data
  }
} finally {
  reader.releaseLock();
}

The Web Streams implementation is optimized for browser environments without Node.js dependencies.
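Because the chunks are plain byte arrays, they can be decoded to text with the standard TextDecoderStream instead of Node's Buffer. The following is a minimal sketch, assuming a runtime where ReadableStream supports async iteration (Node.js 18+ and recent browsers):

// Sketch: decoding CSV chunks to text with Web platform APIs only
import { generate } from "csv-generate/stream";

const byteStream = generate({ columns: 2, length: 5, seed: 1 });

// TextDecoderStream converts byte chunks into string chunks
const textStream = byteStream.pipeThrough(new TextDecoderStream());

for await (const text of textStream) {
  console.log(text); // CSV rows as plain strings
}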
// Browser usage with fetch-like patterns
const csvStream = generate({
  columns: ['ascii', 'int'],
  length: 100
});
// Transform stream for processing
// (processChunk is a placeholder for application-specific chunk handling)
const processedStream = csvStream.pipeThrough(
  new TransformStream({
    transform(chunk, controller) {
      // Process each chunk of CSV data
      const processed = processChunk(chunk);
      controller.enqueue(processed);
    }
  })
);

Usage Examples:
<!-- Browser usage -->
<script type="module">
  import { generate } from "csv-generate/browser/esm";

  const stream = generate({
    columns: 2,
    length: 5,
    seed: 42
  });
  // Process the stream in the browser (no Node.js Buffer required)
  const reader = stream.getReader();
  const decoder = new TextDecoder();
  let csvData = '';
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    csvData += decoder.decode(value, { stream: true });
  }
  csvData += decoder.decode();
  console.log(csvData);
</script>

Web Streams provide built-in backpressure and flow control mechanisms.
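Besides pulling chunks manually, backpressure can be delegated to pipeTo: the stream is not asked for more data while the sink's write() promise is pending. A minimal sketch, where the 10 ms delay simply simulates a slow consumer:

import { generate } from "csv-generate/stream";

const csvStream = generate({ columns: 5, length: 1000 });

// pipeTo resolves once the whole stream has been written to the sink
await csvStream.pipeTo(
  new WritableStream({
    async write(chunk) {
      // While this promise is pending, no further chunks are generated
      await new Promise((resolve) => setTimeout(resolve, 10));
      console.log(`wrote ${chunk.length} bytes`);
    }
  })
);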
// Stream with backpressure handling
const stream = generate({
  columns: 5,
  length: 1000
});
// Controlled consumption with backpressure
const reader = stream.getReader();
const processChunk = async (chunk: Buffer) => {
  // Simulate slow processing
  await new Promise(resolve => setTimeout(resolve, 10));
  return chunk.toString();
};
try {
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    const processed = await processChunk(value);
    // Natural backpressure through async processing
  }
} finally {
  reader.releaseLock();
}

Usage Examples:
// Pipe to Response (server environments)
const csvStream = generate({
  columns: 3,
  length: 1000,
  seed: Date.now()
});
// In a web server context
const response = new Response(csvStream, {
  headers: {
    'Content-Type': 'text/csv',
    'Content-Disposition': 'attachment; filename="data.csv"'
  }
});
// Transform and pipe
// Note: a stream can only be consumed once, so generate a fresh stream here
// if csvStream has already been handed to a Response
const uppercaseStream = csvStream.pipeThrough(
  new TransformStream({
    transform(chunk, controller) {
      const text = chunk.toString().toUpperCase();
      controller.enqueue(Buffer.from(text));
    }
  })
);

The Web Streams implementation uses a fixed buffer configuration optimized for streaming performance.
// Internal stream configuration (informational)
const stream = new ReadableStream(
  {
    async pull(controller) {
      // Internal: generates chunks and enqueues them
    }
  },
  { highWaterMark: 1024 } // Fixed 1KB buffer size
);

Usage Examples:
// Memory-efficient processing of large datasets
const largeDataStream = generate({
  columns: 10,
  length: 1000000, // 1 million records
  seed: 12345
});
// Process without loading everything into memory
const processInChunks = async (stream: ReadableStream<Buffer>) => {
  const reader = stream.getReader();
  let recordCount = 0;
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      // Count records in this chunk
      const lines = value.toString().split('\n').length - 1;
      recordCount += lines;
      // Process the chunk without storing it (processChunk as defined above)
      await processChunk(value);
    }
    console.log(`Processed ${recordCount} records`);
  } finally {
    reader.releaseLock();
  }
};
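When the generated data needs to end up on disk in a Node.js environment, the Web stream can be bridged to the classic stream API. A sketch, assuming Node.js 17+ (where Readable.fromWeb is available); the output path data.csv is arbitrary:

import { createWriteStream } from "node:fs";
import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises";
import { generate } from "csv-generate/stream";

const webStream = generate({ columns: 10, length: 1000000, seed: 12345 });

// Readable.fromWeb adapts the Web ReadableStream to a Node.js Readable;
// pipeline handles backpressure and error propagation to the file sink
await pipeline(Readable.fromWeb(webStream), createWriteStream("data.csv"));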