JavaScript library for compressing and decompressing ZIP files in browsers, Node.js, and Deno with support for encryption, ZIP64, and web workers
Stream-based ZIP operations for efficient processing of large files without loading entire archives into memory.
Streaming ZIP reader that processes entries as they're encountered.
class ZipReaderStream<T> {
  constructor(options?: ZipReaderConstructorOptions);
  readonly readable: ReadableStream<Omit<Entry, "getData"> & { readable?: ReadableStream<Uint8Array> }>;
  readonly writable: WritableStream<T>;
}

readable: Stream of entry metadata with optional data streams
writable: Input stream for ZIP data

ZipReaderStream is a transform stream: ZIP bytes are piped into writable, and entry metadata (with optional per-entry data streams) comes out of readable.
import { ZipReaderStream } from "@zip.js/zip.js";
// Create stream
const zipReaderStream = new ZipReaderStream();

// Pipe ZIP data into the stream
fetch('large-archive.zip')
  .then(response => response.body.pipeTo(zipReaderStream.writable));

// Process entries as they're discovered
const reader = zipReaderStream.readable.getReader();
try {
  while (true) {
    const { done, value: entry } = await reader.read();
    if (done) break;
    console.log(`Found entry: ${entry.filename}`);
    if (!entry.directory && entry.readable) {
      // Stream the entry content
      const entryReader = entry.readable.getReader();
      try {
        while (true) {
          const { done, value: chunk } = await entryReader.read();
          if (done) break;
          // Process chunk of file data
          console.log(`Read ${chunk.length} bytes from ${entry.filename}`);
        }
      } finally {
        entryReader.releaseLock();
      }
    }
  }
} finally {
  reader.releaseLock();
}
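Where ReadableStream async iteration is available (Node.js, Deno, and recent browsers), the manual reader loop above can be written more compactly; a minimal sketch:

import { ZipReaderStream } from "@zip.js/zip.js";

const zipReaderStream = new ZipReaderStream();
fetch('large-archive.zip')
  .then(response => response.body.pipeTo(zipReaderStream.writable));

// for await...of replaces the explicit getReader()/read() loop
for await (const entry of zipReaderStream.readable) {
  console.log(`Found entry: ${entry.filename}`);
}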
Streaming ZIP writer for creating ZIP files incrementally.

class ZipWriterStream {
  constructor(options?: ZipWriterConstructorOptions);
  readonly readable: ReadableStream<Uint8Array>;
  readonly zipWriter: ZipWriter<unknown>;
  transform<T>(path: string): { readable: ReadableStream<T>; writable: WritableStream<T> };
  writable<T>(path: string): WritableStream<T>;
  close(comment?: Uint8Array, options?: ZipWriterCloseOptions): Promise<unknown>;
}

readable: Stream of ZIP file data
zipWriter: Underlying ZipWriter instance
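The zipWriter property exposes the underlying ZipWriter, which can be handy for adding small, non-streamed entries alongside streamed ones. A minimal sketch using the library's TextReader; the entry name and content are illustrative, and the output is drained into a Blob so the write is not blocked by backpressure:

import { ZipWriterStream, TextReader } from "@zip.js/zip.js";

const zipWriterStream = new ZipWriterStream();
// Consume the ZIP output so writes are not blocked by backpressure
const outputPromise = new Response(zipWriterStream.readable).blob();
// Add a small entry directly through the underlying ZipWriter
await zipWriterStream.zipWriter.add("metadata.txt", new TextReader("created by zip.js"));
await zipWriterStream.close();
const zipBlob = await outputPromise;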
Creates a transform stream for a specific entry.

const zipWriterStream = new ZipWriterStream();
// Create transform stream for an entry
const { readable, writable } = zipWriterStream.transform("data.txt");
// Pipe data through the transform
fetch('source-data.txt')
  .then(response => response.body.pipeTo(writable));
// The readable stream contains the processed data for the ZIP entry

Creates a writable stream for a specific entry.
const zipWriterStream = new ZipWriterStream();
// Create writable stream for an entry
const entryWritable = zipWriterStream.writable("log-file.txt");
// Write data to the entry
const writer = entryWritable.getWriter();
await writer.write(new TextEncoder().encode("Log entry 1\n"));
await writer.write(new TextEncoder().encode("Log entry 2\n"));
await writer.close();

Finalizes the ZIP stream and returns the complete ZIP data.
// After adding all entries
const zipData = await zipWriterStream.close();
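The signature above also accepts an optional archive comment as bytes; a small sketch (the comment text is illustrative):

// Finalize with a ZIP file comment (must be encoded as bytes)
await zipWriterStream.close(new TextEncoder().encode("nightly backup archive"));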
Extracting a large ZIP without loading it into memory:

import { ZipReaderStream } from "@zip.js/zip.js";

async function extractLargeZip(zipBlob) {
  const zipReaderStream = new ZipReaderStream();
  // Start reading the ZIP
  zipBlob.stream().pipeTo(zipReaderStream.writable);
  // Process entries as they're found
  for await (const entry of zipReaderStream.readable) {
    console.log(`Processing: ${entry.filename} (${entry.uncompressedSize} bytes)`);
    if (!entry.directory && entry.readable) {
      // Stream each entry to a file or process incrementally
      await processEntryStream(entry.filename, entry.readable);
    }
  }
}

async function processEntryStream(filename, readable) {
  const reader = readable.getReader();
  let totalBytes = 0;
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      // Process chunk without loading entire file
      totalBytes += value.length;
      // Example: Save chunk to disk, upload to server, etc.
      await saveChunkToFile(filename, value);
    }
  } finally {
    reader.releaseLock();
  }
  console.log(`Completed ${filename}: ${totalBytes} bytes processed`);
}
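saveChunkToFile is left undefined above; a hypothetical Node.js implementation that appends each chunk to a file could look like this:

import { appendFile } from "node:fs/promises";

// Hypothetical helper: append each decompressed chunk to a file on disk
async function saveChunkToFile(filename, chunk) {
  await appendFile(filename, chunk);
}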
Building a ZIP from multiple source streams:

import { ZipWriterStream } from "@zip.js/zip.js";

async function createZipFromStreams() {
  const zipWriterStream = new ZipWriterStream({
    level: 6,
    keepOrder: true
  });
  // Pipe the ZIP output to a destination
  const zipDestination = new WritableStream({
    write(chunk) {
      // Send chunk to server, save to file, etc.
      console.log(`ZIP chunk: ${chunk.length} bytes`);
    }
  });
  zipWriterStream.readable.pipeTo(zipDestination);
  // Add multiple entries from streams
  await addStreamingEntry(zipWriterStream, "log1.txt", logStream1);
  await addStreamingEntry(zipWriterStream, "log2.txt", logStream2);
  await addStreamingEntry(zipWriterStream, "data.json", jsonStream);
  // Finalize ZIP
  await zipWriterStream.close();
}

async function addStreamingEntry(zipWriterStream, filename, sourceStream) {
  const entryWritable = zipWriterStream.writable(filename);
  await sourceStream.pipeTo(entryWritable);
}
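logStream1, logStream2, and jsonStream are assumed to be ReadableStreams of bytes; a hypothetical way to build one from an in-memory string:

// Hypothetical helper: wrap a string in a byte ReadableStream
function streamFromText(text) {
  return new Blob([text]).stream();
}

const logStream1 = streamFromText("first log line\n");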
Creating a ZIP in real time, streaming chunks to a client as they are produced:

import { ZipWriterStream } from "@zip.js/zip.js";

class RealTimeZipCreator {
  constructor() {
    this.zipWriterStream = new ZipWriterStream();
    this.setupZipOutput();
  }

  setupZipOutput() {
    // Stream ZIP data as it's created
    this.zipWriterStream.readable.pipeTo(new WritableStream({
      write: (chunk) => this.sendZipChunk(chunk),
      close: () => this.finalizeZip()
    }));
  }

  async addFileFromStream(filename, readableStream) {
    const entryWritable = this.zipWriterStream.writable(filename);
    await readableStream.pipeTo(entryWritable);
  }

  async addTextEntry(filename, text) {
    const entryWritable = this.zipWriterStream.writable(filename);
    const writer = entryWritable.getWriter();
    await writer.write(new TextEncoder().encode(text));
    await writer.close();
  }

  async finalize() {
    return await this.zipWriterStream.close();
  }

  sendZipChunk(chunk) {
    // Send to client via WebSocket, HTTP stream, etc.
    console.log(`Sending ZIP chunk: ${chunk.length} bytes`);
  }

  finalizeZip() {
    console.log("ZIP creation completed");
  }
}

// Usage
const creator = new RealTimeZipCreator();
await creator.addTextEntry("readme.txt", "Welcome!");
await creator.addFileFromStream("data.csv", csvInputStream);
await creator.finalize();
Transforming data on its way into an archive:

import { ZipWriterStream } from "@zip.js/zip.js";

async function createTransformPipeline() {
  const zipWriterStream = new ZipWriterStream();
  // Transform data while adding to ZIP
  const { readable, writable } = zipWriterStream.transform("processed-data.txt");
  // Create processing pipeline
  const processedStream = new TransformStream({
    transform(chunk, controller) {
      // Process each chunk (e.g., uppercase text)
      const processed = new TextEncoder().encode(
        new TextDecoder().decode(chunk).toUpperCase()
      );
      controller.enqueue(processed);
    }
  });
  // Pipeline: source → processing → ZIP entry
  await fetch('source-data.txt')
    .then(response => response.body)
    .then(stream => stream.pipeThrough(processedStream))
    .then(stream => stream.pipeTo(writable));
  // Get final ZIP
  const zipData = await zipWriterStream.close();
  return zipData;
}
Writing several entries concurrently:

import { ZipWriterStream } from "@zip.js/zip.js";

async function createZipConcurrently() {
  const zipWriterStream = new ZipWriterStream({
    keepOrder: false // Allow concurrent processing
  });
  // Process multiple entries concurrently
  const entryPromises = [
    addLargeFile(zipWriterStream, "file1.dat", source1),
    addLargeFile(zipWriterStream, "file2.dat", source2),
    addLargeFile(zipWriterStream, "file3.dat", source3)
  ];
  // Wait for all entries to be added
  await Promise.all(entryPromises);
  // Finalize ZIP
  return await zipWriterStream.close();
}

async function addLargeFile(zipWriterStream, filename, sourceStream) {
  const entryWritable = zipWriterStream.writable(filename);
  // Add progress tracking
  let bytesProcessed = 0;
  const progressStream = new TransformStream({
    transform(chunk, controller) {
      bytesProcessed += chunk.length;
      console.log(`${filename}: ${bytesProcessed} bytes processed`);
      controller.enqueue(chunk);
    }
  });
  await sourceStream
    .pipeThrough(progressStream)
    .pipeTo(entryWritable);
}
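For long-running transfers it can help to make a pipe cancelable; a sketch using the standard AbortSignal option of pipeTo (the abortable variant is hypothetical, not part of zip.js):

// Hypothetical cancelable variant of addLargeFile
async function addLargeFileAbortable(zipWriterStream, filename, sourceStream, signal) {
  const entryWritable = zipWriterStream.writable(filename);
  // pipeTo rejects with an AbortError when the signal fires
  await sourceStream.pipeTo(entryWritable, { signal });
}

const controller = new AbortController();
// Call controller.abort() to cancel the transfer mid-stream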
Processing a large ZIP memory-efficiently, dispatching on file type:

import { ZipReaderStream } from "@zip.js/zip.js";

async function processLargeZipMemoryEfficient(zipUrl) {
  const response = await fetch(zipUrl);
  const zipReaderStream = new ZipReaderStream();
  // Process ZIP without loading into memory
  response.body.pipeTo(zipReaderStream.writable);
  const results = [];
  for await (const entry of zipReaderStream.readable) {
    if (entry.filename.endsWith('.log') && entry.readable) {
      // Process log files line by line
      const lines = await processLogStream(entry.readable);
      results.push({
        filename: entry.filename,
        lineCount: lines
      });
    } else if (entry.filename.endsWith('.json') && entry.readable) {
      // Parse JSON incrementally
      const data = await parseJsonStream(entry.readable);
      results.push({
        filename: entry.filename,
        records: data.length
      });
    }
  }
  return results;
}
async function processLogStream(readable) {
  const reader = readable.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  let lineCount = 0;
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      // Process complete lines
      const lines = buffer.split('\n');
      buffer = lines.pop(); // Keep incomplete line
      lineCount += lines.length;
      // Process lines without storing them
      for (const line of lines) {
        if (line.includes('ERROR')) {
          console.log(`Error found: ${line}`);
        }
      }
    }
    // Count a trailing line that has no final newline
    if (buffer.length > 0) lineCount++;
  } finally {
    reader.releaseLock();
  }
  return lineCount;
}
async function parseJsonStream(readable) {
  // Simplified example: this buffers the whole document; for truly huge
  // JSON files, use an incremental JSON parser instead
  const reader = readable.getReader();
  const decoder = new TextDecoder();
  let jsonString = '';
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      jsonString += decoder.decode(value, { stream: true });
    }
  } finally {
    reader.releaseLock();
  }
  return JSON.parse(jsonString);
}

// Configure for optimal streaming performance
import { configure } from "@zip.js/zip.js";
configure({
  chunkSize: 1024 * 64,         // 64KB chunks for streaming
  maxWorkers: 4,                // Limit workers for memory control
  useCompressionStream: true,   // Use native streams when available
  terminateWorkerTimeout: 2000  // Quick cleanup
});

Manually handling backpressure when copying a source stream into an entry:

async function handleBackpressure(sourceStream, zipWriterStream, filename) {
  const entryWritable = zipWriterStream.writable(filename);
  const writer = entryWritable.getWriter();
  const reader = sourceStream.getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      // Wait for writer to be ready (handles backpressure)
      await writer.ready;
      await writer.write(value);
    }
    // Close the entry only after all chunks are written successfully
    await writer.close();
  } finally {
    reader.releaseLock();
  }
}

The streaming API is particularly powerful for:

- processing large archives without loading them into memory
- generating ZIP output in real time as entries are written
- transforming data on its way into an archive
- writing multiple entries concurrently with progress tracking
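As a closing sketch, the two stream classes compose naturally; the function below (names and the .txt filter are illustrative, and a global Response constructor is assumed) repacks selected files from one archive into another:

import { ZipReaderStream, ZipWriterStream } from "@zip.js/zip.js";

async function repackTextFiles(sourceUrl) {
  const zipReaderStream = new ZipReaderStream();
  const zipWriterStream = new ZipWriterStream();
  // Collect the repacked ZIP as a Blob while entries stream through
  const outputBlobPromise = new Response(zipWriterStream.readable).blob();

  const response = await fetch(sourceUrl);
  response.body.pipeTo(zipReaderStream.writable);

  for await (const entry of zipReaderStream.readable) {
    if (!entry.directory && entry.readable && entry.filename.endsWith(".txt")) {
      // Copy the decompressed bytes straight into the new archive
      await entry.readable.pipeTo(zipWriterStream.writable(entry.filename));
    }
  }

  await zipWriterStream.close();
  return outputBlobPromise;
}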
Install with Tessl CLI
npx tessl i tessl/npm-zip-js--zip-js