JavaScript library for compressing and decompressing ZIP files in browsers, Node.js, and Deno with support for encryption, ZIP64, and web workers
Classes for reading from and writing to different data sources and destinations. These form the foundation for all ZIP operations in zip.js.
Readers provide data input for ZIP operations, supporting various data sources.
class Reader<Type> implements Initializable, ReadableReader {
constructor(value: Type);
readonly readable: ReadableStream;
readonly size: number;
init?(): Promise<void>;
readUint8Array(index: number, length: number): Promise<Uint8Array>;
}
All readers implement the same interface but read from different data sources.
Reads from string data.
class TextReader extends Reader<string> {}
import { TextReader } from "@zip.js/zip.js";
const reader = new TextReader("Hello, World!");
console.log(reader.size); // String length in bytes (UTF-8)
// Use with ZIP operations
const zipWriter = new ZipWriter(new BlobWriter());
await zipWriter.add("greeting.txt", reader);
Reads from Blob objects (files, images, etc.).
class BlobReader extends Reader<Blob> {}import { BlobReader } from "@zip.js/zip.js";
// From file input
const fileInput = document.querySelector('input[type="file"]');
const file = fileInput.files[0];
const reader = new BlobReader(file);
// From created blob
const textBlob = new Blob(["Some text content"], { type: "text/plain" });
const blobReader = new BlobReader(textBlob);
// Use in ZIP
await zipWriter.add(file.name, new BlobReader(file));
Reads from Base64-encoded Data URIs.
class Data64URIReader extends Reader<string> {}
import { Data64URIReader } from "@zip.js/zip.js";
const dataUri = "data:text/plain;base64,SGVsbG8gV29ybGQ="; // "Hello World"
const reader = new Data64URIReader(dataUri);
await zipWriter.add("from-data-uri.txt", reader);
Reads from typed arrays (raw binary data).
class Uint8ArrayReader extends Reader<Uint8Array> {}
import { Uint8ArrayReader } from "@zip.js/zip.js";
// From binary data
const binaryData = new Uint8Array([72, 101, 108, 108, 111]); // "Hello"
const reader = new Uint8ArrayReader(binaryData);
// From ArrayBuffer
const buffer = new ArrayBuffer(1024);
const uint8Array = new Uint8Array(buffer);
const bufferReader = new Uint8ArrayReader(uint8Array);
await zipWriter.add("binary-data.bin", reader);
Reads data from HTTP URLs.
class HttpReader extends Reader<string> {
constructor(url: string | URL, options?: HttpOptions);
}
import { HttpReader } from "@zip.js/zip.js";
// Simple HTTP read
const reader = new HttpReader("https://example.com/data.txt");
// With custom headers
const authenticatedReader = new HttpReader("https://api.example.com/file", {
headers: [
["Authorization", "Bearer your-token"],
["User-Agent", "MyApp/1.0"]
]
});
// Add remote file to ZIP without downloading to memory first
await zipWriter.add("remote-file.txt", reader);
Specialized HTTP reader that supports range requests for efficient partial downloads.
class HttpRangeReader extends HttpReader {
constructor(url: string | URL, options?: HttpRangeOptions);
}
import { HttpRangeReader } from "@zip.js/zip.js";
// Efficient for large remote files - only downloads needed parts
const reader = new HttpRangeReader("https://example.com/large-archive.zip", {
useRangeHeader: true,
preventHeadRequest: false
});
// Perfect for reading ZIP files via HTTP - only central directory is downloaded initially
const zipReader = new ZipReader(reader);
const entries = await zipReader.getEntries(); // Minimal download
Reads from multiple data sources representing split archives.
class SplitDataReader extends Reader<(Reader<unknown> | ReadableReader | ReadableStream)[]> {}
import { SplitDataReader, BlobReader } from "@zip.js/zip.js";
// For split ZIP files (.z01, .z02, .zip)
const readers = [
new BlobReader(part1Blob), // .z01
new BlobReader(part2Blob), // .z02
new BlobReader(finalBlob) // .zip
];
const splitReader = new SplitDataReader(readers);
const zipReader = new ZipReader(splitReader);
Writers provide data output destinations for ZIP operations.
class Writer<Type> implements Initializable, WritableWriter {
readonly writable: WritableStream;
init?(size?: number): Promise<void>;
writeUint8Array(array: Uint8Array): Promise<void>;
getData(): Promise<Type>;
}
Writes data as text with optional encoding.
class TextWriter extends Writer<string> {
constructor(encoding?: string);
}
import { TextWriter } from "@zip.js/zip.js";
// Default UTF-8 encoding
const writer = new TextWriter();
// Specific encoding
const latin1Writer = new TextWriter("latin1");
// Extract text from ZIP entry
const text = await fileEntry.getData(writer);
console.log(text); // Extracted text content
Writes data as a Blob with optional MIME type.
class BlobWriter implements Initializable, WritableWriter {
constructor(mimeString?: string);
readonly writable: WritableStream;
init(): Promise<void>;
getData(): Promise<Blob>;
}
import { BlobWriter } from "@zip.js/zip.js";
// General blob
const writer = new BlobWriter();
// Specific MIME type
const imageWriter = new BlobWriter("image/png");
// Renamed from `zipWriter`: the original snippet declared `const zipWriter`
// twice in the same scope, which is a SyntaxError.
const zipBlobWriter = new BlobWriter("application/zip");
// Create ZIP file
const zipWriter = new ZipWriter(new BlobWriter("application/zip"));
await zipWriter.add("file.txt", new TextReader("content"));
const zipBlob = await zipWriter.close(); // Returns Blob
Writes data as a Base64-encoded Data URI.
class Data64URIWriter extends Writer<string> {
constructor(mimeString?: string);
}
import { Data64URIWriter } from "@zip.js/zip.js";
const writer = new Data64URIWriter("text/plain");
const dataUri = await fileEntry.getData(writer);
console.log(dataUri); // "data:text/plain;base64,SGVsbG8gV29ybGQ="
// Can be used directly in HTML
const img = document.createElement('img');
img.src = await imageEntry.getData(new Data64URIWriter("image/png"));
Writes data as a typed array (raw binary).
class Uint8ArrayWriter extends Writer<Uint8Array> {}
import { Uint8ArrayWriter } from "@zip.js/zip.js";
const writer = new Uint8ArrayWriter();
const binaryData = await fileEntry.getData(writer);
console.log(`File size: ${binaryData.length} bytes`);
console.log(`First byte: 0x${binaryData[0].toString(16)}`);
Writes data to multiple destinations for creating split archives.
class SplitDataWriter implements Initializable, WritableWriter {
constructor(
writerGenerator: AsyncGenerator<Writer<unknown> | WritableWriter | WritableStream, boolean>,
maxSize?: number
);
readonly writable: WritableStream;
init(): Promise<void>;
}
import { SplitDataWriter, BlobWriter } from "@zip.js/zip.js";
// Create split ZIP files (1MB each)
// Yields up to 10 BlobWriter parts, each capped at 1 MB via the writer's
// maxSize property; returning true tells SplitDataWriter that no further
// parts will be produced.
async function* createSplitWriters() {
  for (let part = 1; part <= 10; part++) { // Max 10 parts
    const partWriter = new BlobWriter("application/zip");
    partWriter.maxSize = 1024 * 1024; // 1MB chunks
    console.log(`Creating part ${part}`);
    yield partWriter;
  }
  return true; // Signal completion
}
const splitWriter = new SplitDataWriter(createSplitWriters(), 1024 * 1024);
const zipWriter = new ZipWriter(splitWriter);
// Add large files - they'll be automatically split
await zipWriter.add("large-file1.dat", new BlobReader(largeBlob1));
await zipWriter.add("large-file2.dat", new BlobReader(largeBlob2));
await zipWriter.close();
interface HttpOptions extends HttpRangeOptions {
useRangeHeader?: boolean;
forceRangeRequests?: boolean;
preventHeadRequest?: boolean;
combineSizeEocd?: boolean;
}
- useRangeHeader: Use HTTP Range headers for partial requests
- forceRangeRequests: Always use range requests even if not supported
- preventHeadRequest: Skip HEAD request to determine content length
- combineSizeEocd: Optimize by combining size detection with EOCD reading
interface HttpRangeOptions {
useXHR?: boolean;
headers?: Iterable<[string, string]> | Map<string, string>;
}
- useXHR: Use XMLHttpRequest instead of fetch API
- headers: Custom HTTP headers
// Create custom reader for database records
// Custom Reader that serializes the result of a database query to JSON.
// Fix: the original re-ran JSON.stringify + TextEncoder().encode over the
// *entire* result set on every readUint8Array call (O(total) work per chunk);
// the encoded payload is now computed once in init() and cached.
class DatabaseReader extends Reader {
  constructor(query) {
    super();
    this.query = query;    // query text, executed lazily in init()
    this.data = null;      // raw query result (set by init)
    this.encoded = null;   // cached UTF-8 bytes of the JSON payload
  }
  async init() {
    // Fetch data from database and serialize it exactly once.
    this.data = await database.query(this.query);
    this.encoded = new TextEncoder().encode(JSON.stringify(this.data));
    this.size = this.encoded.length;
  }
  async readUint8Array(offset, length) {
    if (!this.encoded) await this.init();
    // Serve the requested window from the cached buffer.
    return this.encoded.slice(offset, offset + length);
  }
}
// Usage
const dbReader = new DatabaseReader("SELECT * FROM users");
await zipWriter.add("users.json", dbReader);
import { HttpRangeReader, ZipReader } from "@zip.js/zip.js";
async function analyzeRemoteZip(url) {
const reader = new HttpRangeReader(url, {
preventHeadRequest: false, // Get content length first
useRangeHeader: true // Use range requests
});
const zipReader = new ZipReader(reader);
// Only central directory is downloaded at this point
const entries = await zipReader.getEntries();
console.log(`ZIP contains ${entries.length} entries`);
// Download specific files on demand
const readmeEntry = entries.find(e => e.filename === 'README.md');
if (readmeEntry && !readmeEntry.directory) {
// Only this file's data is downloaded
const text = await readmeEntry.getData(new TextWriter());
console.log("README content:", text);
}
await zipReader.close();
}
import { Uint8ArrayReader, Uint8ArrayWriter } from "@zip.js/zip.js";
// Streams a large File/Blob into a ZIP archive one 1 MB slice at a time,
// transforming each slice before it is added, so the whole file is never
// resident in memory at once.
class ChunkedFileProcessor {
  constructor(file) {
    this.file = file;
    this.chunkSize = 1024 * 1024; // 1MB chunks
  }
  // Reads the file slice by slice, transforms each slice, and appends it to
  // the archive as "chunk-<n>.dat". Returns the finished ZIP Blob.
  async processInChunks() {
    const zipWriter = new ZipWriter(new BlobWriter("application/zip"));
    let offset = 0;
    while (offset < this.file.size) {
      const slice = this.file.slice(offset, offset + this.chunkSize);
      const rawBytes = new Uint8Array(await slice.arrayBuffer());
      // Process chunk (e.g., compress, encrypt, transform)
      const transformed = this.processChunk(rawBytes);
      const entryName = `chunk-${Math.floor(offset / this.chunkSize)}.dat`;
      await zipWriter.add(entryName, new Uint8ArrayReader(transformed));
      offset += this.chunkSize;
    }
    return await zipWriter.close();
  }
  // Example transform: XOR every byte with a fixed key (toy "encryption").
  processChunk(data) {
    const key = 0xAA;
    return data.map(byte => byte ^ key);
  }
}
// Process large file without loading it entirely into memory
const processor = new ChunkedFileProcessor(largeFile);
const zipBlob = await processor.processInChunks();
import { SplitDataWriter, BlobWriter } from "@zip.js/zip.js";
// Create splits based on content type rather than size
// Yields one BlobWriter per content-type bucket; every writer is tagged with
// the MIME pattern it is meant to hold before being handed to the consumer.
// Returns true once all buckets have been yielded.
async function* createContentBasedSplits() {
  const buckets = new Map([
    ['application/json', new BlobWriter("application/zip")],
    ['text/plain', new BlobWriter("application/zip")],
    ['image/*', new BlobWriter("application/zip")],
    ['default', new BlobWriter("application/zip")]
  ]);
  for (const [bucketType, bucketWriter] of buckets) {
    bucketWriter.contentType = bucketType;
    yield bucketWriter;
  }
  return true;
}
// SplitDataWriter specialization intended to route entries into the
// content-type buckets yielded by createContentBasedSplits().
// NOTE(review): SplitDataWriter switches writers by size only, so routing by
// content type would still need custom ZipWriter integration (see below).
class ContentBasedSplitWriter extends SplitDataWriter {
constructor() {
super(createContentBasedSplits());
// Bucket applied to entries until setContentType() is called.
this.currentContentType = 'default';
}
// Record the MIME pattern that subsequent entries should target.
setContentType(type) {
this.currentContentType = type;
}
}
// Usage would require custom ZIP writer integration
// Transform data while reading/writing
// Reader decorator: pulls bytes from an underlying reader and pipes each
// window through a transform function before returning it.
class TransformReader extends Reader {
  constructor(sourceReader, transformFn) {
    super();
    this.sourceReader = sourceReader;
    this.transformFn = transformFn;
    // Size of the *source*; a length-changing transform makes this stale.
    this.size = sourceReader.size;
  }
  async readUint8Array(offset, length) {
    const rawWindow = await this.sourceReader.readUint8Array(offset, length);
    return this.transformFn(rawWindow);
  }
}
// Example: Uppercase text transform
// Transform for TransformReader: decode the bytes as UTF-8, upper-case the
// text, and re-encode the result.
function uppercaseTransform(data) {
  const decoded = new TextDecoder().decode(data);
  return new TextEncoder().encode(decoded.toUpperCase());
}
const originalReader = new TextReader("hello world");
const transformedReader = new TransformReader(originalReader, uppercaseTransform);
await zipWriter.add("uppercase.txt", transformedReader); // Contains "HELLO WORLD"
import {
ERR_HTTP_RANGE,
ERR_WRITER_NOT_INITIALIZED
} from "@zip.js/zip.js";
// Tries each URL in turn with an HttpRangeReader, validating the source by
// reading its first 10 bytes. When range requests are rejected
// (ERR_HTTP_RANGE), falls back to a plain HttpReader for that URL.
// Throws once every candidate has been exhausted.
async function robustHttpRead(url, fallbackUrls = []) {
  for (const candidateUrl of [url, ...fallbackUrls]) {
    try {
      const rangeReader = new HttpRangeReader(candidateUrl, {
        useRangeHeader: true,
        headers: [["User-Agent", "zip.js-client/1.0"]]
      });
      // Probe the source: an unsupported range request throws here.
      await rangeReader.readUint8Array(0, 10);
      return rangeReader;
    } catch (error) {
      console.warn(`Failed to read from ${candidateUrl}:`, error.message);
      if (error.message === ERR_HTTP_RANGE) {
        // Server rejected range requests — try a regular HttpReader instead.
        try {
          return new HttpReader(candidateUrl);
        } catch (fallbackError) {
          console.warn(`HTTP fallback also failed:`, fallbackError.message);
        }
      }
    }
  }
  throw new Error("All URL sources failed");
}
// Usage
try {
const reader = await robustHttpRead(
"https://primary.example.com/file.zip",
[
"https://backup1.example.com/file.zip",
"https://backup2.example.com/file.zip"
]
);
const zipReader = new ZipReader(reader);
// Continue with ZIP processing
} catch (error) {
console.error("Could not establish reliable data source:", error);
}The data I/O classes provide flexible and powerful ways to handle various data sources and destinations, forming the foundation for all ZIP operations in zip.js.
Install with Tessl CLI
npx tessl i tessl/npm-zip-js--zip-js