JavaScript library for compressing and decompressing ZIP files in browsers, Node.js, and Deno with support for encryption, ZIP64, and web workers
Global configuration system for zip.js that controls web workers, compression codecs, and performance settings.
The main configuration function that accepts a configuration object.
function configure(configuration: Configuration): void;import { configure } from "@zip.js/zip.js";
// Basic configuration
configure({
useWebWorkers: true,
maxWorkers: 4,
chunkSize: 1024 * 64, // 64KB chunks
level: 6 // Default compression level
});interface Configuration extends WorkerConfiguration {
maxWorkers?: number;
terminateWorkerTimeout?: number;
workerScripts?: {
deflate?: string[];
inflate?: string[];
};
chunkSize?: number;
Deflate?: typeof ZipDeflate;
Inflate?: typeof ZipInflate;
CompressionStream?: typeof TransformStream;
DecompressionStream?: typeof TransformStream;
}Maximum number of web workers used for parallel compression/decompression.
configure({
maxWorkers: navigator.hardwareConcurrency || 4
});Default: navigator.hardwareConcurrency (the number of CPU cores). Timeout in milliseconds before idle workers are terminated.
configure({
terminateWorkerTimeout: 10000 // 10 seconds
});Custom URLs for worker scripts, useful for CDN deployments or CSP restrictions.
configure({
workerScripts: {
deflate: ["https://cdn.example.com/zip-deflate-worker.js"],
inflate: ["https://cdn.example.com/zip-inflate-worker.js"]
}
});
// Multiple scripts (executed in order)
configure({
workerScripts: {
deflate: [
"https://cdn.example.com/base-worker.js",
"./custom-deflate-extensions.js"
]
}
});Size of data chunks processed at a time.
configure({
chunkSize: 1024 * 128 // 128KB chunks
});Custom compression/decompression implementations.
import { Deflate, Inflate } from "./custom-codecs.js";
configure({
Deflate: Deflate, // Custom compression
Inflate: Inflate // Custom decompression
});Custom stream implementations when native compression streams are not used.
configure({
CompressionStream: CustomCompressionStream,
DecompressionStream: CustomDecompressionStream
});Base configuration inherited by many options interfaces.
interface WorkerConfiguration {
useWebWorkers?: boolean;
useCompressionStream?: boolean;
}Enable or disable web workers for background processing.
// Disable web workers (synchronous processing)
configure({
useWebWorkers: false
});
// Enable web workers (default)
configure({
useWebWorkers: true
});Default: true. Enable native browser compression streams when available.
configure({
useCompressionStream: true // Use native streams
});
configure({
useCompressionStream: false // Use JavaScript implementation
});Default: true.
import { configure } from "@zip.js/zip.js";
// Optimize for speed with more resources
configure({
maxWorkers: Math.min(navigator.hardwareConcurrency, 8),
chunkSize: 1024 * 1024, // 1MB chunks for better throughput
useWebWorkers: true,
useCompressionStream: true,
terminateWorkerTimeout: 30000 // Keep workers alive longer
});// Optimize for lower memory usage
configure({
maxWorkers: 2, // Fewer workers
chunkSize: 1024 * 32, // 32KB chunks
useWebWorkers: true,
terminateWorkerTimeout: 1000 // Quick cleanup
});// For environments with strict Content Security Policy
configure({
useWebWorkers: false, // Avoid blob URLs for workers
useCompressionStream: true // Rely on native streams
});
// Or use pre-deployed worker scripts
configure({
workerScripts: {
deflate: ["/static/js/zip-deflate-worker.js"],
inflate: ["/static/js/zip-inflate-worker.js"]
}
});// Integrate with pako library
import pako from 'pako';
// Adapter exposing pako's Deflate through the append()/flush() contract
// that zip.js expects from a custom Deflate implementation.
class PakoDeflate {
  constructor() {
    this.deflate = new pako.Deflate();
    this.pending = [];
    // pako only delivers compressed output through the onData callback;
    // `deflate.result` is not populated until the final push(…, true),
    // so reading it mid-stream (as the naive version did) yields nothing.
    this.deflate.onData = (chunk) => this.pending.push(chunk);
  }
  // Compress one chunk; returns whatever output pako has produced so far.
  append(data) {
    this.deflate.push(data, false);
    return this.drain();
  }
  // Finish the stream and return the remaining compressed output.
  flush() {
    this.deflate.push(new Uint8Array(0), true);
    return this.drain();
  }
  // Concatenate the buffered output chunks into one Uint8Array and reset.
  drain() {
    const total = this.pending.reduce((sum, chunk) => sum + chunk.length, 0);
    const output = new Uint8Array(total);
    let offset = 0;
    for (const chunk of this.pending) {
      output.set(chunk, offset);
      offset += chunk.length;
    }
    this.pending = [];
    return output;
  }
}
// Adapter exposing pako's Inflate through the append()/flush() contract
// that zip.js expects from a custom Inflate implementation.
class PakoInflate {
  constructor() {
    this.inflate = new pako.Inflate();
    this.pending = [];
    // Collect output via onData: pako's `result` is only assembled after
    // the stream ends, so it cannot be read chunk-by-chunk.
    this.inflate.onData = (chunk) => this.pending.push(chunk);
  }
  // Decompress one chunk; returns whatever output pako has produced so far.
  append(data) {
    this.inflate.push(data, false);
    return this.drain();
  }
  // Finish the stream and return the remaining decompressed output.
  // BUG FIX: the original flush() had no return statement, so the final
  // decompressed data was silently discarded.
  flush() {
    this.inflate.push(new Uint8Array(0), true);
    return this.drain();
  }
  // Concatenate the buffered output chunks into one Uint8Array and reset.
  drain() {
    const total = this.pending.reduce((sum, chunk) => sum + chunk.length, 0);
    const output = new Uint8Array(total);
    let offset = 0;
    for (const chunk of this.pending) {
      output.set(chunk, offset);
      offset += chunk.length;
    }
    this.pending = [];
    return output;
  }
}
configure({
Deflate: PakoDeflate,
Inflate: PakoInflate
});function configureForEnvironment() {
// Detect environment
const isNode = typeof process !== 'undefined' && process.versions?.node;
const isDeno = typeof Deno !== 'undefined';
const isBrowser = typeof window !== 'undefined';
if (isNode) {
// Node.js configuration
configure({
useWebWorkers: false, // Node doesn't have web workers
useCompressionStream: false, // Use JavaScript implementation
maxWorkers: 1,
chunkSize: 1024 * 1024 // Larger chunks for server processing
});
} else if (isDeno) {
// Deno configuration
configure({
useWebWorkers: true,
useCompressionStream: true,
maxWorkers: 4,
chunkSize: 1024 * 512
});
} else if (isBrowser) {
// Browser configuration
const cores = navigator.hardwareConcurrency || 4;
configure({
useWebWorkers: true,
useCompressionStream: 'CompressionStream' in window,
maxWorkers: Math.min(cores, 6),
chunkSize: 1024 * 256 // Balance for web performance
});
}
}
configureForEnvironment();class AdaptiveConfiguration {
// Tunes zip.js configuration at runtime based on observed performance.
// NOTE(review): each adaptConfiguration() call starts from an empty
// newConfig, so every rule is applied against the hard-coded baseline of
// 4 workers rather than the previously applied configuration — confirm
// this reset-per-call behavior is intended.
constructor() {
// Rolling metrics fed by updateMetrics().
this.performanceMetrics = {
avgCompressionTime: 0,
memoryUsage: 0,
workerUtilization: 0
};
}
// Record one operation's measurements (time in ms, memory in bytes,
// worker count) and immediately re-evaluate the configuration.
// avgCompressionTime is a two-point moving average (old+new)/2, not a
// true mean over all samples.
updateMetrics(compressionTime, memoryUsed, workersUsed) {
this.performanceMetrics = {
avgCompressionTime: (this.performanceMetrics.avgCompressionTime + compressionTime) / 2,
memoryUsage: memoryUsed,
workerUtilization: workersUsed
};
this.adaptConfiguration();
}
// Derive new settings from the metrics and apply them via configure().
// Rule order matters: later rules read newConfig.maxWorkers values set
// by earlier rules, so the memory rule can partially undo the speed rule.
adaptConfiguration() {
const { avgCompressionTime, memoryUsage, workerUtilization } = this.performanceMetrics;
let newConfig = {};
// If compression is slow but memory is fine, use more workers
if (avgCompressionTime > 5000 && memoryUsage < 100 * 1024 * 1024) { // 100MB
newConfig.maxWorkers = Math.min(8, (newConfig.maxWorkers || 4) + 1);
newConfig.chunkSize = 1024 * 1024; // Larger chunks
}
// If memory usage is high, reduce workers and chunk size
if (memoryUsage > 500 * 1024 * 1024) { // 500MB
newConfig.maxWorkers = Math.max(1, (newConfig.maxWorkers || 4) - 1);
newConfig.chunkSize = 1024 * 64; // Smaller chunks
newConfig.terminateWorkerTimeout = 1000; // Quick cleanup
}
// If workers are underutilized and performance is good, reduce workers
if (workerUtilization < 0.5 && avgCompressionTime < 2000) {
newConfig.maxWorkers = Math.max(2, Math.floor((newConfig.maxWorkers || 4) * 0.8));
}
// Only reconfigure when at least one rule fired.
if (Object.keys(newConfig).length > 0) {
console.log('Adapting zip.js configuration:', newConfig);
configure(newConfig);
}
}
}
const adaptiveConfig = new AdaptiveConfiguration();
// Use in ZIP operations
/**
 * Run a small ZIP write while measuring wall-clock time and (where the
 * non-standard performance.memory API exists) heap growth, then feed the
 * numbers into the adaptive configuration instance.
 */
async function monitoredZipOperation() {
  const startedAt = Date.now();
  const heapBefore = performance.memory?.usedJSHeapSize || 0;
  // The ZIP operation being measured.
  const writer = new ZipWriter(new BlobWriter());
  await writer.add("data.txt", new TextReader("Large data..."));
  const zipped = await writer.close();
  const finishedAt = Date.now();
  const heapAfter = performance.memory?.usedJSHeapSize || 0;
  adaptiveConfig.updateMetrics(
    finishedAt - startedAt,
    heapAfter - heapBefore,
    navigator.hardwareConcurrency || 4 // crude stand-in for worker utilization
  );
  return zipped;
}

/**
 * Sanity-check a configuration object before passing it to configure().
 * Logs every issue to the console, throws an Error when any hard error
 * was found, and otherwise returns { warnings, errors } (errors is
 * always empty on a normal return, since errors trigger the throw).
 */
function validateConfiguration(config) {
  const warnings = [];
  const errors = [];
  const { maxWorkers, chunkSize, terminateWorkerTimeout } = config;
  // maxWorkers: must be positive; very high values are merely suspicious.
  if (maxWorkers !== undefined) {
    if (maxWorkers < 1) {
      errors.push("maxWorkers must be at least 1");
    } else if (maxWorkers > 16) {
      warnings.push("maxWorkers > 16 may cause performance issues");
    }
  }
  // chunkSize: warn outside the 1KB..10MB sweet spot.
  if (chunkSize !== undefined) {
    if (chunkSize < 1024) {
      warnings.push("chunkSize < 1KB may cause performance issues");
    } else if (chunkSize > 10 * 1024 * 1024) {
      warnings.push("chunkSize > 10MB may cause memory issues");
    }
  }
  // terminateWorkerTimeout: very short timeouts thrash worker creation.
  if (terminateWorkerTimeout !== undefined && terminateWorkerTimeout < 100) {
    warnings.push("terminateWorkerTimeout < 100ms may cause frequent worker recreation");
  }
  // Report everything, errors first, then fail hard on errors.
  for (const error of errors) {
    console.error("Configuration error:", error);
  }
  for (const warning of warnings) {
    console.warn("Configuration warning:", warning);
  }
  if (errors.length > 0) {
    throw new Error(`Invalid configuration: ${errors.join(", ")}`);
  }
  return { warnings, errors };
}
// Use before configuring
const config = {
maxWorkers: 8,
chunkSize: 1024 * 512,
terminateWorkerTimeout: 5000
};
const validation = validateConfiguration(config);
if (validation.errors.length === 0) {
configure(config);
}Manually terminate all active workers.
function terminateWorkers(): Promise<void>;import { terminateWorkers } from "@zip.js/zip.js";
// Clean up all workers immediately
await terminateWorkers();
// Useful before page unload or when switching contexts
window.addEventListener('beforeunload', () => {
terminateWorkers();
});Transform event-based third-party codec implementations into zip.js-compatible codecs.
function initShimAsyncCodec(
library: EventBasedZipLibrary,
constructorOptions: unknown | null,
registerDataHandler: registerDataHandler
): ZipLibrary;import { initShimAsyncCodec, configure } from "@zip.js/zip.js";
// Example with a hypothetical async codec library
const asyncCodecs = initShimAsyncCodec(
AsyncCompressionLibrary, // Third-party library
{ level: 6 }, // Options passed to library constructors
(codec, onData) => {
// Register data handler
codec.on('data', onData);
}
);
configure({
Deflate: asyncCodecs.Deflate,
Inflate: asyncCodecs.Inflate
});// Recommended production settings
configure({
maxWorkers: Math.min(navigator.hardwareConcurrency || 4, 6),
chunkSize: 1024 * 256, // 256KB - good balance
useWebWorkers: true,
useCompressionStream: true,
terminateWorkerTimeout: 10000 // 10 seconds
});// Development settings with more debugging
configure({
maxWorkers: 2, // Easier debugging
chunkSize: 1024 * 64, // Smaller chunks for testing
useWebWorkers: true,
terminateWorkerTimeout: 2000 // Quick cleanup
});// Deterministic settings for testing
configure({
maxWorkers: 1, // Predictable behavior
chunkSize: 1024 * 32, // Small chunks
useWebWorkers: false, // Synchronous for easier testing
useCompressionStream: false
});The configuration system provides fine-grained control over zip.js behavior, allowing optimization for different environments, performance requirements, and resource constraints.
Install with Tessl CLI
npx tessl i tessl/npm-zip-js--zip-js