Cross-platform streaming API for ZIP file extraction and manipulation in Node.js environments
—
Transform stream for extracting entire ZIP archives to the filesystem with configurable concurrency and destination paths. This high-level interface provides a simple way to extract complete archives while maintaining memory efficiency through streaming.
Creates a transform stream that extracts all contents of a ZIP archive to a specified directory.
/**
 * Transform stream that extracts an entire ZIP archive to the filesystem.
 * Pipe a ZIP byte stream into it; listen for 'close' (extraction done) and
 * 'error' (extraction failed), as shown in the usage examples below.
 * NOTE: declaration only — the runtime behavior lives in the unzipper package.
 */
class Extract extends Transform {
/** @param options - destination path plus optional extraction concurrency */
constructor(options: ExtractOptions);
}
/** Configuration accepted by the Extract stream constructor. */
interface ExtractOptions {
  /** Destination directory path for extraction */
  path: string;
  /** Number of concurrent file extractions (optional) */
  concurrency?: number;
}

Usage Examples:
const unzipper = require("unzipper");
const fs = require("fs");

// Basic extraction to directory
fs.createReadStream("archive.zip")
  .pipe(unzipper.Extract({ path: "extracted-files" }))
  .on("close", () => {
    console.log("Extraction completed");
  });

// Extraction with custom concurrency
fs.createReadStream("large-archive.zip")
  .pipe(unzipper.Extract({
    path: "output",
    concurrency: 5
  }))
  .on("close", () => {
    console.log("Large archive extracted");
  });

// Promise-based extraction: resolve on 'close', reject on 'error'
const extractPromise = new Promise((resolve, reject) => {
  fs.createReadStream("data.zip")
    .pipe(unzipper.Extract({ path: "data" }))
    .on("close", resolve)
    .on("error", reject);
});
await extractPromise;
console.log("Data extraction completed");

The Extract stream automatically creates the destination directory and any necessary parent directories.
// Automatically creates nested directory structure
fs.createReadStream("project.zip")
  .pipe(unzipper.Extract({ path: "projects/new-project/files" }));

Configure the number of files extracted simultaneously to balance speed and resource usage.
// Low concurrency for resource-constrained environments
unzipper.Extract({ path: "output", concurrency: 1 })
// High concurrency for fast extraction (default behavior)
unzipper.Extract({ path: "output", concurrency: 10 })
// Balanced concurrency
unzipper.Extract({ path: "output", concurrency: 3 })

Standard transform stream events for monitoring extraction progress.
const extractor = fs.createReadStream("archive.zip")
  .pipe(unzipper.Extract({ path: "output" }));

// Extraction completed successfully
extractor.on('close', () => {
  console.log('All files extracted');
});

// Transform stream finished processing
extractor.on('finish', () => {
  console.log('Stream processing finished');
});

// Error occurred during extraction
// NOTE(review): pipe() does not forward read-stream errors — attach a
// separate 'error' handler to the read stream to catch file-read failures.
extractor.on('error', (error: Error) => {
  console.error('Extraction failed:', error);
});

const path = require("path");
const fs = require("fs");

/**
 * Extracts zipPath into outputPath and resolves with { fileCount, duration }.
 * Files are counted with a Parse pass BEFORE extracting, so the reported
 * count is complete when the promise resolves. (The original counted in a
 * parallel stream, which could race with the Extract 'close' event and
 * resolve with a partial count.)
 */
const extractWithValidation = (zipPath, outputPath) => {
  return new Promise((resolve, reject) => {
    // Ensure output directory exists
    if (!fs.existsSync(outputPath)) {
      fs.mkdirSync(outputPath, { recursive: true });
    }
    let fileCount = 0;
    // First pass: count files so the final tally is exact
    fs.createReadStream(zipPath)
      .pipe(new unzipper.Parse())
      .on("entry", (entry) => {
        if (entry.type === "File") {
          fileCount++;
        }
        entry.autodrain(); // discard contents; only the count is needed
      })
      .on("error", reject)
      .on("finish", () => {
        // Second pass: actual extraction, timed from here
        const startTime = Date.now();
        fs.createReadStream(zipPath)
          .pipe(unzipper.Extract({ path: outputPath }))
          .on("close", () => {
            const duration = Date.now() - startTime;
            console.log(`Extracted ${fileCount} files in ${duration}ms`);
            resolve({ fileCount, duration });
          })
          .on("error", (error) => {
            console.error(`Extraction failed: ${error.message}`);
            reject(error);
          });
      });
  });
};
// Usage
try {
  const result = await extractWithValidation("data.zip", "extracted");
  console.log(`Successfully extracted ${result.fileCount} files`);
} catch (error) {
  console.error("Extraction failed:", error);
}

/**
 * Extracts with a periodic progress line on stdout.
 * Progress is approximate (timer-driven, not tied to actual entries).
 * The interval is cleared on both 'close' and 'error' so the timer cannot
 * keep the process alive — the original never cleared it when extraction
 * finished early or failed.
 */
const extractWithProgress = (zipPath, outputPath) => {
  return new Promise((resolve, reject) => {
    let extractedFiles = 0;
    let totalFiles = 0;
    // First pass: count total files
    fs.createReadStream(zipPath)
      .pipe(new unzipper.Parse())
      .on("entry", (entry) => {
        if (entry.type === "File") {
          totalFiles++;
        }
        entry.autodrain();
      })
      .on("finish", () => {
        console.log(`Starting extraction of ${totalFiles} files...`);
        // Approximate progress ticker; clamped so it never exceeds totalFiles
        const progressInterval = setInterval(() => {
          if (extractedFiles < totalFiles) {
            extractedFiles++;
            const percent = Math.round((extractedFiles / totalFiles) * 100);
            process.stdout.write(`\rProgress: ${percent}% (${extractedFiles}/${totalFiles})`);
          }
        }, 100);
        // Second pass: extract with progress
        fs.createReadStream(zipPath)
          .pipe(unzipper.Extract({ path: outputPath }))
          .on("close", () => {
            clearInterval(progressInterval); // stop the ticker once done
            extractedFiles = totalFiles;     // report a complete final tally
            console.log(`\nExtraction completed: ${extractedFiles}/${totalFiles} files`);
            resolve({ extractedFiles, totalFiles });
          })
          .on("error", (error) => {
            clearInterval(progressInterval); // don't leak the timer on failure
            reject(error);
          });
      })
      .on("error", reject);
  });
};
// Extract only specific file types
/**
 * Extracts zipPath to a temp directory, then moves only files whose
 * extension appears in allowedExtensions into outputPath, preserving the
 * relative directory structure. The temp directory is removed even when a
 * move fails (the original leaked it on error).
 */
const extractFiltered = (zipPath, outputPath, allowedExtensions) => {
  return new Promise((resolve, reject) => {
    const tempPath = `${outputPath}-temp`;
    // First extract everything to temp directory
    fs.createReadStream(zipPath)
      .pipe(unzipper.Extract({ path: tempPath }))
      .on("close", () => {
        // Then filter and move desired files
        const moveFilteredFiles = async () => {
          // NOTE(review): 'glob' is a third-party dependency — it must be installed
          const glob = require("glob");
          if (!fs.existsSync(outputPath)) {
            fs.mkdirSync(outputPath, { recursive: true });
          }
          try {
            for (const ext of allowedExtensions) {
              const pattern = `${tempPath}/**/*.${ext}`;
              const files = glob.sync(pattern);
              for (const file of files) {
                const relativePath = path.relative(tempPath, file);
                const destPath = path.join(outputPath, relativePath);
                const destDir = path.dirname(destPath);
                if (!fs.existsSync(destDir)) {
                  fs.mkdirSync(destDir, { recursive: true });
                }
                // NOTE(review): renameSync fails across filesystems (EXDEV);
                // use copy+unlink if temp and output may live on different mounts
                fs.renameSync(file, destPath);
              }
            }
          } finally {
            // Clean up temp directory even if a move above threw
            fs.rmSync(tempPath, { recursive: true, force: true });
          }
          resolve();
        };
        moveFilteredFiles().catch(reject);
      })
      .on("error", reject);
  });
};
// Usage: extract only JavaScript and JSON files
await extractFiltered("project.zip", "src", ["js", "json", "ts"]);

const zlib = require("zlib");
// Extract a gzip-compressed ZIP file (.zip.gz): gunzip first, then extract
fs.createReadStream("archive.zip.gz")
  .pipe(zlib.createGunzip())
  .pipe(unzipper.Extract({ path: "output" }))
  .on("close", () => {
    console.log("Compressed archive extracted");
  });
// Extract from HTTP response
const https = require("https");

/**
 * Downloads a ZIP over HTTPS and extracts it into outputPath.
 * Rejects on non-200 responses instead of piping an error page into the
 * extractor (the original ignored the status code). Redirects are not
 * followed — pass the final URL.
 */
const downloadAndExtract = (url, outputPath) => {
  return new Promise((resolve, reject) => {
    https.get(url, (response) => {
      if (response.statusCode !== 200) {
        response.resume(); // drain the response so the socket is released
        reject(new Error(`Request failed with status ${response.statusCode}`));
        return;
      }
      response
        .pipe(unzipper.Extract({ path: outputPath }))
        .on("close", resolve)
        .on("error", reject);
    }).on("error", reject);
  });
};
await downloadAndExtract("https://example.com/archive.zip", "downloads");

Install with Tessl CLI
npx tessl i tessl/npm-unzipper