# Streaming API

Stream-based ZIP operations for efficient processing of large files without loading entire archives into memory.

## ZipReaderStream Class

Streaming ZIP reader that processes entries as they're encountered.
```typescript { .api }
class ZipReaderStream<T> {
  constructor(options?: ZipReaderConstructorOptions);

  readonly readable: ReadableStream<Omit<Entry, "getData"> & { readable?: ReadableStream<Uint8Array> }>;
  readonly writable: WritableStream<T>;
}
```
### Properties

- **`readable`**: Stream of entry metadata with optional data streams
- **`writable`**: Input stream for ZIP data

### Usage

The ZipReaderStream creates a transform stream that reads ZIP data and outputs entry information.
```javascript
import { ZipReaderStream } from "@zip.js/zip.js";

// Create stream
const zipReaderStream = new ZipReaderStream();

// Pipe ZIP data into the stream
fetch('large-archive.zip')
  .then(response => response.body.pipeTo(zipReaderStream.writable));

// Process entries as they're discovered
const reader = zipReaderStream.readable.getReader();
try {
  while (true) {
    const { done, value: entry } = await reader.read();
    if (done) break;

    console.log(`Found entry: ${entry.filename}`);

    if (!entry.directory && entry.readable) {
      // Stream the entry content
      const entryReader = entry.readable.getReader();
      try {
        while (true) {
          const { done, value: chunk } = await entryReader.read();
          if (done) break;
          // Process chunk of file data
          console.log(`Read ${chunk.length} bytes from ${entry.filename}`);
        }
      } finally {
        entryReader.releaseLock();
      }
    }
  }
} finally {
  reader.releaseLock();
}
```
## ZipWriterStream Class

Streaming ZIP writer for creating ZIP files incrementally.

```typescript { .api }
class ZipWriterStream {
  constructor(options?: ZipWriterConstructorOptions);

  readonly readable: ReadableStream<Uint8Array>;
  readonly zipWriter: ZipWriter<unknown>;

  transform<T>(path: string): { readable: ReadableStream<T>; writable: WritableStream<T> };
  writable<T>(path: string): WritableStream<T>;
  close(comment?: Uint8Array, options?: ZipWriterCloseOptions): Promise<unknown>;
}
```
### Properties

- **`readable`**: Stream of ZIP file data
- **`zipWriter`**: Underlying ZipWriter instance

### Methods

#### transform()

Creates a transform stream for a specific entry.
```javascript
const zipWriterStream = new ZipWriterStream();

// Create transform stream for an entry
const { readable, writable } = zipWriterStream.transform("data.txt");

// Pipe data through the transform
fetch('source-data.txt')
  .then(response => response.body.pipeTo(writable));

// The readable stream contains the processed data for the ZIP entry
```
#### writable()

Creates a writable stream for a specific entry.

```javascript
const zipWriterStream = new ZipWriterStream();

// Create writable stream for an entry
const entryWritable = zipWriterStream.writable("log-file.txt");

// Write data to the entry
const writer = entryWritable.getWriter();
await writer.write(new TextEncoder().encode("Log entry 1\n"));
await writer.write(new TextEncoder().encode("Log entry 2\n"));
await writer.close();
```
#### close()

Finalizes the ZIP stream and returns the complete ZIP data.

```javascript
// After adding all entries
const zipData = await zipWriterStream.close();
```
## Streaming Examples

### Extract Large ZIP Without Memory Loading

```javascript
import { ZipReaderStream } from "@zip.js/zip.js";

async function extractLargeZip(zipBlob) {
  const zipReaderStream = new ZipReaderStream();

  // Start reading the ZIP
  zipBlob.stream().pipeTo(zipReaderStream.writable);

  // Process entries as they're found
  for await (const entry of zipReaderStream.readable) {
    console.log(`Processing: ${entry.filename} (${entry.uncompressedSize} bytes)`);

    if (!entry.directory && entry.readable) {
      // Stream each entry to a file or process incrementally
      await processEntryStream(entry.filename, entry.readable);
    }
  }
}

async function processEntryStream(filename, readable) {
  const reader = readable.getReader();
  let totalBytes = 0;

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      // Process chunk without loading entire file
      totalBytes += value.length;

      // Example: Save chunk to disk, upload to server, etc.
      await saveChunkToFile(filename, value);
    }
  } finally {
    reader.releaseLock();
  }

  console.log(`Completed ${filename}: ${totalBytes} bytes processed`);
}
```
178
179
### Create ZIP from Streaming Data
180
181
```javascript
182
import { ZipWriterStream } from "@zip.js/zip.js";
183
184
async function createZipFromStreams() {
185
const zipWriterStream = new ZipWriterStream({
186
level: 6,
187
keepOrder: true
188
});
189
190
// Pipe the ZIP output to a destination
191
const zipDestination = new WritableStream({
192
write(chunk) {
193
// Send chunk to server, save to file, etc.
194
console.log(`ZIP chunk: ${chunk.length} bytes`);
195
}
196
});
197
198
zipWriterStream.readable.pipeTo(zipDestination);
199
200
// Add multiple entries from streams
201
await addStreamingEntry(zipWriterStream, "log1.txt", logStream1);
202
await addStreamingEntry(zipWriterStream, "log2.txt", logStream2);
203
await addStreamingEntry(zipWriterStream, "data.json", jsonStream);
204
205
// Finalize ZIP
206
await zipWriterStream.close();
207
}
208
209
async function addStreamingEntry(zipWriterStream, filename, sourceStream) {
210
const entryWritable = zipWriterStream.writable(filename);
211
await sourceStream.pipeTo(entryWritable);
212
}
213
```
214
215
### Real-time ZIP Creation
216
217
```javascript
218
import { ZipWriterStream } from "@zip.js/zip.js";
219
220
class RealTimeZipCreator {
221
constructor() {
222
this.zipWriterStream = new ZipWriterStream();
223
this.setupZipOutput();
224
}
225
226
setupZipOutput() {
227
// Stream ZIP data as it's created
228
this.zipWriterStream.readable.pipeTo(new WritableStream({
229
write: (chunk) => this.sendZipChunk(chunk),
230
close: () => this.finalizeZip()
231
}));
232
}
233
234
async addFileFromStream(filename, readableStream) {
235
const entryWritable = this.zipWriterStream.writable(filename);
236
await readableStream.pipeTo(entryWritable);
237
}
238
239
async addTextEntry(filename, text) {
240
const entryWritable = this.zipWriterStream.writable(filename);
241
const writer = entryWritable.getWriter();
242
await writer.write(new TextEncoder().encode(text));
243
await writer.close();
244
}
245
246
async finalize() {
247
return await this.zipWriterStream.close();
248
}
249
250
sendZipChunk(chunk) {
251
// Send to client via WebSocket, HTTP stream, etc.
252
console.log(`Sending ZIP chunk: ${chunk.length} bytes`);
253
}
254
255
finalizeZip() {
256
console.log("ZIP creation completed");
257
}
258
}
259
260
// Usage
261
const creator = new RealTimeZipCreator();
262
await creator.addTextEntry("readme.txt", "Welcome!");
263
await creator.addFileFromStream("data.csv", csvInputStream);
264
await creator.finalize();
265
```
### Transform Pipeline

```javascript
import { ZipWriterStream } from "@zip.js/zip.js";

async function createTransformPipeline() {
  const zipWriterStream = new ZipWriterStream();

  // Transform data while adding to ZIP
  const { readable, writable } = zipWriterStream.transform("processed-data.txt");

  // Create processing pipeline
  const processedStream = new TransformStream({
    transform(chunk, controller) {
      // Process each chunk (e.g., uppercase text)
      const processed = new TextEncoder().encode(
        new TextDecoder().decode(chunk).toUpperCase()
      );
      controller.enqueue(processed);
    }
  });

  // Pipeline: source → processing → ZIP entry
  await fetch('source-data.txt')
    .then(response => response.body)
    .then(stream => stream.pipeThrough(processedStream))
    .then(stream => stream.pipeTo(writable));

  // Get final ZIP
  const zipData = await zipWriterStream.close();
  return zipData;
}
```
### Concurrent Entry Processing

```javascript
import { ZipWriterStream } from "@zip.js/zip.js";

async function createZipConcurrently() {
  const zipWriterStream = new ZipWriterStream({
    keepOrder: false // Allow concurrent processing
  });

  // Process multiple entries concurrently
  const entryPromises = [
    addLargeFile(zipWriterStream, "file1.dat", source1),
    addLargeFile(zipWriterStream, "file2.dat", source2),
    addLargeFile(zipWriterStream, "file3.dat", source3)
  ];

  // Wait for all entries to be added
  await Promise.all(entryPromises);

  // Finalize ZIP
  return await zipWriterStream.close();
}

async function addLargeFile(zipWriterStream, filename, sourceStream) {
  const entryWritable = zipWriterStream.writable(filename);

  // Add progress tracking
  let bytesProcessed = 0;
  const progressStream = new TransformStream({
    transform(chunk, controller) {
      bytesProcessed += chunk.length;
      console.log(`${filename}: ${bytesProcessed} bytes processed`);
      controller.enqueue(chunk);
    }
  });

  await sourceStream
    .pipeThrough(progressStream)
    .pipeTo(entryWritable);
}
```
343
344
### Memory Efficient Large File Processing
345
346
```javascript
347
async function processLargeZipMemoryEfficient(zipUrl) {
348
const response = await fetch(zipUrl);
349
const zipReaderStream = new ZipReaderStream();
350
351
// Process ZIP without loading into memory
352
response.body.pipeTo(zipReaderStream.writable);
353
354
const results = [];
355
356
for await (const entry of zipReaderStream.readable) {
357
if (entry.filename.endsWith('.log') && entry.readable) {
358
// Process log files line by line
359
const lines = await processLogStream(entry.readable);
360
results.push({
361
filename: entry.filename,
362
lineCount: lines
363
});
364
} else if (entry.filename.endsWith('.json') && entry.readable) {
365
// Parse JSON incrementally
366
const data = await parseJsonStream(entry.readable);
367
results.push({
368
filename: entry.filename,
369
records: data.length
370
});
371
}
372
}
373
374
return results;
375
}
376
377
async function processLogStream(readable) {
378
const reader = readable.getReader();
379
const decoder = new TextDecoder();
380
let buffer = '';
381
let lineCount = 0;
382
383
try {
384
while (true) {
385
const { done, value } = await reader.read();
386
if (done) break;
387
388
buffer += decoder.decode(value, { stream: true });
389
390
// Process complete lines
391
const lines = buffer.split('\n');
392
buffer = lines.pop(); // Keep incomplete line
393
394
lineCount += lines.length;
395
396
// Process lines without storing them
397
for (const line of lines) {
398
if (line.includes('ERROR')) {
399
console.log(`Error found in line ${lineCount}: ${line}`);
400
}
401
}
402
}
403
} finally {
404
reader.releaseLock();
405
}
406
407
return lineCount;
408
}
409
410
async function parseJsonStream(readable) {
411
// Implement streaming JSON parser for large JSON files
412
// This is a simplified example
413
const reader = readable.getReader();
414
const decoder = new TextDecoder();
415
let jsonString = '';
416
417
try {
418
while (true) {
419
const { done, value } = await reader.read();
420
if (done) break;
421
jsonString += decoder.decode(value, { stream: true });
422
}
423
} finally {
424
reader.releaseLock();
425
}
426
427
return JSON.parse(jsonString);
428
}
429
```
## Performance Considerations

### Stream Buffer Management

```javascript
// Configure for optimal streaming performance
import { configure } from "@zip.js/zip.js";

configure({
  chunkSize: 1024 * 64, // 64KB chunks for streaming
  maxWorkers: 4, // Limit workers for memory control
  useCompressionStream: true, // Use native streams when available
  terminateWorkerTimeout: 2000 // Quick cleanup
});
```
### Backpressure Handling

```javascript
async function handleBackpressure(sourceStream, zipWriterStream, filename) {
  const entryWritable = zipWriterStream.writable(filename);
  const writer = entryWritable.getWriter();
  const reader = sourceStream.getReader();

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      // Wait for writer to be ready (handles backpressure)
      await writer.ready;
      await writer.write(value);
    }
  } finally {
    reader.releaseLock();
    await writer.close();
  }
}
```
The streaming API is particularly powerful for:

- Processing large ZIP files without memory constraints
- Creating ZIP files from real-time data sources
- Building data processing pipelines
- Handling network streams efficiently
- Creating responsive applications that don't block during ZIP operations