# Stream Operations

Low-level stream creation for direct read/write access to remote files with full client control over stream lifecycle and event handling.

## Capabilities

### Create Read Stream

Creates a readable stream connected to a remote file for streaming data from the server.

```javascript { .api }
/**
 * Create a read stream connected to remote file
 * @param remotePath - Path to remote file
 * @param options - Stream configuration options
 * @returns Readable stream object
 */
createReadStream(remotePath, options): Object;
```

**Usage Examples:**

```javascript
// Basic read stream
const readStream = sftp.createReadStream('/remote/large-file.txt');

readStream.on('data', (chunk) => {
  console.log(`Received ${chunk.length} bytes`);
});

readStream.on('end', () => {
  console.log('Read stream ended');
});

readStream.on('error', (err) => {
  console.error('Read stream error:', err);
});

// Read stream with options
const rangedReadStream = sftp.createReadStream('/remote/data.bin', {
  start: 1024, // Start reading from byte 1024
  end: 2048, // Stop reading at byte 2048
  highWaterMark: 64 * 1024 // 64KB buffer size
});

// Pipe to local file
const fs = require('fs');
const writeStream = fs.createWriteStream('/local/output.bin');
readStream.pipe(writeStream);

// Transform data while streaming
const { Transform } = require('stream');
const transformer = new Transform({
  transform(chunk, encoding, callback) {
    // Convert to uppercase
    const transformed = chunk.toString().toUpperCase();
    callback(null, transformed);
  }
});

sftp.createReadStream('/remote/text.txt')
  .pipe(transformer)
  .pipe(fs.createWriteStream('/local/uppercase.txt'));
```

### Create Write Stream

Creates a writable stream connected to a remote file for streaming data to the server.

```javascript { .api }
/**
 * Create a write stream connected to remote file
 * @param remotePath - Path to remote file
 * @param options - Stream configuration options
 * @returns Writable stream object
 */
createWriteStream(remotePath, options): Object;
```

**Usage Examples:**

```javascript
// Basic write stream
const writeStream = sftp.createWriteStream('/remote/output.txt');

writeStream.write('Hello, ');
writeStream.write('world!');
writeStream.end();

writeStream.on('close', () => {
  console.log('Write stream closed');
});

writeStream.on('error', (err) => {
  console.error('Write stream error:', err);
});

// Write stream with options
const appendStream = sftp.createWriteStream('/remote/data.log', {
  flags: 'a', // Append mode
  mode: 0o644, // File permissions
  encoding: 'utf8' // Text encoding
});

// Stream from local file to remote
const fs = require('fs');
const localReadStream = fs.createReadStream('/local/input.txt');
const uploadStream = sftp.createWriteStream('/remote/output.txt');

localReadStream.pipe(uploadStream);

// Handle completion
uploadStream.on('close', () => {
  console.log('File uploaded via stream');
});
```

## Stream Options

```javascript { .api }
interface StreamOptions {
  flags?: string; // File open flags
  encoding?: string; // Character encoding
  mode?: number; // File permissions (write streams)
  autoClose?: boolean; // Auto close stream when done
  start?: number; // Start position (read streams)
  end?: number; // End position (read streams)
  highWaterMark?: number; // Buffer size in bytes
}
```

### Common Flag Values

**Read Stream Flags:**

- `'r'`: Read only (default)
- `'r+'`: Read and write

**Write Stream Flags:**

- `'w'`: Write only, truncate if exists (default)
- `'w+'`: Read and write, truncate if exists
- `'a'`: Append only
- `'a+'`: Read and append

## Advanced Stream Usage

### Memory-Efficient File Processing

```javascript
// Process large files without loading into memory
const { Transform } = require('stream');
const { pipeline } = require('stream/promises');

// Custom transform stream for data processing
class DataProcessor extends Transform {
  _transform(chunk, encoding, callback) {
    // Process chunk of data
    const processed = this.processData(chunk);
    callback(null, processed);
  }

  processData(chunk) {
    // Your data processing logic here
    return chunk.toString().replace(/old/g, 'new');
  }
}

// Memory-efficient pipeline
try {
  await pipeline(
    sftp.createReadStream('/remote/huge-file.txt'),
    new DataProcessor(),
    sftp.createWriteStream('/remote/processed-file.txt')
  );
  console.log('Large file processed successfully');
} catch (err) {
  console.error('Pipeline failed:', err);
}
```

### Concurrent Stream Operations

```javascript
// Multiple streams for parallel processing
const fs = require('fs');
const path = require('path');

const streams = [];
const files = ['/remote/file1.txt', '/remote/file2.txt', '/remote/file3.txt'];

for (const file of files) {
  const readStream = sftp.createReadStream(file);
  const writeStream = fs.createWriteStream(`/local/${path.basename(file)}`);

  streams.push(new Promise((resolve, reject) => {
    readStream.pipe(writeStream);
    writeStream.on('close', resolve);
    writeStream.on('error', reject);
    readStream.on('error', reject);
  }));
}

// Wait for all downloads to complete
await Promise.all(streams);
console.log('All files downloaded');
```

### Stream Progress Monitoring

```javascript
// Monitor stream progress
function createProgressReadStream(remotePath, totalSize) {
  const readStream = sftp.createReadStream(remotePath);
  let bytesRead = 0;

  readStream.on('data', (chunk) => {
    bytesRead += chunk.length;
    const progress = ((bytesRead / totalSize) * 100).toFixed(2);
    console.log(`Progress: ${progress}% (${bytesRead}/${totalSize} bytes)`);
  });

  return readStream;
}

// Get file size first
const stats = await sftp.stat('/remote/large-file.zip');
const readStream = createProgressReadStream('/remote/large-file.zip', stats.size);
const writeStream = fs.createWriteStream('/local/large-file.zip');

readStream.pipe(writeStream);
```

### Error Handling with Streams

```javascript
// Comprehensive error handling
function createRobustStream(remotePath, localPath) {
  return new Promise((resolve, reject) => {
    const readStream = sftp.createReadStream(remotePath);
    const writeStream = fs.createWriteStream(localPath);

    let hasError = false;

    const cleanup = () => {
      if (!readStream.destroyed) readStream.destroy();
      if (!writeStream.destroyed) writeStream.destroy();
    };

    const handleError = (err) => {
      if (hasError) return; // Prevent multiple error handling
      hasError = true;
      cleanup();
      reject(err);
    };

    readStream.on('error', handleError);
    writeStream.on('error', handleError);

    writeStream.on('close', () => {
      if (!hasError) {
        resolve(localPath);
      }
    });

    readStream.pipe(writeStream);
  });
}

// Usage with proper error handling
try {
  await createRobustStream('/remote/file.txt', '/local/file.txt');
  console.log('File transferred successfully');
} catch (err) {
  console.error('Transfer failed:', err.message);
}
```

### Stream Encryption/Decryption

```javascript
// Encrypt data while streaming
const crypto = require('crypto');

function createEncryptStream(password) {
  const algorithm = 'aes-256-ctr';
  const key = crypto.scryptSync(password, 'salt', 32);
  const iv = crypto.randomBytes(16);

  // NOTE: persist `iv` alongside the ciphertext — it is required for decryption
  const cipher = crypto.createCipheriv(algorithm, key, iv);
  return cipher;
}

// Encrypt while uploading
const encryptStream = createEncryptStream('mypassword');
const readStream = fs.createReadStream('/local/sensitive.txt');
const writeStream = sftp.createWriteStream('/remote/encrypted.txt');

readStream
  .pipe(encryptStream)
  .pipe(writeStream);
```

## Stream Lifecycle Management

### Proper Resource Cleanup

```javascript
// Always clean up streams properly
class StreamManager {
  constructor() {
    this.activeStreams = new Set();
  }

  createReadStream(remotePath, options) {
    const stream = sftp.createReadStream(remotePath, options);
    this.activeStreams.add(stream);

    stream.on('close', () => {
      this.activeStreams.delete(stream);
    });

    return stream;
  }

  createWriteStream(remotePath, options) {
    const stream = sftp.createWriteStream(remotePath, options);
    this.activeStreams.add(stream);

    stream.on('close', () => {
      this.activeStreams.delete(stream);
    });

    return stream;
  }

  cleanup() {
    for (const stream of this.activeStreams) {
      if (!stream.destroyed) {
        stream.destroy();
      }
    }
    this.activeStreams.clear();
  }
}

// Usage
const streamManager = new StreamManager();

// Create streams through manager
const readStream = streamManager.createReadStream('/remote/file.txt');
const writeStream = streamManager.createWriteStream('/remote/output.txt');

// Cleanup on exit
process.on('exit', () => {
  streamManager.cleanup();
});
```

## Performance Considerations

- **Buffer Size**: Adjust `highWaterMark` based on network conditions and file sizes
- **Memory Usage**: Streams are memory-efficient for large files compared to loading entire files
- **Error Handling**: Always handle stream errors to prevent memory leaks
- **Resource Cleanup**: Destroy streams when done to free resources
- **Concurrent Streams**: Limit concurrent streams to avoid overwhelming the server

## Error Handling

Stream operation errors include:

- `ENOENT`: Remote file does not exist (read streams)
- `EACCES`: Permission denied
- `EMFILE`: Too many open files
- Network errors during stream operations
- Stream-specific errors (encoding, truncation, etc.)