Promise-based SFTP client for Node.js that wraps the ssh2 module
—
Low-level stream creation for direct read/write access to remote files with full client control over stream lifecycle and event handling.
Creates a readable stream connected to a remote file for streaming data from the server.
/**
* Create a read stream connected to remote file
* @param remotePath - Path to remote file
* @param options - Stream configuration options
* @returns Readable stream object
*/
createReadStream(remotePath, options): Object;

Usage Examples:
// Basic read stream
const readStream = sftp.createReadStream('/remote/large-file.txt');
readStream.on('data', (chunk) => {
console.log(`Received ${chunk.length} bytes`);
});
readStream.on('end', () => {
console.log('Read stream ended');
});
readStream.on('error', (err) => {
console.error('Read stream error:', err);
});
// Read stream with options
const readStream = sftp.createReadStream('/remote/data.bin', {
start: 1024, // Start reading from byte 1024
end: 2048, // Stop reading at byte 2048
highWaterMark: 64 * 1024 // 64KB buffer size
});
// Pipe to local file
const fs = require('fs');
const writeStream = fs.createWriteStream('/local/output.bin');
readStream.pipe(writeStream);
// Transform data while streaming
const { Transform } = require('stream');
const transformer = new Transform({
transform(chunk, encoding, callback) {
// Convert to uppercase
const transformed = chunk.toString().toUpperCase();
callback(null, transformed);
}
});
sftp.createReadStream('/remote/text.txt')
.pipe(transformer)
.pipe(fs.createWriteStream('/local/uppercase.txt'));

Creates a writable stream connected to a remote file for streaming data to the server.
/**
* Create a write stream connected to remote file
* @param remotePath - Path to remote file
* @param options - Stream configuration options
* @returns Writable stream object
*/
createWriteStream(remotePath, options): Object;

Usage Examples:
// Basic write stream
const writeStream = sftp.createWriteStream('/remote/output.txt');
writeStream.write('Hello, ');
writeStream.write('world!');
writeStream.end();
writeStream.on('close', () => {
console.log('Write stream closed');
});
writeStream.on('error', (err) => {
console.error('Write stream error:', err);
});
// Write stream with options
const writeStream = sftp.createWriteStream('/remote/data.log', {
flags: 'a', // Append mode
mode: 0o644, // File permissions
encoding: 'utf8' // Text encoding
});
// Stream from local file to remote
const fs = require('fs');
const readStream = fs.createReadStream('/local/input.txt');
const writeStream = sftp.createWriteStream('/remote/output.txt');
readStream.pipe(writeStream);
// Handle completion
writeStream.on('close', () => {
console.log('File uploaded via stream');
});

interface StreamOptions {
flags?: string; // File open flags
encoding?: string; // Character encoding
mode?: number; // File permissions (write streams)
autoClose?: boolean; // Auto close stream when done
start?: number; // Start position (read streams)
end?: number; // End position (read streams)
highWaterMark?: number; // Buffer size in bytes
}

Read Stream Flags:
'r': Read only (default)
'r+': Read and write

Write Stream Flags:
'w': Write only, truncate if exists (default)
'w+': Read and write, truncate if exists
'a': Append only
'a+': Read and append

// Process large files without loading into memory
const { Transform } = require('stream');
const { pipeline } = require('stream/promises');
// Custom transform stream for data processing
// Transform stream that rewrites data chunk-by-chunk, so arbitrarily large
// files can be processed without buffering them in memory.
class DataProcessor extends Transform {
  /**
   * Stream machinery hook: transform one chunk and emit the result.
   * @param {Buffer|string} chunk - Incoming data chunk
   * @param {string} encoding - Chunk encoding (unused here)
   * @param {Function} callback - Invoked as callback(error, transformedChunk)
   */
  _transform(chunk, encoding, callback) {
    callback(null, this.processData(chunk));
  }

  /**
   * Domain-specific transformation applied to each chunk.
   * @param {Buffer|string} chunk - Raw chunk
   * @returns {string} Transformed text
   */
  processData(chunk) {
    // Your data processing logic here
    const text = chunk.toString();
    return text.replace(/old/g, 'new');
  }
}
// Memory-efficient pipeline
try {
await pipeline(
sftp.createReadStream('/remote/huge-file.txt'),
new DataProcessor(),
sftp.createWriteStream('/remote/processed-file.txt')
);
console.log('Large file processed successfully');
} catch (err) {
console.error('Pipeline failed:', err);
}

// Multiple streams for parallel processing
const streams = [];
const files = ['/remote/file1.txt', '/remote/file2.txt', '/remote/file3.txt'];
for (const file of files) {
const readStream = sftp.createReadStream(file);
const writeStream = fs.createWriteStream(`/local/${path.basename(file)}`);
streams.push(new Promise((resolve, reject) => {
readStream.pipe(writeStream);
writeStream.on('close', resolve);
writeStream.on('error', reject);
readStream.on('error', reject);
}));
}
// Wait for all downloads to complete
await Promise.all(streams);
console.log('All files downloaded');

// Monitor stream progress
/**
 * Wrap an SFTP read stream with console progress reporting.
 * @param {string} remotePath - Remote file to read
 * @param {number} totalSize - Expected total size in bytes (e.g. from sftp.stat)
 * @returns {Object} The read stream, with a 'data' listener already attached
 */
function createProgressReadStream(remotePath, totalSize) {
  const stream = sftp.createReadStream(remotePath);
  let bytesRead = 0;

  stream.on('data', (chunk) => {
    bytesRead += chunk.length;
    // Percentage with two decimals, based on the caller-supplied total.
    const progress = ((bytesRead / totalSize) * 100).toFixed(2);
    console.log(`Progress: ${progress}% (${bytesRead}/${totalSize} bytes)`);
  });

  return stream;
}
// Get file size first
const stats = await sftp.stat('/remote/large-file.zip');
const readStream = createProgressReadStream('/remote/large-file.zip', stats.size);
const writeStream = fs.createWriteStream('/local/large-file.zip');
readStream.pipe(writeStream);

// Comprehensive error handling
/**
 * Download a remote file to a local path with comprehensive error handling:
 * a failure on either stream destroys both streams and rejects exactly once.
 * @param {string} remotePath - Source file on the SFTP server
 * @param {string} localPath - Destination path on local disk
 * @returns {Promise<string>} Resolves with localPath on success
 */
function createRobustStream(remotePath, localPath) {
  return new Promise((resolve, reject) => {
    const source = sftp.createReadStream(remotePath);
    const sink = fs.createWriteStream(localPath);
    let failed = false;

    // Tear down both ends so neither leaks a file handle.
    const cleanup = () => {
      if (!source.destroyed) source.destroy();
      if (!sink.destroyed) sink.destroy();
    };

    const fail = (err) => {
      if (failed) return; // first error wins; ignore later ones
      failed = true;
      cleanup();
      reject(err);
    };

    source.on('error', fail);
    sink.on('error', fail);
    // 'close' also fires after destroy(); the failed flag keeps a rejected
    // transfer from resolving.
    sink.on('close', () => {
      if (!failed) {
        resolve(localPath);
      }
    });

    source.pipe(sink);
  });
}
// Usage with proper error handling
try {
await createRobustStream('/remote/file.txt', '/local/file.txt');
console.log('File transferred successfully');
} catch (err) {
console.error('Transfer failed:', err.message);
}

// Encrypt data while streaming
const crypto = require('crypto');
/**
 * Build a cipher stream for encrypting data while it is piped.
 *
 * Fix: the original called crypto.createCipher(algorithm, key, iv), but
 * createCipher never accepted an IV and has been deprecated (and removed in
 * recent Node.js releases); crypto.createCipheriv is the correct API.
 *
 * @param {string} password - Password used to derive the AES-256 key
 * @returns {Object} Cipher transform stream; its random IV is exposed as
 *   `cipher.iv` and MUST be stored alongside the ciphertext, otherwise the
 *   data cannot be decrypted later.
 */
function createEncryptStream(password) {
  const algorithm = 'aes-256-ctr';
  // NOTE(review): a fixed salt weakens key derivation — use a random,
  // persisted salt in production.
  const key = crypto.scryptSync(password, 'salt', 32);
  const iv = crypto.randomBytes(16);
  const cipher = crypto.createCipheriv(algorithm, key, iv);
  // Expose the IV so callers can persist it for decryption.
  cipher.iv = iv;
  return cipher;
}
// Encrypt while uploading
const encryptStream = createEncryptStream('mypassword');
const readStream = fs.createReadStream('/local/sensitive.txt');
const writeStream = sftp.createWriteStream('/remote/encrypted.txt');
readStream
.pipe(encryptStream)
.pipe(writeStream);

// Always clean up streams properly
/**
 * Tracks every stream opened through it so all of them can be torn down
 * with a single call (e.g. on process exit).
 */
class StreamManager {
  constructor() {
    this.activeStreams = new Set();
  }

  // Register a stream and forget it automatically once it closes.
  #track(stream) {
    this.activeStreams.add(stream);
    stream.on('close', () => {
      this.activeStreams.delete(stream);
    });
    return stream;
  }

  /** Open a tracked SFTP read stream. */
  createReadStream(remotePath, options) {
    return this.#track(sftp.createReadStream(remotePath, options));
  }

  /** Open a tracked SFTP write stream. */
  createWriteStream(remotePath, options) {
    return this.#track(sftp.createWriteStream(remotePath, options));
  }

  /** Destroy every still-open stream and clear the registry. */
  cleanup() {
    for (const stream of this.activeStreams) {
      if (!stream.destroyed) {
        stream.destroy();
      }
    }
    this.activeStreams.clear();
  }
}
// Usage
const streamManager = new StreamManager();
// Create streams through manager
const readStream = streamManager.createReadStream('/remote/file.txt');
const writeStream = streamManager.createWriteStream('/remote/output.txt');
// Cleanup on exit
process.on('exit', () => {
streamManager.cleanup();
});

Tune highWaterMark based on network conditions and file sizes.

Stream operation errors include:
ENOENT: Remote file does not exist (read streams)
EACCES: Permission denied
EMFILE: Too many open files

Install with Tessl CLI
npx tessl i tessl/npm-ssh2-sftp-client