CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/npm-metro-memory-fs

A memory-based implementation of the Node.js fs module for testing purposes

Overall
score

96%

Overview
Eval results
Files

streams.mddocs/

Streams

Readable and writable stream implementations for efficient handling of large files with position control, encoding options, and event handling. Provides Node.js compatible stream interfaces for memory filesystem operations.

Capabilities

Readable Streams

Create readable streams for efficient reading of large files with precise control over reading position and encoding.

/**
 * Create a readable stream from a file
 * @param path - File path to read from
 * @param options - Stream options including encoding, position control, and file descriptor
 * @returns Readable stream instance
 */
createReadStream(path: string | Buffer, options?: ReadStreamOptions): ReadStream;
createReadStream(path: string | Buffer, encoding: string): ReadStream;

interface ReadStreamOptions {
  /** File open flags */
  flags?: string;
  /** Text encoding for string output */
  encoding?: string;
  /** Use existing file descriptor instead of opening file */
  fd?: number;
  /** File mode for newly created files */
  mode?: number;
  /** Auto-close file descriptor when stream ends */
  autoClose?: boolean;
  /** Starting byte position in file */
  start?: number;
  /** Ending byte position in file (inclusive) */
  end?: number;
  /** Stream buffer size */
  highWaterMark?: number;
}

interface ReadStream extends NodeJS.ReadableStream {
  /** File path being read */
  path: string | Buffer;
  /** Number of bytes read so far */
  bytesRead: number;
  /** File descriptor (if opened) */
  fd?: number;
  
  // Event emitters
  on(event: 'open', listener: (fd: number) => void): this;
  on(event: 'close', listener: () => void): this;
  on(event: 'data', listener: (chunk: Buffer | string) => void): this;
  on(event: 'end', listener: () => void): this;
  on(event: 'error', listener: (err: Error) => void): this;
}

Usage Examples:

// Basic file reading stream
const readStream = fs.createReadStream('/large-file.txt');
readStream.on('data', (chunk) => {
  console.log(`Received ${chunk.length} bytes`);
});
readStream.on('end', () => {
  console.log('File reading completed');
});

// Read with encoding
const textStream = fs.createReadStream('/document.txt', 'utf8');
textStream.on('data', (chunk) => {
  console.log('Text chunk:', chunk); // String chunks
});

// Read specific file range
const rangeStream = fs.createReadStream('/data.bin', {
  start: 1000,      // Start at byte 1000
  end: 2000,        // End at byte 2000 (inclusive)
  encoding: 'hex'   // Output as hex string
});

// Use existing file descriptor
const fd = fs.openSync('/file.txt', 'r');
const fdStream = fs.createReadStream('/file.txt', { fd });
fdStream.on('open', (openedFd) => {
  console.log('Stream opened with fd:', openedFd);
});

// Stream with custom buffer size
const bufferedStream = fs.createReadStream('/big-file.txt', {
  highWaterMark: 64 * 1024, // 64KB buffer
  encoding: 'utf8'
});

// Handle stream events
const stream = fs.createReadStream('/log.txt');
stream.on('open', (fd) => {
  console.log('File opened, fd:', fd);
});
stream.on('close', () => {
  console.log('File closed');
});
stream.on('error', (err) => {
  console.error('Stream error:', err);
});

// Pipe to another stream
const readStream = fs.createReadStream('/input.txt');
const writeStream = fs.createWriteStream('/output.txt');
readStream.pipe(writeStream);

Writable Streams

Create writable streams for efficient writing to files with position control and buffering.

/**
 * Create a writable stream to a file
 * @param path - File path to write to
 * @param options - Stream options including encoding, position control, and file descriptor
 * @returns Writable stream instance
 */
createWriteStream(path: string | Buffer, options?: WriteStreamOptions): WriteStream;
createWriteStream(path: string | Buffer, encoding: string): WriteStream;

interface WriteStreamOptions {
  /** File open flags */
  flags?: string;
  /** Default text encoding for string writes */
  encoding?: string;
  /** Use existing file descriptor instead of opening file */
  fd?: number;
  /** File mode for newly created files */
  mode?: number;
  /** Auto-close file descriptor when stream finishes */
  autoClose?: boolean;
  /** Starting byte position in file */
  start?: number;
  /** Emit 'close' event when stream closes */
  emitClose?: boolean;
}

interface WriteStream extends NodeJS.WritableStream {
  /** File path being written to */
  path: string | Buffer;
  /** Number of bytes written so far */
  bytesWritten: number;
  /** File descriptor (if opened) */
  fd?: number;
  
  // Event emitters
  on(event: 'open', listener: (fd: number) => void): this;
  on(event: 'close', listener: () => void): this;
  on(event: 'finish', listener: () => void): this;
  on(event: 'error', listener: (err: Error) => void): this;
  
  // Write methods
  write(chunk: string | Buffer, encoding?: string): boolean;
  write(chunk: string | Buffer, callback?: (error?: Error) => void): boolean;
  write(chunk: string | Buffer, encoding?: string, callback?: (error?: Error) => void): boolean;
  end(): void;
  end(chunk: string | Buffer): void;
  end(chunk: string | Buffer, encoding?: string): void;
  end(chunk: string | Buffer, callback?: () => void): void;
  end(chunk: string | Buffer, encoding?: string, callback?: () => void): void;
}

Usage Examples:

// Basic file writing stream
const writeStream = fs.createWriteStream('/output.txt');
writeStream.write('Hello ');
writeStream.write('World!');
writeStream.end();

writeStream.on('finish', () => {
  console.log('Writing completed');
});

// Write with encoding
const textStream = fs.createWriteStream('/document.txt', 'utf8');
textStream.write('Unicode content: ');
textStream.write('Hello 世界');
textStream.end();

// Write to specific position
const positionStream = fs.createWriteStream('/patchable.txt', {
  flags: 'r+',    // Open for reading and writing
  start: 100      // Start writing at byte 100
});
positionStream.write('Inserted text');
positionStream.end();

// Use existing file descriptor
const fd = fs.openSync('/target.txt', 'w');
const fdStream = fs.createWriteStream('/target.txt', { 
  fd,
  emitClose: true 
});
fdStream.on('open', (openedFd) => {
  console.log('Stream opened with fd:', openedFd);
});

// Stream with callback handling
const callbackStream = fs.createWriteStream('/callback.txt');
callbackStream.write('First chunk', (err) => {
  if (!err) {
    console.log('First chunk written');
  }
});
callbackStream.write('Second chunk', 'utf8', (err) => {
  if (!err) {
    console.log('Second chunk written');
  }
});
callbackStream.end('Final chunk', () => {
  console.log('Stream ended');
});

// Handle stream events
const stream = fs.createWriteStream('/log.txt');
stream.on('open', (fd) => {
  console.log('File opened for writing, fd:', fd);
});
stream.on('finish', () => {
  console.log('All data written to file');
});
stream.on('close', () => {
  console.log('File closed');
});
stream.on('error', (err) => {
  console.error('Write error:', err);
});

Stream Processing Patterns

Common patterns for working with streams in Metro Memory FS.

// Copy file using streams
/**
 * Copy a file by piping a read stream into a write stream.
 * @param src - Path of the file to copy from
 * @param dest - Path of the file to copy to
 * @returns Promise that resolves once the copy is fully flushed to dest
 */
function copyFileWithStreams(src, dest) {
  return new Promise((resolve, reject) => {
    const readStream = fs.createReadStream(src);
    const writeStream = fs.createWriteStream(dest);

    // On failure, destroy BOTH streams so no file descriptor leaks
    // (the original error handlers left the other stream open),
    // then reject with the first error observed.
    function fail(err) {
      readStream.destroy();
      writeStream.destroy();
      reject(err);
    }

    readStream.on('error', fail);
    writeStream.on('error', fail);
    writeStream.on('finish', resolve);

    readStream.pipe(writeStream);
  });
}

// Process large file line by line
function processFileLineByLine(filePath, processor) {
  return new Promise((resolve, reject) => {
    const stream = fs.createReadStream(filePath, 'utf8');
    let buffer = '';
    
    stream.on('data', (chunk) => {
      buffer += chunk;
      const lines = buffer.split('\n');
      buffer = lines.pop() || ''; // Keep incomplete line in buffer
      
      lines.forEach(line => {
        processor(line);
      });
    });
    
    stream.on('end', () => {
      if (buffer) {
        processor(buffer); // Process final line
      }
      resolve();
    });
    
    stream.on('error', reject);
  });
}

// Transform stream data
/**
 * Copy a file while applying `transformer` to every chunk.
 * Respects backpressure: reading pauses while the write buffer is full.
 * @param inputPath - Source file path
 * @param outputPath - Destination file path
 * @param transformer - Maps each input chunk to the data to write
 * @returns Promise that resolves when the transformed output is flushed
 */
function transformStream(inputPath, outputPath, transformer) {
  return new Promise((resolve, reject) => {
    const readStream = fs.createReadStream(inputPath);
    const writeStream = fs.createWriteStream(outputPath);

    readStream.on('data', (chunk) => {
      const transformed = transformer(chunk);
      // write() returns false when the internal buffer is full;
      // pause reading until 'drain' says it is safe to continue.
      if (!writeStream.write(transformed)) {
        readStream.pause();
        writeStream.once('drain', () => readStream.resume());
      }
    });

    readStream.on('end', () => {
      writeStream.end();
    });

    readStream.on('error', reject);
    writeStream.on('error', reject);
    writeStream.on('finish', resolve);
  });
}

// Append to file using stream
/**
 * Append data to a file using an append-mode ('a') write stream.
 * @param filePath - File to append to (created if it does not exist)
 * @param data - Chunk to append (string or Buffer)
 * @returns Promise that resolves when the data is flushed to the file
 */
function appendToFile(filePath, data) {
  return new Promise((resolve, reject) => {
    const stream = fs.createWriteStream(filePath, { flags: 'a' });

    stream.on('finish', resolve);
    stream.on('error', reject);

    // End the stream only after this write succeeds; a write error
    // rejects the promise directly.
    stream.write(data, (err) => {
      err ? reject(err) : stream.end();
    });
  });
}

Stream Buffering and Flow Control

Control stream buffering and handle backpressure.

// Handle backpressure in write streams
/**
 * Write a chunk to a stream, resolving only once it is safe to write more.
 * If the stream's internal buffer is full, waits for the 'drain' event.
 * @param stream - A writable stream
 * @param data - Chunk to write
 * @returns Promise that resolves when the stream can accept more data
 */
function writeWithBackpressure(stream, data) {
  return new Promise((resolve, reject) => {
    if (stream.write(data)) {
      // Buffer had room; safe to continue immediately.
      resolve();
      return;
    }
    // Buffer is full: wait for 'drain', but make sure whichever
    // listener does NOT fire is removed — otherwise repeated calls
    // leak one stale once-listener on the stream per call.
    const onDrain = () => {
      stream.removeListener('error', onError);
      resolve();
    };
    const onError = (err) => {
      stream.removeListener('drain', onDrain);
      reject(err);
    };
    stream.once('drain', onDrain);
    stream.once('error', onError);
  });
}

// Read file in controlled chunks
/**
 * Read an entire file using a fixed read-buffer size and return its
 * contents as a single Buffer.
 * @param filePath - File to read
 * @param chunkSize - highWaterMark for the read stream (default 1024 bytes)
 * @returns Promise resolving to the file contents as a Buffer
 */
function readFileInChunks(filePath, chunkSize = 1024) {
  // Note: no `async` keyword — the function already returns an
  // explicitly-constructed Promise, so `async` would only wrap it
  // in a second, redundant Promise layer.
  const chunks = [];
  const stream = fs.createReadStream(filePath, {
    highWaterMark: chunkSize
  });

  return new Promise((resolve, reject) => {
    stream.on('data', (chunk) => {
      chunks.push(chunk);
      console.log(`Read chunk of ${chunk.length} bytes`);
    });

    stream.on('end', () => {
      resolve(Buffer.concat(chunks));
    });

    stream.on('error', reject);
  });
}

// Pause and resume streams
/**
 * Demonstrates pausing a flowing read stream while each chunk is
 * "processed", then resuming the flow after a simulated delay.
 * @param filePath - File to stream
 */
function pauseResumeExample(filePath) {
  const stream = fs.createReadStream(filePath);

  const handleChunk = (chunk) => {
    console.log(`Received ${chunk.length} bytes`);

    // Halt the flow while this chunk is being worked on.
    stream.pause();

    // Simulate 100ms of processing, then let data flow again.
    setTimeout(() => {
      console.log('Processing complete, resuming...');
      stream.resume();
    }, 100);
  };

  stream.on('data', handleChunk);
  stream.on('end', () => console.log('Stream ended'));
}

File Descriptor Management with Streams

Proper handling of file descriptors in stream operations.

// Manual file descriptor management
/**
 * Open a file descriptor manually and stream from it without letting
 * the stream close it automatically (`autoClose: false`).
 * The fd is closed exactly once, on whichever of 'end'/'error' fires.
 * @param filePath - File to read
 * @returns The readable stream (caller may attach further listeners)
 */
function streamWithManualFd(filePath) {
  const fd = fs.openSync(filePath, 'r');
  let closed = false;

  // Close the fd exactly once: if both 'end' and a later 'error' fire,
  // a second closeSync(fd) would throw EBADF on an already-closed fd.
  function closeOnce() {
    if (!closed) {
      closed = true;
      fs.closeSync(fd);
    }
  }

  const stream = fs.createReadStream(filePath, {
    fd,
    autoClose: false // Don't auto-close the fd
  });

  stream.on('end', () => {
    closeOnce();
    console.log('File descriptor closed manually');
  });

  stream.on('error', closeOnce); // Close on error too

  return stream;
}

// Stream without auto-close
/**
 * Create a read stream whose underlying file descriptor is NOT closed
 * automatically when the stream ends; the caller owns cleanup.
 * @param filePath - File to read
 * @returns Readable stream with autoClose disabled
 */
function createNonClosingStream(filePath) {
  const options = { autoClose: false };
  return fs.createReadStream(filePath, options);
}

// Check stream state
/**
 * Log a read stream's state: readability, destruction flag, bytes read,
 * and (when one has been assigned) its file descriptor.
 * @param stream - A ReadStream to inspect
 */
function checkStreamState(stream) {
  console.log('Stream readable:', stream.readable);
  console.log('Stream destroyed:', stream.destroyed);
  console.log('Bytes read:', stream.bytesRead);

  // `fd` is only set once the stream has opened the file.
  const { fd } = stream;
  if (fd !== undefined) {
    console.log('File descriptor:', fd);
  }
}

Performance Considerations

// Optimize buffer sizes for large files
const largeFileStream = fs.createReadStream('/huge-file.dat', {
  highWaterMark: 1024 * 1024 // 1MB buffer for large files
});

// Use smaller buffers for many small operations
const smallFileStream = fs.createReadStream('/config.json', {
  highWaterMark: 1024 // 1KB buffer for small files
});

// Efficient file copying
/**
 * Copy a file using matched 64KB buffers on both the read and write
 * sides so chunks flow through without repeated re-buffering.
 * @param src - Source file path
 * @param dest - Destination file path
 * @returns Promise that resolves once the copy has been flushed
 */
function efficientCopy(src, dest) {
  const CHUNK = 64 * 1024; // 64KB chunks
  const readStream = fs.createReadStream(src, { highWaterMark: CHUNK });
  const writeStream = fs.createWriteStream(dest, { highWaterMark: CHUNK });

  return new Promise((resolve, reject) => {
    readStream.on('error', reject);
    writeStream.on('error', reject);
    writeStream.on('finish', resolve);
    readStream.pipe(writeStream);
  });
}

Error Handling

Common stream errors and handling patterns:

// Comprehensive error handling
/**
 * Copy inputPath to outputPath with full error handling: on any stream
 * error both streams are destroyed, the partially-written output file
 * is removed, and the promise rejects with the original error.
 * @param inputPath - Source file path
 * @param outputPath - Destination file path
 * @returns Promise that resolves on success or rejects on the first error
 */
function robustStreamOperation(inputPath, outputPath) {
  return new Promise((resolve, reject) => {
    const readStream = fs.createReadStream(inputPath);
    const writeStream = fs.createWriteStream(outputPath);

    let settled = false;

    const cleanup = (err) => {
      if (settled) {
        return; // only the first terminal event wins
      }
      settled = true;

      readStream.destroy();
      writeStream.destroy();

      if (!err) {
        resolve();
        return;
      }

      // Remove the partial output file; ignore failures doing so.
      try {
        fs.unlinkSync(outputPath);
      } catch (cleanupErr) {
        // Ignore cleanup errors
      }
      reject(err);
    };

    readStream.on('error', cleanup);
    writeStream.on('error', cleanup);
    writeStream.on('finish', () => cleanup(null));

    readStream.pipe(writeStream);
  });
}

Common stream errors:

  • ENOENT - Source file doesn't exist (read stream)
  • EISDIR - Trying to read/write a directory as a file
  • EMFILE - Too many open files
  • ENOSPC - No space left on device (can be simulated when the in-memory filesystem's configured capacity is exhausted)

Install with Tessl CLI

npx tessl i tessl/npm-metro-memory-fs

docs

directory-operations.md

file-descriptors.md

file-operations.md

file-watching.md

index.md

stats-permissions.md

streams.md

symbolic-links.md

tile.json