Readable and writable streams for memory-efficient handling of large files with backpressure control, plus low-level file handle operations for direct byte access.
Low-level file handle for direct byte-level operations with manual offset control.
/**
* Handle for streaming file operations with direct byte access
*/
class FileHandle {
/** Current byte offset in the file, null if handle is closed */
offset: number | null;
/** File size in bytes, null if handle is closed */
size: number | null;
/**
* Closes the file handle, allowing file to be deleted or moved
* Subsequent read/write operations will throw an error
*/
close(): void;
/**
* Reads specified number of bytes from current offset
* @param length Number of bytes to read
* @returns Uint8Array containing the read bytes
*/
readBytes(length: number): Uint8Array<ArrayBuffer>;
/**
* Writes bytes to file at current offset
* @param bytes Uint8Array containing bytes to write
*/
writeBytes(bytes: Uint8Array): void;
}
Usage Examples:
import { File, Paths } from "expo-file-system";
const dataFile = new File(Paths.document, "binary-data.bin");
// Low-level file handle operations
const handle = dataFile.open();
// Read file in chunks
const chunkSize = 1024;
while (handle.offset !== null && handle.offset < handle.size!) {
const chunk = handle.readBytes(chunkSize);
console.log(`Read ${chunk.length} bytes, offset now ${handle.offset}`);
// Process chunk...
processDataChunk(chunk);
}
handle.close();
// Write data with precise offset control
const writeHandle = dataFile.open();
const header = new Uint8Array([0x50, 0x4B, 0x03, 0x04]); // ZIP header
writeHandle.writeBytes(header);
console.log(`Wrote header, current offset: ${writeHandle.offset}`);
writeHandle.close();
Readable stream source for memory-efficient file reading with backpressure support.
/**
* Readable stream source for file operations
*/
class FileSystemReadableStreamSource implements UnderlyingByteSource {
/** File handle for reading operations */
handle: FileHandle;
/** Buffer size for reading chunks (default: 1024 bytes) */
size: number;
/** Stream type indicator */
type: 'bytes';
/**
* Creates a readable stream source
* @param handle FileHandle for the source file
*/
constructor(handle: FileHandle);
/** Cancels the stream and closes the file handle */
cancel(): void;
/**
* Pulls data from the file into the stream
* @param controller Stream controller for managing flow
*/
pull(controller: ReadableByteStreamController): void;
}
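Usage Example: a minimal sketch of wiring a FileHandle into a ReadableStream through this source by hand. It assumes FileSystemReadableStreamSource is exported from "expo-file-system"; in most code, the File.readableStream() method documented below constructs the source for you.
import { File, FileSystemReadableStreamSource } from "expo-file-system";
async function previewFirstChunk(file: File) {
  const handle = file.open();
  // Assumption: FileSystemReadableStreamSource is part of the package's public exports
  const source = new FileSystemReadableStreamSource(handle);
  const stream = new ReadableStream(source);
  const reader = stream.getReader();
  try {
    // Read only the first chunk as a preview of the file contents
    const { done, value } = await reader.read();
    if (!done) {
      console.log(`First chunk: ${value.length} bytes`);
    }
  } finally {
    // cancel() tears down the source, which closes the underlying file handle
    await reader.cancel();
  }
}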
Writable stream sink for memory-efficient file writing operations.
/**
* Writable stream sink for file operations
*/
class FileSystemWritableSink implements UnderlyingSink {
/** File handle for writing operations */
handle: FileHandle;
/**
* Creates a writable stream sink
* @param handle FileHandle for the target file
*/
constructor(handle: FileHandle);
/** Aborts the write stream and closes the handle */
abort(): void;
/** Closes the write stream and file handle */
close(): void;
/**
* Writes a chunk of data to the file
* @param chunk Uint8Array containing data to write
*/
write(chunk: Uint8Array): void;
}
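Usage Example: a minimal sketch of driving a FileHandle through a WritableStream built from this sink. It assumes FileSystemWritableSink is exported from "expo-file-system"; the File.writableStream() method documented below is the usual entry point.
import { File, FileSystemWritableSink } from "expo-file-system";
async function writeChunks(file: File, chunks: Uint8Array[]) {
  const handle = file.open();
  // Assumption: FileSystemWritableSink is part of the package's public exports
  const sink = new FileSystemWritableSink(handle);
  const stream = new WritableStream(sink);
  const writer = stream.getWriter();
  try {
    for (const chunk of chunks) {
      // Each write is forwarded to the sink, which writes at the handle's current offset
      await writer.write(chunk);
    }
  } finally {
    // Closing the writer invokes the sink's close(), which also closes the file handle
    await writer.close();
  }
}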
High-level streaming methods on File instances for convenient stream access.
/**
* Creates a readable stream for the file
* @returns ReadableStream for streaming file content
*/
readableStream(): ReadableStream<Uint8Array>;
/**
* Creates a writable stream for the file
* @returns WritableStream for streaming content to file
*/
writableStream(): WritableStream<Uint8Array>;
/**
* Opens a file handle for low-level streaming operations
* @returns FileHandle for direct byte operations
* @throws Error if file doesn't exist or cannot be opened
*/
open(): FileHandle;
Usage Examples:
import { File, Paths } from "expo-file-system";
// Streaming large file download
async function downloadLargeFile(url: string, destinationPath: string) {
const file = new File(Paths.document, destinationPath);
const writableStream = file.writableStream();
const writer = writableStream.getWriter();
try {
const response = await fetch(url);
const reader = response.body?.getReader();
if (!reader) throw new Error("No response body");
while (true) {
const { done, value } = await reader.read();
if (done) break;
await writer.write(value);
console.log(`Downloaded ${value.length} bytes`);
}
console.log("Download completed");
} finally {
await writer.close();
}
}
// Streaming file processing
async function processLargeFile(inputFile: File, outputFile: File) {
const readableStream = inputFile.readableStream();
const writableStream = outputFile.writableStream();
const reader = readableStream.getReader();
const writer = writableStream.getWriter();
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
// Process chunk (e.g., compression, encryption, etc.)
const processedChunk = await processChunk(value);
await writer.write(processedChunk);
}
} finally {
reader.releaseLock();
await writer.close();
}
}
Common patterns for efficient streaming operations with proper resource management.
Usage Examples:
import { Directory, File, Paths } from "expo-file-system";
// Streaming file copy with progress tracking
async function copyFileWithProgress(
source: File,
destination: File,
onProgress?: (bytesTransferred: number, totalBytes: number) => void
) {
const sourceSize = source.size ?? 0;
let bytesTransferred = 0;
const readableStream = source.readableStream();
const writableStream = destination.writableStream();
const reader = readableStream.getReader();
const writer = writableStream.getWriter();
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
await writer.write(value);
bytesTransferred += value.length;
onProgress?.(bytesTransferred, sourceSize);
}
console.log(`Copy completed: ${bytesTransferred} bytes`);
} finally {
reader.releaseLock();
await writer.close();
}
}
// Streaming file concatenation
async function concatenateFiles(inputFiles: File[], outputFile: File) {
const writableStream = outputFile.writableStream();
const writer = writableStream.getWriter();
try {
for (const inputFile of inputFiles) {
console.log(`Processing: ${inputFile.name}`);
const readableStream = inputFile.readableStream();
const reader = readableStream.getReader();
while (true) {
const { done, value } = await reader.read();
if (done) break;
await writer.write(value);
}
reader.releaseLock();
}
console.log("Concatenation completed");
} finally {
await writer.close();
}
}
// Streaming file splitting
async function splitFile(
inputFile: File,
chunkSize: number,
outputDirectory: Directory
) {
const readableStream = inputFile.readableStream();
const reader = readableStream.getReader();
let chunkIndex = 0;
let currentChunkSize = 0;
let currentWriter: WritableStreamDefaultWriter<Uint8Array> | null = null;
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
// Start new chunk if needed
if (!currentWriter || currentChunkSize >= chunkSize) {
if (currentWriter) {
await currentWriter.close();
}
const chunkFile = outputDirectory.createFile(
`${inputFile.name}.part${chunkIndex.toString().padStart(3, '0')}`,
null
);
currentWriter = chunkFile.writableStream().getWriter();
currentChunkSize = 0;
chunkIndex++;
}
await currentWriter.write(value);
currentChunkSize += value.length;
}
console.log(`File split into ${chunkIndex} chunks`);
} finally {
if (currentWriter) {
await currentWriter.close();
}
reader.releaseLock();
}
}
// Streaming data transformation
async function transformFileData(
inputFile: File,
outputFile: File,
transformer: (chunk: Uint8Array) => Uint8Array
) {
const readableStream = inputFile.readableStream();
const writableStream = outputFile.writableStream();
// Use transform stream for efficient processing
const transformStream = new TransformStream<Uint8Array, Uint8Array>({
transform(chunk, controller) {
const transformedChunk = transformer(chunk);
controller.enqueue(transformedChunk);
}
});
// Pipe streams together
const pipeline = readableStream
.pipeThrough(transformStream)
.pipeTo(writableStream);
await pipeline;
console.log("Transformation completed");
}
// Usage examples
async function demonstrateStreaming() {
const sourceFile = new File(Paths.document, "large-video.mp4");
const backupFile = new File(Paths.document, "backup", "video-backup.mp4");
// Copy with progress
await copyFileWithProgress(sourceFile, backupFile, (transferred, total) => {
const progress = (transferred / total * 100).toFixed(1);
console.log(`Copy progress: ${progress}%`);
});
// Split large file
const chunksDir = new Directory(Paths.document, "chunks");
chunksDir.create();
await splitFile(sourceFile, 10 * 1024 * 1024, chunksDir); // 10MB chunks
// Transform data (example: simple encryption)
const encryptedFile = new File(Paths.document, "encrypted-data.bin");
await transformFileData(sourceFile, encryptedFile, (chunk) => {
// Simple XOR encryption example
const encrypted = new Uint8Array(chunk.length);
const key = 0x42;
for (let i = 0; i < chunk.length; i++) {
encrypted[i] = chunk[i] ^ key;
}
return encrypted;
});
}
Optimize streaming operations for memory efficiency and performance.
Usage Examples:
import { File, Paths } from "expo-file-system";
// Configure optimal buffer sizes based on file size
function getOptimalBufferSize(fileSize: number): number {
if (fileSize < 1024 * 1024) return 4096; // 4KB for small files
if (fileSize < 10 * 1024 * 1024) return 16384; // 16KB for medium files
return 65536; // 64KB for large files
}
// Memory-efficient file processing
async function processLargeFileEfficiently(inputFile: File) {
const fileSize = inputFile.size ?? 0;
const bufferSize = getOptimalBufferSize(fileSize);
const handle = inputFile.open();
const processedData: number[] = [];
try {
while (handle.offset !== null && handle.offset < handle.size!) {
const chunk = handle.readBytes(bufferSize);
// Process chunk immediately to avoid memory buildup
for (let i = 0; i < chunk.length; i++) {
const processedByte = chunk[i] * 2; // Example processing
processedData.push(processedByte);
// Batch process to avoid excessive memory usage
if (processedData.length >= 1000) {
await saveProcessedBatch(processedData);
processedData.length = 0; // Clear array
}
}
}
// Process remaining data
if (processedData.length > 0) {
await saveProcessedBatch(processedData);
}
} finally {
handle.close();
}
}
// Streaming with backpressure handling
async function streamWithBackpressure(source: File, destination: File) {
const readableStream = source.readableStream();
const writableStream = destination.writableStream();
const reader = readableStream.getReader();
const writer = writableStream.getWriter();
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
// Wait for writer to be ready (handles backpressure)
await writer.ready;
await writer.write(value);
}
} finally {
reader.releaseLock();
await writer.close();
}
}
// Concurrent streaming operations
async function concurrentFileProcessing(files: File[]) {
const maxConcurrent = 3; // Maximum number of files processed at once
const processFile = async (file: File, index: number) => {
console.log(`Starting processing file ${index}: ${file.name}`);
const readableStream = file.readableStream();
const reader = readableStream.getReader();
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));
}
console.log(`Completed processing file ${index}: ${file.name}`);
} finally {
reader.releaseLock();
}
};
// Process files in batches of maxConcurrent so only that many run at once
for (let i = 0; i < files.length; i += maxConcurrent) {
const batch = files.slice(i, i + maxConcurrent);
await Promise.all(batch.map((file, offset) => processFile(file, i + offset)));
}
}
async function saveProcessedBatch(data: number[]): Promise<void> {
// Implementation for saving processed data batches
console.log(`Saving batch of ${data.length} processed items`);
}
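One possible implementation of the saveProcessedBatch stub above (an illustrative sketch, not part of the API): each batch is written to its own file inside a hypothetical "processed" directory using the same streaming writer pattern shown earlier. The directory name, file naming scheme, and module-level counter are assumptions made for this example.
import { Directory, File, Paths } from "expo-file-system";
const processedDir = new Directory(Paths.document, "processed");
let batchIndex = 0;
async function saveProcessedBatch(data: number[]): Promise<void> {
  if (!processedDir.exists) processedDir.create();
  // Hypothetical layout: one binary file per batch
  const batchFile = processedDir.createFile(`batch-${batchIndex++}.bin`, null);
  const writer = batchFile.writableStream().getWriter();
  try {
    // Note: Uint8Array.from coerces each value to a byte (mod 256)
    await writer.write(Uint8Array.from(data));
  } finally {
    await writer.close();
  }
}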