CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/npm-mem-fs-editor

File editing helpers working on top of mem-fs

Pending
Quality

Pending

Does it follow best practices?

Impact

Pending

No eval scenarios have been run

SecuritybySnyk

Pending

The security risk profile of this skill has not yet been assessed.

Overview
Eval results
Files

commit-pipeline.mddocs/

Commit and Pipeline Operations

Pipeline-based commit system for batch file operations with custom transforms and filtering, enabling efficient processing of file changes before writing to disk.

Capabilities

Commit Files to Disk

Commit all pending file changes from the in-memory store to the file system with optional transforms and filtering.

/**
 * Commit pending file changes to disk
 * @param options - Pipeline options or file transform function
 * @param transforms - Additional file transform functions to apply
 * @returns Promise that resolves when all files are committed
 */
function commit<EditorFile extends MemFsEditorFile>(
  options?: PipelineOptions<EditorFile> | FileTransform<EditorFile>,
  ...transforms: FileTransform<EditorFile>[]
): Promise<void>;

interface PipelineOptions<T> {
  /** Filter function to select which files to commit */
  filter?: (file: T) => boolean;
  /** Custom error handling for individual files */
  onError?: (error: Error, file: T) => void;
  /** Additional pipeline configuration */
  [key: string]: any;
}

interface FileTransform<T> {
  /** Transform function applied to each file before committing */
  (file: T): T | Promise<T>;
}

interface MemFsEditorFile {
  path: string;
  stat?: { mode?: number } | null;
  contents: Buffer | null;
  committed?: boolean;
  isNew?: boolean;
  state?: 'modified' | 'deleted';
  stateCleared?: 'modified' | 'deleted';
}

Usage Examples:

import { create as createMemFs } from "mem-fs";
import { create as createEditor } from "mem-fs-editor";

const store = createMemFs();
const fs = createEditor(store);

// Make some changes
fs.write("package.json", JSON.stringify({ name: "my-app" }, null, 2));
fs.copy("src/**/*.js", "dist/");
fs.delete("temp/**/*");

// Simple commit
await fs.commit();

// Commit with file filtering
await fs.commit({
  filter: (file) => !file.path.includes("node_modules")
});

// Commit with transforms
await fs.commit(
  (file) => {
    // Add timestamp to all files
    if (file.contents) {
      const header = `// Generated: ${new Date().toISOString()}\n`;
      file.contents = Buffer.concat([
        Buffer.from(header),
        file.contents
      ]);
    }
    return file;
  }
);

Export File State for Debugging

Export current file states and metadata for debugging and inspection purposes.

/**
 * Export file states for debugging
 * @param cwd - Current working directory (default: process.cwd())
 * @param filter - Filter pattern or function to select files
 * @returns Object mapping file paths to their dump information
 */
function dump<EditorFile extends MemFsEditorFile>(
  cwd?: string,
  filter?: string | ((file: EditorFile, cwd: string) => boolean)
): Record<string, MemFsEditorFileDump>;

interface MemFsEditorFileDump {
  /** File contents as string or null if binary/empty */
  contents: string | null;
  /** Current file state (modified, deleted, etc.) */
  state?: string;
  /** Previously cleared state information */
  stateCleared?: string;
}

Usage Examples:

// Dump all files
const allFiles = fs.dump();
console.log("Current files:", Object.keys(allFiles));

// Dump with custom working directory
const srcFiles = fs.dump("/project/src");

// Dump with string filter (glob pattern)
const jsFiles = fs.dump(undefined, "**/*.js");

// Dump with function filter
const modifiedFiles = fs.dump(undefined, (file, cwd) => {
  return file.state === 'modified';
});

// Inspect specific file states
Object.entries(allFiles).forEach(([path, dump]) => {
  console.log(`${path}:`, {
    hasContents: dump.contents !== null,
    state: dump.state,
    stateCleared: dump.stateCleared
  });
});

Advanced Pipeline Operations

File Transform Functions

// Content transformation
const addLicenseHeader = (file) => {
  if (file.path.endsWith('.js') && file.contents) {
    const license = `/*
 * Copyright (c) 2024 My Company
 * Licensed under MIT License
 */\n`;
    file.contents = Buffer.concat([
      Buffer.from(license),
      file.contents
    ]);
  }
  return file;
};

// Minification transform
const minifyJS = (file) => {
  if (file.path.endsWith('.js') && file.contents) {
    const minified = file.contents
      .toString()
      .replace(/\/\*[\s\S]*?\*\//g, '') // Remove comments
      .replace(/\s+/g, ' ') // Collapse whitespace
      .trim();
    file.contents = Buffer.from(minified);
  }
  return file;
};

// Path transformation
const updatePaths = (file) => {
  if (file.path.startsWith('temp/')) {
    file.path = file.path.replace('temp/', 'dist/');
  }
  return file;
};

// Apply multiple transforms
await fs.commit(addLicenseHeader, minifyJS, updatePaths);

Conditional Commits

// Commit only specific file types
await fs.commit({
  filter: (file) => {
    return file.path.match(/\.(js|ts|json)$/) && !file.path.includes('test');
  }
});

// Commit with size limits
await fs.commit({
  filter: (file) => {
    return !file.contents || file.contents.length < 1024 * 1024; // < 1MB
  }
});

// Commit only new files
await fs.commit({
  filter: (file) => file.isNew === true
});

// Commit only modified files
await fs.commit({
  filter: (file) => file.state === 'modified'
});

Error Handling in Commits

// Commit with error handling
await fs.commit({
  filter: (file) => file.path.endsWith('.js'),
  onError: (error, file) => {
    console.error(`Failed to commit ${file.path}:`, error.message);
    // Could log to external service, skip file, etc.
  }
});

// Robust commit with try-catch
try {
  await fs.commit(
    // Transform that might fail
    (file) => {
      if (file.path.endsWith('.json') && file.contents) {
        // Validate JSON
        JSON.parse(file.contents.toString());
      }
      return file;
    }
  );
} catch (error) {
  console.error("Commit failed:", error.message);
  
  // Inspect what files were problematic
  const dump = fs.dump();
  const pendingFiles = Object.entries(dump)
    .filter(([_, info]) => info.state)
    .map(([path]) => path);
  
  console.log("Files with pending changes:", pendingFiles);
}

Stream-based Processing

import { createCommitTransform } from "mem-fs-editor/transform";

// Create custom commit transform
const customCommitTransform = createCommitTransform();

// The transform module provides stream-based processing
// for high-performance batch operations

State Management Integration

import { 
  setFileState, 
  isFileNew, 
  isFileStateModified,
  isFilePending,
  setCommittedFile 
} from "mem-fs-editor/state";

// Check file states before committing
const dump = fs.dump();
Object.entries(dump).forEach(([path, info]) => {
  const file = store.get(path);
  
  console.log(`${path}:`, {
    isNew: isFileNew(file),
    isModified: isFileStateModified(file), 
    isPending: isFilePending(file)
  });
});

// Commit with state management
await fs.commit({
  filter: (file) => isFilePending(file)
});

// Custom state management after commit
const commitWithStateTracking = async () => {
  const filesToCommit = Object.keys(fs.dump());
  
  await fs.commit();
  
  // Mark files as committed using state utilities
  filesToCommit.forEach(filepath => {
    const file = store.get(filepath);
    if (file) {
      setCommittedFile(file);
    }
  });
};

await commitWithStateTracking();

Performance Optimization

// Batch commits for better performance
const commitInBatches = async (batchSize = 50) => {
  const allFiles = Object.keys(fs.dump());
  
  for (let i = 0; i < allFiles.length; i += batchSize) {
    const batch = allFiles.slice(i, i + batchSize);
    
    await fs.commit({
      filter: (file) => batch.includes(file.path)
    });
    
    console.log(`Committed batch ${Math.floor(i / batchSize) + 1}`);
  }
};

await commitInBatches();

// Optimized commit with minimal transforms
await fs.commit({
  filter: (file) => {
    // Quick checks first
    return file.path.endsWith('.js') && file.isNew;
  }
}, (file) => {
  // Minimal transform
  return file;
});

Debugging and Monitoring

// Monitor commit progress
const monitorCommit = async () => {
  const startTime = Date.now();
  const initialCount = Object.keys(fs.dump()).length;
  
  console.log(`Starting commit of ${initialCount} files...`);
  
  await fs.commit((file) => {
    console.log(`Processing: ${file.path}`);
    return file;
  });
  
  const endTime = Date.now();
  console.log(`Commit completed in ${endTime - startTime}ms`);
};

await monitorCommit();

// Debug file states during commit
const debugCommit = async () => {
  const beforeDump = fs.dump();
  console.log("Before commit:", Object.keys(beforeDump).length, "files");
  
  await fs.commit();
  
  const afterDump = fs.dump();
  console.log("After commit:", Object.keys(afterDump).length, "files");
  
  // Show what changed
  const committed = Object.keys(beforeDump).filter(path => 
    !afterDump[path] || !afterDump[path].state
  );
  console.log("Committed files:", committed);
};

await debugCommit();

docs

commit-pipeline.md

file-copy.md

file-management.md

file-reading.md

file-writing.md

index.md

state-management.md

template-processing.md

transform.md

tile.json