Additional utilities for cache operations including memoization management and temporary directory operations.
Clears the internal in-memory LRU cache that stores frequently accessed cache entries.
/**
* Clears internal memory cache
* @returns {object} Object containing the cleared cache entries
*/
function clearMemoized();
Usage Examples:
const cacache = require('cacache');
// Clear all memoized entries
const clearedEntries = cacache.clearMemoized();
console.log('Cleared memoized entries:', Object.keys(clearedEntries).length);
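Clearing memoization only drops the in-memory copies; cached entries remain on disk. A minimal sketch (the key name is illustrative), assuming an entry was stored with memoize: true, showing that reads after clearMemoized() fall back to the on-disk cache:
const cacache = require('cacache');
// Store an entry and keep an in-memory copy via memoization
await cacache.put('./cache', 'session-data', Buffer.from('hello'), { memoize: true });
// This read can be served from the in-memory memoization cache
await cacache.get('./cache', 'session-data');
// Drop the in-memory copies; the entry itself stays on disk
cacache.clearMemoized();
// This read falls back to the on-disk cache and still succeeds
const { data } = await cacache.get('./cache', 'session-data');
console.log(data.toString()); // 'hello'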
// Use during memory pressure
if (process.memoryUsage().heapUsed > 500 * 1024 * 1024) { // > 500MB
console.log('High memory usage, clearing cache');
cacache.clearMemoized();
}
// Clear periodically in long-running applications
setInterval(() => {
const cleared = cacache.clearMemoized();
console.log(`Cleared ${Object.keys(cleared).length} memoized entries`);
}, 60 * 60 * 1000); // Every hour
// Clear before shutdown
process.on('SIGTERM', () => {
cacache.clearMemoized();
console.log('Cleared memoization cache before shutdown');
});
Creates a temporary directory within the cache directory for atomic operations.
/**
* Creates temporary directory in cache
* @param {string} cache - Path to cache directory
* @param {object} opts - Options object
* @param {string} [opts.tmpPrefix] - Prefix for temporary directory name
* @returns {Promise<string>} Promise resolving to temporary directory path
*/
function tmp.mkdir(cache, opts = {});
Usage Examples:
const path = require('path');
const fs = require('fs/promises');
const cacache = require('cacache');
// Create basic temporary directory
const tmpDir = await cacache.tmp.mkdir('./cache');
console.log('Created temporary directory:', tmpDir);
// Use temporary directory for atomic operations
const tmpDir = await cacache.tmp.mkdir('./cache');
const tmpFile = path.join(tmpDir, 'temp-data');
// Write to temporary location first
await fs.writeFile(tmpFile, 'temporary data');
// Move to final location atomically
await fs.rename(tmpFile, './final-location');
// Create temporary directory with custom prefix
const tmpDir = await cacache.tmp.mkdir('./cache', {
tmpPrefix: 'build-'
});
console.log('Build temp directory:', tmpDir);
// Cleanup temporary directory
const fs = require('fs/promises');
try {
await fs.rm(tmpDir, { recursive: true, force: true });
} catch (error) {
console.warn('Failed to cleanup temp directory:', error);
}
Executes a callback function with a temporary directory that is automatically cleaned up.
/**
* Executes callback with temporary directory
* @param {string} cache - Path to cache directory
* @param {object|function} opts - Options object or callback function
* @param {string} [opts.tmpPrefix] - Prefix for temporary directory name
* @param {function} cb - Callback function (if opts provided)
* @returns {Promise<any>} Promise resolving to callback result
*/
function tmp.withTmp(cache, opts, cb);
// Alternative signature when no options needed
function tmp.withTmp(cache, cb);
Usage Examples:
const path = require('path');
const fs = require('fs/promises');
const cacache = require('cacache');
// Basic usage with automatic cleanup
const result = await cacache.tmp.withTmp('./cache', async (tmpDir) => {
console.log('Working in temporary directory:', tmpDir);
// Perform operations in temporary directory
const tempFile = path.join(tmpDir, 'work.txt');
await fs.writeFile(tempFile, 'temporary work');
const data = await fs.readFile(tempFile, 'utf8');
return data.toUpperCase();
});
console.log('Result:', result);
// Temporary directory is automatically cleaned up
// With options
const result = await cacache.tmp.withTmp('./cache',
{ tmpPrefix: 'processing-' },
async (tmpDir) => {
// Download file to temporary location
const response = await fetch('https://example.com/data.json');
const tempFile = path.join(tmpDir, 'downloaded.json');
await fs.writeFile(tempFile, await response.text());
// Process file
const data = JSON.parse(await fs.readFile(tempFile, 'utf8'));
data.processed = true;
return data;
}
);
// Complex atomic operation
await cacache.tmp.withTmp('./cache', async (tmpDir) => {
// Build complex data structure in temporary location
const buildDir = path.join(tmpDir, 'build');
await fs.mkdir(buildDir, { recursive: true });
// Generate multiple files
await Promise.all([
fs.writeFile(path.join(buildDir, 'config.json'), JSON.stringify(config)),
fs.writeFile(path.join(buildDir, 'data.csv'), csvData),
fs.writeFile(path.join(buildDir, 'readme.txt'), documentation)
]);
// Create archive from build directory
const archivePath = path.join(tmpDir, 'bundle.tar.gz');
await createArchive(buildDir, archivePath);
// Store archive in cache
const archiveData = await fs.readFile(archivePath);
await cacache.put('./cache', 'processed-bundle', archiveData);
return 'Bundle created and cached';
});
Monitor and manage memory usage with memoization:
// Smart memory management
class CacheMemoryManager {
constructor(maxMemoryMB = 100) {
this.maxMemory = maxMemoryMB * 1024 * 1024;
this.lastClear = Date.now();
}
checkAndClearIfNeeded() {
const memUsage = process.memoryUsage();
const timeSinceLastClear = Date.now() - this.lastClear;
// Clear if memory usage is high or it's been a while
if (memUsage.heapUsed > this.maxMemory || timeSinceLastClear > 30 * 60 * 1000) {
const cleared = cacache.clearMemoized();
this.lastClear = Date.now();
console.log(`Memory manager cleared ${Object.keys(cleared).length} entries`);
// Force garbage collection if available
if (global.gc) {
global.gc();
}
}
}
async withMemoryCheck(operation) {
this.checkAndClearIfNeeded();
const result = await operation();
this.checkAndClearIfNeeded();
return result;
}
}
const memManager = new CacheMemoryManager(50); // 50MB limit
// Use with cache operations
const result = await memManager.withMemoryCheck(async () => {
return await cacache.get('./cache', 'large-data');
});
Advanced temporary file handling patterns:
// Atomic file replacement
async function atomicFileUpdate(cache, finalPath, updateFn) {
return await cacache.tmp.withTmp(cache, async (tmpDir) => {
const tmpFile = path.join(tmpDir, 'atomic-update');
// Read existing file if it exists
let existingData = null;
try {
existingData = await fs.readFile(finalPath);
} catch (error) {
if (error.code !== 'ENOENT') throw error;
}
// Apply update function
const newData = await updateFn(existingData);
// Write to temporary file
await fs.writeFile(tmpFile, newData);
// Atomic move to final location (rename requires source and target on the same filesystem)
await fs.rename(tmpFile, finalPath);
return newData;
});
}
// Usage
await atomicFileUpdate('./cache', './config.json', (existingData) => {
const config = existingData ? JSON.parse(existingData) : {};
config.lastUpdated = Date.now();
return JSON.stringify(config, null, 2);
});
// Batch file processing with temporary workspace
async function batchProcessFiles(cache, inputFiles, processor) {
return await cacache.tmp.withTmp(cache, async (tmpDir) => {
const workDir = path.join(tmpDir, 'batch-work');
await fs.mkdir(workDir, { recursive: true });
const results = [];
for (const inputFile of inputFiles) {
const workFile = path.join(workDir, path.basename(inputFile));
// Copy input to work directory
await fs.copyFile(inputFile, workFile);
// Process file
const result = await processor(workFile, workDir);
results.push(result);
}
return results;
});
}
Implement custom memoization strategies:
// Custom memoization with TTL
class TTLMemoization {
constructor(ttlMs = 5 * 60 * 1000) { // 5 minutes default
this.cache = new Map();
this.ttl = ttlMs;
}
get(key) {
const entry = this.cache.get(key);
if (!entry) return undefined;
if (Date.now() - entry.timestamp > this.ttl) {
this.cache.delete(key);
return undefined;
}
return entry.value;
}
set(key, value) {
this.cache.set(key, {
value,
timestamp: Date.now()
});
}
clear() {
const entries = Object.fromEntries(this.cache);
this.cache.clear();
return entries;
}
}
// Use custom memoization
const customMemo = new TTLMemoization(2 * 60 * 1000); // 2 minutes
const data = await cacache.get('./cache', 'api-data', {
memoize: customMemo
});
// Size-limited memoization
class SizeLimitedMemoization {
constructor(maxSize = 10 * 1024 * 1024) { // 10MB default
this.cache = new Map();
this.maxSize = maxSize;
this.currentSize = 0;
}
get(key) {
return this.cache.get(key);
}
set(key, value) {
const size = Buffer.isBuffer(value) ? value.length :
typeof value === 'string' ? Buffer.byteLength(value) :
JSON.stringify(value).length;
// Clear old entries if needed
while (this.currentSize + size > this.maxSize && this.cache.size > 0) {
const firstKey = this.cache.keys().next().value;
const firstValue = this.cache.get(firstKey);
const firstSize = Buffer.isBuffer(firstValue) ? firstValue.length :
typeof firstValue === 'string' ? Buffer.byteLength(firstValue) :
JSON.stringify(firstValue).length;
this.cache.delete(firstKey);
this.currentSize -= firstSize;
}
this.cache.set(key, value);
this.currentSize += size;
}
}
Advanced temporary directory usage:
// Parallel processing with isolated workspaces
async function parallelProcessing(cache, tasks, processor) {
return await Promise.all(
tasks.map(async (task, index) => {
return await cacache.tmp.withTmp(cache,
{ tmpPrefix: `task-${index}-` },
async (tmpDir) => {
console.log(`Processing task ${index} in ${tmpDir}`);
return await processor(task, tmpDir);
}
);
})
);
}
// Staged processing with intermediate results
async function stagedProcessing(cache, input, stages) {
return await cacache.tmp.withTmp(cache, async (tmpDir) => {
let currentData = input;
for (let i = 0; i < stages.length; i++) {
const stageDir = path.join(tmpDir, `stage-${i}`);
await fs.mkdir(stageDir, { recursive: true });
console.log(`Running stage ${i}: ${stages[i].name}`);
currentData = await stages[i].process(currentData, stageDir);
// Optionally save intermediate results
if (stages[i].saveIntermediate) {
const intermediateFile = path.join(stageDir, 'result.json');
await fs.writeFile(intermediateFile, JSON.stringify(currentData, null, 2));
}
}
return currentData;
});
}
// Example usage
const result = await stagedProcessing('./cache', initialData, [
{
name: 'validation',
process: async (data, stageDir) => {
// Validate data
return data.filter(item => item.valid);
}
},
{
name: 'transformation',
process: async (data, stageDir) => {
// Transform data
return data.map(item => ({ ...item, processed: true }));
},
saveIntermediate: true
},
{
name: 'aggregation',
process: async (data, stageDir) => {
// Aggregate results
return {
count: data.length,
items: data
};
}
}
]);
Prefer withTmp() for most operations, since it cleans the directory up automatically; use mkdir() when you need control over cleanup timing, as in the sketch below.
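A minimal sketch of that kind of deferred cleanup, assuming a long-lived workspace directory (the prefix and shutdown hook are illustrative): the directory is created once with tmp.mkdir(), reused by later operations, and removed only at shutdown, something withTmp()'s per-callback cleanup cannot express.
const fs = require('fs/promises');
const cacache = require('cacache');
let workspace;
async function getWorkspace() {
  // Create the temporary directory once and reuse it across operations
  if (!workspace) {
    workspace = await cacache.tmp.mkdir('./cache', { tmpPrefix: 'workspace-' });
  }
  return workspace;
}
process.on('SIGTERM', async () => {
  // Cleanup is deferred until shutdown instead of after each operation
  if (workspace) {
    await fs.rm(workspace, { recursive: true, force: true });
  }
  process.exit(0);
});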
// Monitor and manage memory usage
function getMemoryStats() {
const usage = process.memoryUsage();
return {
heapUsed: Math.round(usage.heapUsed / 1024 / 1024),
heapTotal: Math.round(usage.heapTotal / 1024 / 1024),
external: Math.round(usage.external / 1024 / 1024)
};
}
// Clear memoization when memory usage is high
setInterval(() => {
const stats = getMemoryStats();
if (stats.heapUsed > 200) { // > 200MB
console.log(`High memory usage (${stats.heapUsed}MB), clearing cache`);
cacache.clearMemoized();
}
}, 30000); // Check every 30 seconds
Utility operations may encounter various errors:
// Handle temporary directory errors
try {
const result = await cacache.tmp.withTmp('./cache', async (tmpDir) => {
// Operations that might fail
return await riskyOperation(tmpDir);
});
} catch (error) {
console.error('Temporary operation failed:', error);
// Temporary directory is still cleaned up automatically
}
// Handle memoization clearing
try {
const cleared = cacache.clearMemoized();
console.log('Cleared entries:', Object.keys(cleared).length);
} catch (error) {
console.warn('Failed to clear memoization:', error);
// Not critical, continue operation
}
Custom memoization objects must implement get() and set() methods (see the sketch below).
withTmp() automatically cleans up temporary directories.
mkdir() creates directories that must be manually cleaned up.
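Since a plain Map already provides get() and set(), it can serve as a custom memoization cache; a minimal sketch (the cache path and key are illustrative), assuming the same object is passed to both put() and get() so the read can be served from memory:
const cacache = require('cacache');
// A Map satisfies the get()/set() interface expected of custom memoization objects
const memo = new Map();
// Pass the same memoize object to both calls so they share entries
await cacache.put('./cache', 'report', Buffer.from('cached bytes'), { memoize: memo });
const { data } = await cacache.get('./cache', 'report', { memoize: memo });
console.log(memo.size); // > 0: the entry was memoized into our Map
console.log(data.toString()); // 'cached bytes'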