Functionality for discovering and enumerating cache contents, providing information about stored entries and their metadata.
Returns an object containing all cache entries, keyed by their cache keys.
```javascript
/**
 * Lists all cache entries
 * @param {string} cache - Path to cache directory
 * @returns {Promise<object>} Promise resolving to object mapping keys to EntryObjects
 */
function ls(cache);
```

Usage Examples:

```javascript
const cacache = require('cacache');

// List all entries
const entries = await cacache.ls('./cache');
console.log('Cache contains', Object.keys(entries).length, 'entries');

// Iterate through entries
for (const [key, entry] of Object.entries(entries)) {
  console.log(`Key: ${key}`);
  console.log(`  Integrity: ${entry.integrity}`);
  console.log(`  Size: ${entry.size} bytes`);
  console.log(`  Time: ${new Date(entry.time)}`);
  console.log(`  Metadata:`, entry.metadata);
  console.log(`  Path: ${entry.path}`);
}
```
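Each value in the returned object is an entry record. Based on the fields used above, a single entry looks roughly like this (the values are made up for illustration; consult your cacache version for the exact shape):

```javascript
// Approximate shape of one entry (illustrative values only)
const exampleEntry = {
  key: 'my-key',                     // the cache key the entry was stored under
  integrity: 'sha512-BaSE64Hash...', // Subresource Integrity hash of the content
  path: './cache/content/...',       // where the content lives on disk
  size: 4096,                        // content size in bytes
  time: 1700000000000,               // insertion time, in ms since the epoch
  metadata: { source: 'api' }        // arbitrary user metadata, if provided
};
```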
```javascript
// Check if specific key exists
const entries = await cacache.ls('./cache');
if ('my-key' in entries) {
  console.log('Entry found:', entries['my-key']);
} else {
  console.log('Entry not found');
}
```
```javascript
// Get cache statistics
const entries = await cacache.ls('./cache');
const totalSize = Object.values(entries).reduce((sum, entry) => sum + entry.size, 0);
const totalEntries = Object.keys(entries).length;
console.log(`Cache contains ${totalEntries} entries totaling ${totalSize} bytes`);
```

Returns a readable object stream that emits cache entries one at a time, useful for large caches to avoid loading all entries into memory.
```javascript
/**
 * Returns stream of cache entries
 * @param {string} cache - Path to cache directory
 * @returns {ReadableStream} Readable object stream of EntryObjects
 */
function ls.stream(cache);
```

Usage Examples:

```javascript
// Stream through all entries
const stream = cacache.ls.stream('./cache');
stream.on('data', (entry) => {
  console.log(`Found entry: ${entry.key} (${entry.size} bytes)`);
});
stream.on('end', () => {
  console.log('Finished listing entries');
});
```
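Because the return value is a readable object stream, it can also be consumed with async iteration on any recent Node.js; this sketch assumes the stream supports `Symbol.asyncIterator`, as standard Node readable streams do:

```javascript
// Same traversal using for await...of instead of 'data' events
for await (const entry of cacache.ls.stream('./cache')) {
  console.log(`Found entry: ${entry.key} (${entry.size} bytes)`);
}
```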
```javascript
// Collect entries with stream
const { pipeline } = require('stream/promises');
const { Transform } = require('stream');

const entries = [];
await pipeline(
  cacache.ls.stream('./cache'),
  new Transform({
    objectMode: true,
    transform(entry, encoding, callback) {
      entries.push(entry);
      callback();
    }
  })
);
console.log(`Collected ${entries.length} entries`);
```
```javascript
// Filter entries during streaming
const stream = cacache.ls.stream('./cache');
const recentEntries = [];
const oneDayAgo = Date.now() - (24 * 60 * 60 * 1000);

stream.on('data', (entry) => {
  if (entry.time > oneDayAgo) {
    recentEntries.push(entry);
  }
});
stream.on('end', () => {
  console.log(`Found ${recentEntries.length} recent entries`);
});
```

Find entries matching specific conditions:
```javascript
const entries = await cacache.ls('./cache');

// Find large entries (> 1MB)
const largeEntries = Object.entries(entries)
  .filter(([key, entry]) => entry.size > 1024 * 1024)
  .map(([key, entry]) => ({ key, ...entry }));
console.log(`Found ${largeEntries.length} large entries`);

// Find entries by metadata
const apiEntries = Object.entries(entries)
  .filter(([key, entry]) => entry.metadata && entry.metadata.source === 'api')
  .map(([key, entry]) => ({ key, ...entry }));

// Find entries by age
const weekAgo = Date.now() - (7 * 24 * 60 * 60 * 1000);
const oldEntries = Object.entries(entries)
  .filter(([key, entry]) => entry.time < weekAgo)
  .map(([key, entry]) => ({ key, ...entry }));
```

Analyze cache usage patterns:
```javascript
async function analyzeCacheUsage(cache) {
  const entries = await cacache.ls(cache);
  const stats = {
    totalEntries: 0,
    totalSize: 0,
    averageSize: 0,
    sizeDistribution: {},
    ageDistribution: {},
    metadataTypes: {}
  };

  // Basic statistics (guard against division by zero for an empty cache)
  stats.totalEntries = Object.keys(entries).length;
  stats.totalSize = Object.values(entries).reduce((sum, entry) => sum + entry.size, 0);
  stats.averageSize = stats.totalEntries > 0 ? stats.totalSize / stats.totalEntries : 0;

  // Size distribution
  for (const entry of Object.values(entries)) {
    const sizeCategory = entry.size < 1024 ? 'small' :
      entry.size < 1024 * 1024 ? 'medium' : 'large';
    stats.sizeDistribution[sizeCategory] = (stats.sizeDistribution[sizeCategory] || 0) + 1;
  }

  // Age distribution
  const now = Date.now();
  for (const entry of Object.values(entries)) {
    const ageHours = Math.floor((now - entry.time) / (60 * 60 * 1000));
    const ageCategory = ageHours < 1 ? 'recent' :
      ageHours < 24 ? 'today' :
      ageHours < 168 ? 'week' : 'old';
    stats.ageDistribution[ageCategory] = (stats.ageDistribution[ageCategory] || 0) + 1;
  }

  // Metadata analysis
  for (const entry of Object.values(entries)) {
    if (entry.metadata) {
      for (const key of Object.keys(entry.metadata)) {
        stats.metadataTypes[key] = (stats.metadataTypes[key] || 0) + 1;
      }
    }
  }

  return stats;
}

const stats = await analyzeCacheUsage('./cache');
console.log('Cache Statistics:', stats);
```

Identify entries that reference the same content. Because cacache stores content by hash, entries that share an integrity value share a single copy of the content on disk, so "duplicates" here means multiple keys pointing at the same content:
```javascript
async function findDuplicateContent(cache) {
  const entries = await cacache.ls(cache);
  const contentMap = new Map();

  // Group entries by content hash
  for (const [key, entry] of Object.entries(entries)) {
    if (!contentMap.has(entry.integrity)) {
      contentMap.set(entry.integrity, []);
    }
    contentMap.get(entry.integrity).push({ key, ...entry });
  }

  // Find integrity values shared by more than one key
  const duplicates = [];
  for (const [integrity, entryList] of contentMap.entries()) {
    if (entryList.length > 1) {
      duplicates.push({
        integrity,
        count: entryList.length,
        entries: entryList,
        // Combined logical size if every key held its own copy
        totalSize: entryList[0].size * entryList.length,
        // Disk space saved because cacache keeps only one copy of shared content
        spaceSaved: entryList[0].size * (entryList.length - 1)
      });
    }
  }
  return duplicates;
}

const duplicates = await findDuplicateContent('./cache');
console.log(`Found ${duplicates.length} sets of duplicate content`);
duplicates.forEach(dup => {
  console.log(`${dup.count} entries share content ${dup.integrity.slice(0, 16)}...`);
  console.log(`  Space saved by deduplication: ${dup.spaceSaved} bytes`);
});
```

Process large caches efficiently using streams:
```javascript
const { pipeline } = require('stream/promises');
const { Transform } = require('stream');

// Process entries in batches
async function processEntriesInBatches(cache, batchSize = 100) {
  let batch = [];
  let batchCount = 0;

  await pipeline(
    cacache.ls.stream(cache),
    new Transform({
      objectMode: true,
      transform(entry, encoding, callback) {
        batch.push(entry);
        if (batch.length >= batchSize) {
          console.log(`Processing batch ${++batchCount} (${batch.length} entries)`);
          // Process batch here
          batch = [];
        }
        callback();
      },
      flush(callback) {
        if (batch.length > 0) {
          console.log(`Processing final batch (${batch.length} entries)`);
          // Process final batch
        }
        callback();
      }
    })
  );
}

await processEntriesInBatches('./cache', 50);
```

Create detailed reports of cache contents:
```javascript
const fs = require('fs');

async function exportCacheInventory(cache, outputFile) {
  const entries = await cacache.ls(cache);
  const inventory = {
    timestamp: new Date().toISOString(),
    cachePath: cache,
    summary: {
      totalEntries: Object.keys(entries).length,
      totalSize: Object.values(entries).reduce((sum, entry) => sum + entry.size, 0)
    },
    entries: Object.entries(entries).map(([key, entry]) => ({
      key,
      integrity: entry.integrity,
      size: entry.size,
      created: new Date(entry.time).toISOString(),
      metadata: entry.metadata,
      path: entry.path
    }))
  };

  await fs.promises.writeFile(outputFile, JSON.stringify(inventory, null, 2));
  console.log(`Cache inventory exported to ${outputFile}`);
}

await exportCacheInventory('./cache', './cache-inventory.json');
```

When choosing between the two listing APIs:

- `ls()` for small to medium caches (< 10,000 entries)
- `ls.stream()` for large caches to avoid memory issues
- `ls.stream()` processes entries incrementally without loading all entries into memory
```javascript
// Memory-efficient processing of large caches
const stream = cacache.ls.stream('./cache');
let entryCount = 0;
let totalSize = 0;

stream.on('data', (entry) => {
  entryCount++;
  totalSize += entry.size;
  // Process entry immediately, don't accumulate
  console.log(`Entry ${entryCount}: ${entry.key}`);
});
stream.on('end', () => {
  console.log(`Processed ${entryCount} entries, total size: ${totalSize}`);
});
```

Listing operations are read-only and safe to run concurrently with other operations:
```javascript
// Safe to run listing while other operations are in progress
const [entries, stats] = await Promise.all([
  cacache.ls('./cache'),
  cacache.verify('./cache')
]);
```

Listing operations may encounter various errors:
```javascript
try {
  const entries = await cacache.ls('./cache');
  console.log('Found', Object.keys(entries).length, 'entries');
} catch (error) {
  switch (error.code) {
    case 'ENOENT':
      console.log('Cache directory does not exist');
      break;
    case 'EACCES':
      console.error('Permission denied reading cache');
      break;
    case 'ENOTDIR':
      console.error('Cache path is not a directory');
      break;
    default:
      console.error('Unexpected error:', error);
  }
}
```
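A missing cache directory is often a benign first-run condition. Assuming `ls()` surfaces `ENOENT` for a missing directory, as the example above does, a small wrapper (a hypothetical helper, not part of cacache) can treat that case as an empty cache:

```javascript
// Hypothetical convenience wrapper: treat a missing cache directory
// as an empty cache, but let real failures propagate.
async function lsOrEmpty(cache) {
  try {
    return await cacache.ls(cache);
  } catch (error) {
    if (error.code === 'ENOENT') {
      return {}; // no cache yet: same shape as an empty listing
    }
    throw error; // EACCES, ENOTDIR, etc. remain real errors
  }
}

const entries = await lsOrEmpty('./cache');
```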