Low-level operations for managing the cache index structure, including compaction and direct index manipulation.
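Both operations work on index "buckets": append-only files whose paths are derived from a hash of the cache key. As a rough sketch of how a key maps to its bucket (this assumes cacache's current index-v5 on-disk layout, which is an internal detail and may change between versions; bucketPathForKey is an illustrative helper, not part of the API):
const crypto = require('crypto');
const path = require('path');
// Derive the bucket file path that holds index entries for a given key
function bucketPathForKey(cache, key) {
  const hash = crypto.createHash('sha256').update(key).digest('hex');
  return path.join(cache, 'index-v5', hash.slice(0, 2), hash.slice(2, 4), hash.slice(4));
}
console.log(bucketPathForKey('./cache', 'my-key'));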
Compacts the index bucket for a given key by removing duplicate and invalid entries, and optionally filtering entries based on custom criteria.
/**
* Compacts index by removing duplicates and invalid entries
* @param {string} cache - Path to cache directory
* @param {string} key - Cache key to compact (compacts the bucket containing this key)
* @param {function} matchFn - Function to determine if two entries match
* @param {object} opts - Options object
* @param {function} [opts.validateEntry] - Function to validate entries (return true to keep)
* @param {string} [opts.tmpPrefix] - Prefix for temporary files
* @returns {Promise<EntryObject[]>} Promise resolving to array of compacted entries
*/
function index.compact(cache, key, matchFn, opts = {});
Usage Examples:
const cacache = require('cacache');
// Basic compaction - remove duplicates with same key
await cacache.index.compact('./cache', 'my-key', (oldEntry, newEntry) => {
  return oldEntry.key === newEntry.key;
});
// Compact with custom matching function
await cacache.index.compact('./cache', 'api-data', (oldEntry, newEntry) => {
  // Match entries with same key and metadata source
  return oldEntry.key === newEntry.key &&
    oldEntry.metadata?.source === newEntry.metadata?.source;
});
// Compact with entry validation
const compactedEntries = await cacache.index.compact('./cache', 'my-key',
  (oldEntry, newEntry) => oldEntry.key === newEntry.key,
  {
    validateEntry: (entry) => {
      // Keep only entries newer than 1 day
      const oneDayAgo = Date.now() - (24 * 60 * 60 * 1000);
      return entry.time > oneDayAgo;
    }
  }
);
console.log(`Compacted to ${compactedEntries.length} entries`);
// Compact with custom temporary file prefix
await cacache.index.compact('./cache', 'temp-data',
  (oldEntry, newEntry) => oldEntry.key === newEntry.key,
  {
    tmpPrefix: 'compaction-'
  }
);
Directly inserts an entry into the cache index without storing content. This is typically used internally but can be useful for advanced cache management.
/**
* Inserts entry into cache index
* @param {string} cache - Path to cache directory
* @param {string} key - Cache key for the entry
* @param {string} integrity - Content integrity hash (SRI format)
* @param {object} opts - Options object
* @param {object} [opts.metadata] - User-defined metadata for the entry
* @param {number} [opts.size] - Content size in bytes
* @param {number} [opts.time] - Custom timestamp (default: Date.now())
* @returns {Promise<EntryObject>} Promise resolving to the inserted entry object
*/
function index.insert(cache, key, integrity, opts = {});
Usage Examples:
// Insert index entry with known content hash
const entry = await cacache.index.insert('./cache', 'external-content', 'sha512-abc123...', {
  metadata: {
    source: 'external',
    url: 'https://example.com/data.json'
  },
  size: 1024,
  time: Date.now()
});
console.log('Inserted entry:', entry);
// Insert entry for content that exists elsewhere
const integrityHash = 'sha512-def456...';
const linkedEntry = await cacache.index.insert('./cache', 'linked-content', integrityHash, {
  metadata: {
    type: 'symlink',
    actualPath: '/path/to/actual/file'
  }
});
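index.insert() records only the index entry, so reads for such a key will fail until content actually exists under the given integrity. A small sketch of guarding against that with cacache.get.hasContent (reusing integrityHash from the example above):
const contentInfo = await cacache.get.hasContent('./cache', integrityHash);
if (!contentInfo) {
  // cacache.get('./cache', 'linked-content') would reject until the content is written
  console.warn(`No content stored yet for ${integrityHash}`);
}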
// Insert multiple related entries
const baseHash = 'sha512-ghi789...';
const variants = ['small', 'medium', 'large'];
const entries = await Promise.all(
  variants.map(variant =>
    cacache.index.insert('./cache', `image-${variant}`, baseHash, {
      metadata: {
        variant: variant,
        originalKey: 'base-image'
      },
      size: 2048
    })
  )
);
console.log(`Inserted ${entries.length} variant entries`);
Implement different compaction strategies based on your use case:
// Time-based compaction - keep only recent entries (newer than one hour)
async function compactByTime(cache, key) {
  return await cacache.index.compact(cache, key,
    (oldEntry, newEntry) => oldEntry.key === newEntry.key,
    {
      validateEntry: (entry) => {
        const hourAgo = Date.now() - (60 * 60 * 1000);
        return entry.time > hourAgo;
      }
    }
  );
}
// Size-based compaction - prefer smaller entries
async function compactBySize(cache, key) {
  return await cacache.index.compact(cache, key,
    (oldEntry, newEntry) => {
      if (oldEntry.key === newEntry.key) {
        // Drop the older entry only when it is at least as large as the newer one;
        // a smaller older entry is kept alongside it, since compact always retains
        // the newest entry of each matched pair.
        return oldEntry.size >= newEntry.size;
      }
      return false;
    }
  );
}
// Metadata-based compaction - keep entries with priority metadata
async function compactByPriority(cache, key) {
  return await cacache.index.compact(cache, key,
    (oldEntry, newEntry) => oldEntry.key === newEntry.key,
    {
      validateEntry: (entry) => {
        return entry.metadata?.priority === 'high';
      }
    }
  );
}
Perform multiple index operations efficiently:
// Batch insert multiple entries
async function batchInsertEntries(cache, entries) {
  const results = await Promise.all(
    entries.map(async ({ key, integrity, ...opts }) => {
      try {
        const entry = await cacache.index.insert(cache, key, integrity, opts);
        return { success: true, key, entry };
      } catch (error) {
        return { success: false, key, error: error.message };
      }
    })
  );
  const successful = results.filter(r => r.success).length;
  const failed = results.filter(r => !r.success).length;
  console.log(`Batch insert: ${successful} successful, ${failed} failed`);
  return results;
}
// Example usage
const entriesToInsert = [
  { key: 'entry1', integrity: 'sha512-abc...', metadata: { type: 'data' } },
  { key: 'entry2', integrity: 'sha512-def...', metadata: { type: 'config' } },
  { key: 'entry3', integrity: 'sha512-ghi...', metadata: { type: 'log' } }
];
await batchInsertEntries('./cache', entriesToInsert);
Maintain index integrity and performance:
// Clean up orphaned index entries (entries without content)
async function cleanupOrphanedEntries(cache) {
  const entries = await cacache.ls(cache);
  const orphanedKeys = [];
  for (const [key, entry] of Object.entries(entries)) {
    const hasContent = await cacache.get.hasContent(cache, entry.integrity);
    if (!hasContent) {
      orphanedKeys.push(key);
    }
  }
  console.log(`Found ${orphanedKeys.length} orphaned entries`);
  // Compact each orphaned entry's bucket to remove it
  await Promise.all(
    orphanedKeys.map(key =>
      cacache.index.compact(cache, key,
        (oldEntry, newEntry) => oldEntry.key === newEntry.key,
        {
          validateEntry: (entry) => {
            // Remove orphaned entries
            return !orphanedKeys.includes(entry.key);
          }
        }
      )
    )
  );
  return orphanedKeys;
}
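A possible way to use the helper above, for example after content files have been pruned out of band (the cache path is illustrative):
const removedKeys = await cleanupOrphanedEntries('./cache');
console.log(`Removed index entries for ${removedKeys.length} orphaned keys`);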
// Defragment index by compacting all buckets
async function defragmentIndex(cache) {
  const entries = await cacache.ls(cache);
  const processedBuckets = new Set();
  for (const key of Object.keys(entries)) {
    // Calculate bucket hash to avoid processing same bucket multiple times
    const bucketHash = require('crypto')
      .createHash('sha256')
      .update(key)
      .digest('hex');
    if (!processedBuckets.has(bucketHash)) {
      await cacache.index.compact(cache, key,
        (oldEntry, newEntry) => oldEntry.key === newEntry.key
      );
      processedBuckets.add(bucketHash);
    }
  }
  console.log(`Defragmented ${processedBuckets.size} index buckets`);
}
Create backups of index data:
// Export index data for backup
async function exportIndexData(cache) {
  const entries = await cacache.ls(cache);
  const indexData = {
    timestamp: new Date().toISOString(),
    entries: Object.entries(entries).map(([key, entry]) => ({
      key,
      integrity: entry.integrity,
      size: entry.size,
      time: entry.time,
      metadata: entry.metadata
    }))
  };
  return indexData;
}
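The exported object is plain data, so it can be written straight to disk; a minimal sketch of persisting it (the backup file name is illustrative):
const fs = require('fs/promises');
// Persist the exported index data as JSON
const backupData = await exportIndexData('./cache');
await fs.writeFile('./index-backup.json', JSON.stringify(backupData, null, 2));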
// Restore index data from backup
async function restoreIndexData(cache, indexData) {
  console.log(`Restoring ${indexData.entries.length} index entries`);
  const results = await Promise.all(
    indexData.entries.map(async (entryData) => {
      try {
        await cacache.index.insert(cache, entryData.key, entryData.integrity, {
          metadata: entryData.metadata,
          size: entryData.size,
          time: entryData.time
        });
        return { success: true, key: entryData.key };
      } catch (error) {
        return { success: false, key: entryData.key, error: error.message };
      }
    })
  );
  const successful = results.filter(r => r.success).length;
  console.log(`Restored ${successful} of ${indexData.entries.length} entries`);
  return results;
}
// Usage
const backup = await exportIndexData('./cache');
await restoreIndexData('./new-cache', backup);
// Process large indexes in chunks to manage memory
async function compactLargeIndex(cache, keys, chunkSize = 100) {
  for (let i = 0; i < keys.length; i += chunkSize) {
    const chunk = keys.slice(i, i + chunkSize);
    await Promise.all(
      chunk.map(key =>
        cacache.index.compact(cache, key,
          (oldEntry, newEntry) => oldEntry.key === newEntry.key
        )
      )
    );
    console.log(`Compacted chunk ${Math.floor(i / chunkSize) + 1}`);
    // Allow event loop to process other tasks
    await new Promise(resolve => setImmediate(resolve));
  }
}
Index management operations may encounter various errors:
try {
  await cacache.index.compact('./cache', 'some-key', matchFn);
} catch (error) {
  switch (error.code) {
    case 'ENOENT':
      console.log('Cache or index file does not exist');
      break;
    case 'EACCES':
      console.error('Permission denied accessing index');
      break;
    case 'EMFILE':
      console.error('Too many open files');
      break;
    default:
      console.error('Unexpected error:', error);
  }
}
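Transient failures such as EMFILE can often be retried. A minimal retry sketch (the attempt count and backoff are illustrative, not part of cacache):
async function compactWithRetry(cache, key, matchFn, attempts = 3) {
  for (let attempt = 1; attempt <= attempts; attempt++) {
    try {
      return await cacache.index.compact(cache, key, matchFn);
    } catch (error) {
      // Rethrow non-transient errors and give up after the final attempt
      if (error.code !== 'EMFILE' || attempt === attempts) {
        throw error;
      }
      await new Promise(resolve => setTimeout(resolve, 100 * attempt));
    }
  }
}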