tessl install tessl/npm-cache-manager@7.2.0

Cache Manager for Node.js with support for multi-store caching, background refresh, and Keyv-compatible storage adapters.
cache-manager provides a comprehensive event system for monitoring cache operations. All cache operations emit events that can be listened to for logging, metrics, debugging, or triggering side effects.
Add event listeners to monitor cache operations.
/**
* Register an event listener
* @param event - Event name to listen for
* @param listener - Callback function for the event
* @returns EventEmitter instance for chaining
*/
on<E extends keyof Events>(event: E, listener: Events[E]): EventEmitter;

Behavioral Details:
Listeners can be registered for any supported event (get, mget, set, mset, del, mdel, clear, ttl, refresh).

Usage Examples:
import { createCache } from 'cache-manager';
const cache = createCache();
// Single listener
cache.on('get', ({ key, value }) => {
console.log(`Get: ${key} = ${value}`);
});
// Multiple listeners for same event
cache.on('set', ({ key, value }) => {
console.log(`Cached: ${key}`);
});
cache.on('set', ({ key, value }) => {
metrics.cacheWrites++;
});
// Chaining
cache
.on('get', logGet)
.on('set', logSet)
.on('del', logDel);

Remove previously registered event listeners.
/**
* Remove an event listener
* @param event - Event name to stop listening for
* @param listener - Callback function to remove (must be same reference as registered)
* @returns EventEmitter instance for chaining
*/
off<E extends keyof Events>(event: E, listener: Events[E]): EventEmitter;

Behavioral Details:
Usage Examples:
import { createCache } from 'cache-manager';
const cache = createCache();
// Define listener function (must be named, not inline)
const logGet = ({ key, value }: { key: string; value?: any }) => {
console.log(`Get: ${key} = ${value}`);
};
// Add listener
cache.on('get', logGet);
await cache.set('key', 'value');
await cache.get('key'); // Logs: "Get: key = value"
// Remove listener
cache.off('get', logGet);
await cache.get('key'); // No log output
// Removing non-existent listener (safe, no error)
cache.off('get', logGet); // No-op

Lifecycle Management:
import { createCache } from 'cache-manager';
const cache = createCache();
// Temporary listener
function setupTempListener() {
const tempListener = ({ key }: { key: string }) => {
console.log(`Temporary log: ${key}`);
};
cache.on('set', tempListener);
// Cleanup after 1 minute
setTimeout(() => {
cache.off('set', tempListener);
console.log('Temporary listener removed');
}, 60000);
}
// Request-scoped listener (Express.js example)
app.use((req, res, next) => {
const requestListener = ({ key, value }: any) => {
req.cacheLog = req.cacheLog || [];
req.cacheLog.push({ key, value });
};
cache.on('get', requestListener);
// Cleanup when response finishes
res.on('finish', () => {
cache.off('get', requestListener);
});
next();
});

Fired when a get() operation completes (successful or failed).
type Events = {
get<T>(data: { key: string; value?: T; store?: string; error?: unknown }): void;
// ... other events
};

Event Data:
key - The cache key that was retrieved (always present)
value - The retrieved value (present for cache hits, undefined for cache misses)
store - Store label indicating which store provided the value (in multi-store setups, e.g., "primary" or "secondary:0"). Only present for cache hits.
error - Error object if operation failed (rare; get doesn't usually throw on miss)

Usage Example:
import { createCache } from 'cache-manager';
const cache = createCache();
cache.on('get', ({ key, value, store, error }) => {
if (error) {
console.error(`Get failed for ${key}:`, error);
} else if (value !== undefined) {
console.log(`Cache hit for ${key} from ${store}:`, value);
} else {
console.log(`Cache miss for ${key}`);
}
});
await cache.set('user:123', { name: 'Alice' });
await cache.get('user:123'); // Triggers event with value
await cache.get('user:999'); // Triggers event with value: undefined (cache miss)

Advanced Patterns:
import { createCache } from 'cache-manager';
const cache = createCache();
// Cache hit rate tracking
let hits = 0;
let misses = 0;
cache.on('get', ({ value }) => {
if (value !== undefined) {
hits++;
} else {
misses++;
}
});
function getHitRate() {
const total = hits + misses;
return total > 0 ? (hits / total) * 100 : 0;
}
// Store-specific metrics
const storeMetrics = new Map<string, { hits: number; misses: number }>();
cache.on('get', ({ key, value, store }) => {
if (!store) return; // Only track when store is known
if (!storeMetrics.has(store)) {
storeMetrics.set(store, { hits: 0, misses: 0 });
}
const metrics = storeMetrics.get(store)!;
if (value !== undefined) {
metrics.hits++;
} else {
metrics.misses++;
}
});
// Alert on specific key misses
const criticalKeys = new Set(['config:app', 'config:db']);
cache.on('get', ({ key, value }) => {
if (criticalKeys.has(key) && value === undefined) {
console.error(`ALERT: Critical key ${key} not in cache!`);
// sendAlert(`Cache miss on critical key: ${key}`);
}
});

Fired when an mget() operation completes.
type Events = {
mget<T>(data: { keys: string[]; values?: T[]; error?: unknown }): void;
// ... other events
};

Event Data:
keys - Array of cache keys that were retrieved (always present)
values - Array of retrieved values if successful (same length as keys, with undefined for misses)
error - Error object if operation failed

Usage Example:
import { createCache } from 'cache-manager';
const cache = createCache();
cache.on('mget', ({ keys, values, error }) => {
if (error) {
console.error(`Multi-get failed for ${keys.join(', ')}:`, error);
} else if (values) {
const hitCount = values.filter(v => v !== undefined).length;
const missCount = values.filter(v => v === undefined).length;
console.log(`Retrieved ${keys.length} keys: ${hitCount} hits, ${missCount} misses`);
}
});
await cache.mset([
{ key: 'a', value: 1 },
{ key: 'b', value: 2 },
]);
await cache.mget(['a', 'b', 'c']);
// Triggers event with keys: ['a', 'b', 'c'] and values: [1, 2, undefined]

Batch Analytics:
import { createCache } from 'cache-manager';
const cache = createCache();
// Track batch efficiency
const batchStats = {
totalRequests: 0,
totalKeys: 0,
totalHits: 0,
totalMisses: 0,
};
cache.on('mget', ({ keys, values }) => {
if (!values) return;
batchStats.totalRequests++;
batchStats.totalKeys += keys.length;
const hits = values.filter(v => v !== undefined).length;
const misses = values.filter(v => v === undefined).length;
batchStats.totalHits += hits;
batchStats.totalMisses += misses;
});
function getBatchStats() {
return {
...batchStats,
avgBatchSize: batchStats.totalKeys / batchStats.totalRequests || 0,
hitRate: batchStats.totalKeys > 0
? (batchStats.totalHits / batchStats.totalKeys) * 100
: 0,
};
}
// Large batch warning
cache.on('mget', ({ keys }) => {
if (keys.length > 1000) {
console.warn(`Large batch: ${keys.length} keys requested`);
}
});

Fired when a set() operation completes (successful or failed).
type Events = {
set<T>(data: { key: string; value: T; store?: string; error?: unknown }): void;
// ... other events
};

Event Data:
key - The cache key that was set (always present)
value - The value that was stored (always present)
store - Store label indicating which store received the value (in multi-store setups, e.g., "primary" or "secondary:0"). May fire multiple times for multi-store.
error - Error object if operation failed

Usage Example:
import { createCache } from 'cache-manager';
const cache = createCache();
cache.on('set', ({ key, value, store, error }) => {
if (error) {
console.error(`Set failed for ${key}:`, error);
} else {
console.log(`Cached ${key} in ${store}:`, value);
}
});
await cache.set('config:theme', 'dark');
// Triggers event with key: 'config:theme', value: 'dark'

Advanced Patterns:
import { createCache } from 'cache-manager';
const cache = createCache();
// Track cache writes
let writeCount = 0;
let writtenBytes = 0;
cache.on('set', ({ key, value }) => {
writeCount++;
writtenBytes += JSON.stringify({ key, value }).length;
});
// Invalidation tracking
const invalidationLog: Array<{ key: string; timestamp: number }> = [];
cache.on('set', ({ key }) => {
invalidationLog.push({ key, timestamp: Date.now() });
// Keep only last 1000 invalidations
if (invalidationLog.length > 1000) {
invalidationLog.shift();
}
});
// Large value warning
cache.on('set', ({ key, value }) => {
const size = JSON.stringify(value).length;
if (size > 1024 * 1024) { // 1MB
console.warn(`Large value cached: ${key} (${(size / 1024 / 1024).toFixed(2)}MB)`);
}
});
// Duplicate write detection
const recentWrites = new Map<string, number>();
cache.on('set', ({ key }) => {
const lastWrite = recentWrites.get(key);
const now = Date.now();
if (lastWrite && now - lastWrite < 1000) {
console.warn(`Rapid re-write detected: ${key} (${now - lastWrite}ms since last write)`);
}
recentWrites.set(key, now);
// Cleanup old entries
if (recentWrites.size > 10000) {
const cutoff = now - 60000;
for (const [k, t] of recentWrites.entries()) {
if (t < cutoff) recentWrites.delete(k);
}
}
});
// Multi-store consistency monitoring
let primaryWrites = 0;
let secondaryWrites = 0;
cache.on('set', ({ store }) => {
if (store === 'primary') {
primaryWrites++;
} else if (store?.startsWith('secondary')) {
secondaryWrites++;
}
});
setInterval(() => {
if (primaryWrites !== secondaryWrites) {
console.warn(`Write inconsistency: primary=${primaryWrites}, secondary=${secondaryWrites}`);
}
}, 10000);

Fired when an mset() operation completes.
type Events = {
mset<T>(data: {
list: Array<{ key: string; value: T; ttl?: number }>;
error?: unknown;
}): void;
// ... other events
};

Event Data:
list - Array of items that were set (with keys, values, and TTLs) (always present)
error - Error object if operation failed

Usage Example:
import { createCache } from 'cache-manager';
const cache = createCache();
cache.on('mset', ({ list, error }) => {
if (error) {
console.error('Multi-set failed:', error);
} else {
console.log(`Cached ${list.length} items:`, list.map(i => i.key));
}
});
await cache.mset([
{ key: 'x', value: 10, ttl: 5000 },
{ key: 'y', value: 20 },
]);
// Triggers event with list containing both items

Bulk Operation Tracking:
import { createCache } from 'cache-manager';
const cache = createCache();
// Track bulk operations
const bulkStats = {
operations: 0,
totalItems: 0,
totalBytes: 0,
};
cache.on('mset', ({ list }) => {
bulkStats.operations++;
bulkStats.totalItems += list.length;
bulkStats.totalBytes += JSON.stringify(list).length;
});
// Efficiency metrics
function getBulkEfficiency() {
return {
avgItemsPerBatch: bulkStats.totalItems / bulkStats.operations || 0,
avgBatchSize: bulkStats.totalBytes / bulkStats.operations || 0,
};
}
// TTL distribution analysis
const ttlDistribution = {
none: 0,
short: 0, // < 1 minute
medium: 0, // 1-10 minutes
long: 0, // > 10 minutes
};
cache.on('mset', ({ list }) => {
for (const item of list) {
if (!item.ttl) {
ttlDistribution.none++;
} else if (item.ttl < 60000) {
ttlDistribution.short++;
} else if (item.ttl < 600000) {
ttlDistribution.medium++;
} else {
ttlDistribution.long++;
}
}
});

Fired when a del() operation completes (successful or failed).
type Events = {
del(data: { key: string; error?: unknown }): void;
// ... other events
};

Event Data:
key - The cache key that was deleted (always present)
error - Error object if operation failed

Usage Example:
import { createCache } from 'cache-manager';
const cache = createCache();
cache.on('del', ({ key, error }) => {
if (error) {
console.error(`Delete failed for ${key}:`, error);
} else {
console.log(`Deleted ${key} from cache`);
}
});
await cache.set('temp:data', 'value');
await cache.del('temp:data');
// Triggers event with key: 'temp:data'

Invalidation Tracking:
import { createCache } from 'cache-manager';
const cache = createCache();
// Audit trail
const deletionLog: Array<{ key: string; timestamp: number; reason?: string }> = [];
cache.on('del', ({ key }) => {
deletionLog.push({
key,
timestamp: Date.now(),
// Add context if available (would need to pass through custom metadata)
});
// Persist audit log periodically
if (deletionLog.length % 100 === 0) {
persistAuditLog(deletionLog.slice(-100));
}
});
// Deletion rate monitoring
let deletionCount = 0;
let deletionRateWindow = Date.now();
cache.on('del', () => {
deletionCount++;
const now = Date.now();
if (now - deletionRateWindow > 60000) {
const rate = deletionCount / 60; // per second
console.log(`Deletion rate: ${rate.toFixed(2)}/s`);
if (rate > 100) {
console.warn('High deletion rate detected!');
}
deletionCount = 0;
deletionRateWindow = now;
}
});
// Pattern detection
const deletionPatterns = new Map<string, number>();
cache.on('del', ({ key }) => {
// Extract pattern (e.g., "user:*" from "user:123")
const pattern = key.split(':')[0];
deletionPatterns.set(pattern, (deletionPatterns.get(pattern) || 0) + 1);
});
function getTopDeletionPatterns(limit = 10) {
return Array.from(deletionPatterns.entries())
.sort((a, b) => b[1] - a[1])
.slice(0, limit);
}

Fired when an mdel() operation completes (successful or failed).
type Events = {
mdel(data: { keys: string[]; error?: unknown }): void;
// ... other events
};

Event Data:
keys - Array of cache keys that were deleted (always present)
error - Error object if operation failed

Usage Example:
import { createCache } from 'cache-manager';
const cache = createCache();
cache.on('mdel', ({ keys, error }) => {
if (error) {
console.error(`Multi-delete failed for ${keys.join(', ')}:`, error);
} else {
console.log(`Deleted ${keys.length} keys from cache:`, keys);
}
});
await cache.mset([
{ key: 'temp:1', value: 'data1' },
{ key: 'temp:2', value: 'data2' },
{ key: 'temp:3', value: 'data3' },
]);
await cache.mdel(['temp:1', 'temp:2', 'temp:3']);
// Triggers event with keys: ['temp:1', 'temp:2', 'temp:3']

Bulk Invalidation Monitoring:
import { createCache } from 'cache-manager';
const cache = createCache();
// Track bulk deletions
const bulkDeletions: Array<{ count: number; timestamp: number }> = [];
cache.on('mdel', ({ keys }) => {
bulkDeletions.push({
count: keys.length,
timestamp: Date.now(),
});
// Alert on large deletions
if (keys.length > 1000) {
console.warn(`Large bulk deletion: ${keys.length} keys`);
}
});
// Cleanup wave detection
function detectCleanupWaves() {
const now = Date.now();
const recentWindow = 60000; // 1 minute
const recentDeletions = bulkDeletions.filter(
d => now - d.timestamp < recentWindow
);
const totalDeleted = recentDeletions.reduce((sum, d) => sum + d.count, 0);
if (totalDeleted > 10000) {
console.warn(`Cleanup wave detected: ${totalDeleted} deletions in last minute`);
}
}

Fired when a clear() operation completes (successful or failed).
type Events = {
clear(error?: unknown): void;
// ... other events
};

Event Data:
error - Error object if operation failed (clear event has no other data)

Usage Example:
import { createCache } from 'cache-manager';
const cache = createCache();
cache.on('clear', (error) => {
if (error) {
console.error('Clear failed:', error);
} else {
console.log('Cache cleared successfully');
}
});
await cache.set('key1', 'value1');
await cache.set('key2', 'value2');
await cache.clear();
// Triggers event

Cache Reset Monitoring:
import { createCache } from 'cache-manager';
const cache = createCache();
// Track clear operations
const clearLog: Array<{ timestamp: number; initiator?: string }> = [];
cache.on('clear', (error) => {
if (error) {
console.error('Cache clear failed:', error);
// Alert administrators
return;
}
clearLog.push({
timestamp: Date.now(),
// Would need custom context to track initiator
});
console.warn('CACHE CLEARED - All entries removed');
// Alert if multiple clears in short time
const recentClears = clearLog.filter(
c => Date.now() - c.timestamp < 300000 // 5 minutes
);
if (recentClears.length > 3) {
console.error('ALERT: Multiple cache clears detected in short period!');
}
});
// Warm-up after clear
cache.on('clear', () => {
// Automatically repopulate critical data
setTimeout(async () => {
console.log('Re-warming cache after clear...');
await warmUpCache();
}, 1000);
});
async function warmUpCache() {
// Load critical data
const criticalData = await db.getCriticalConfig();
await cache.mset(
criticalData.map(item => ({
key: `config:${item.key}`,
value: item.value,
}))
);
}

Fired when a background refresh operation completes (successful or failed). This event is triggered by the wrap() function when the refresh threshold is reached.
type Events = {
refresh<T>(data: { key: string; value: T; error?: unknown }): void;
// ... other events
};

Event Data:
key - The cache key that was refreshed (always present)
value - The new value from the refresh, or old value if error occurred (always present)
error - Error object if refresh failed (old value remains cached if error)

Usage Example:
import { createCache } from 'cache-manager';
const cache = createCache();
let refreshCount = 0;
cache.on('refresh', ({ key, value, error }) => {
if (error) {
console.error(`Background refresh failed for ${key}:`, error);
// Old value remains in cache
} else {
refreshCount++;
console.log(`Background refresh succeeded for ${key}:`, value);
console.log(`Total refreshes: ${refreshCount}`);
}
});
async function fetchData() {
console.log('Fetching data...');
await new Promise(resolve => setTimeout(resolve, 100));
return { timestamp: Date.now() };
}
// Initial wrap - no refresh event
await cache.wrap('data', fetchData, 10000, 3000);
// Output: "Fetching data..."
// Wait until below threshold (7+ seconds)
await new Promise(resolve => setTimeout(resolve, 8000));
// This triggers background refresh
await cache.wrap('data', fetchData, 10000, 3000);
// Triggers refresh event after background execution completes

Refresh Monitoring:
import { createCache } from 'cache-manager';
const cache = createCache();
// Track refresh operations
const refreshMetrics = {
total: 0,
success: 0,
failed: 0,
avgDuration: 0,
refreshTimes: [] as number[],
};
const refreshStartTimes = new Map<string, number>();
// Would need to track start time via custom implementation
cache.on('refresh', ({ key, error }) => {
refreshMetrics.total++;
if (error) {
refreshMetrics.failed++;
console.error(`Refresh failed for ${key}:`, error);
// Alert on high failure rate
const failureRate = refreshMetrics.failed / refreshMetrics.total;
if (failureRate > 0.1) { // 10%
console.error(`High refresh failure rate: ${(failureRate * 100).toFixed(2)}%`);
}
} else {
refreshMetrics.success++;
}
});
// Refresh performance tracking
cache.on('refresh', ({ key, value }) => {
if (typeof value === 'object' && value && 'timestamp' in value) {
const now = Date.now();
const generationTime = now - (value as any).timestamp;
refreshMetrics.refreshTimes.push(generationTime);
// Keep only last 100 refresh times
if (refreshMetrics.refreshTimes.length > 100) {
refreshMetrics.refreshTimes.shift();
}
// Calculate average
const sum = refreshMetrics.refreshTimes.reduce((a, b) => a + b, 0);
refreshMetrics.avgDuration = sum / refreshMetrics.refreshTimes.length;
}
});
// Proactive refresh vs reactive fetch comparison
let proactiveRefreshes = 0;
let reactiveFetches = 0;
cache.on('refresh', ({ key }) => {
proactiveRefreshes++;
console.log(`Proactive refresh: ${key} (${proactiveRefreshes} total)`);
});
// This would need to be tracked in wrap function when cache miss occurs
// For demonstration, showing the concept
function trackCacheMiss(key: string) {
reactiveFetches++;
console.log(`Reactive fetch: ${key} (${reactiveFetches} total)`);
}
function getRefreshEffectiveness() {
const total = proactiveRefreshes + reactiveFetches;
return {
proactiveRate: total > 0 ? (proactiveRefreshes / total) * 100 : 0,
reactiveRate: total > 0 ? (reactiveFetches / total) * 100 : 0,
};
}

Refresh Error Handling:
import { createCache } from 'cache-manager';
const cache = createCache();
// Refresh failure tracking
const refreshFailures = new Map<string, number>();
cache.on('refresh', ({ key, error }) => {
if (error) {
const failures = refreshFailures.get(key) || 0;
refreshFailures.set(key, failures + 1);
// Alert on repeated failures
if (failures > 3) {
console.error(`ALERT: ${key} has failed ${failures} refresh attempts`);
// Consider disabling refresh for this key or alerting ops
}
} else {
// Reset failure count on success
refreshFailures.delete(key);
}
});
// Refresh timing analysis
cache.on('refresh', ({ key, value }) => {
console.log(`Refreshed ${key} in background - users not blocked`);
// Compare with synchronous fetch time (would need additional tracking)
// This demonstrates the benefit of background refresh
});

Fired when a ttl() operation completes (successful or failed). This event is emitted when querying the expiration time of a cache key.
type Events = {
ttl(data: { key: string; value?: any; error?: unknown }): void;
// ... other events
};

Event Data:
key - The cache key being queried (always present)
value - The raw stored data with expiration timestamp if found (may be undefined)
error - Error object if operation failed

Usage Example:
import { createCache } from 'cache-manager';
const cache = createCache();
cache.on('ttl', ({ key, value, error }) => {
if (error) {
console.error(`TTL query failed for ${key}:`, error);
} else if (value) {
console.log(`TTL for ${key}:`, value);
} else {
console.log(`No TTL found for ${key} (key may not exist)`);
}
});
// Set a value with TTL
await cache.set('session:abc', { userId: 123 }, 60000);
// Query TTL - triggers event
const ttl = await cache.ttl('session:abc');
console.log(`Expires at: ${ttl}`);
// Query non-existent key - triggers event with no value
await cache.ttl('missing:key');

TTL Monitoring:
import { createCache } from 'cache-manager';
const cache = createCache();
// Track TTL queries (may indicate inefficient code)
let ttlQueryCount = 0;
cache.on('ttl', ({ key }) => {
ttlQueryCount++;
if (ttlQueryCount % 100 === 0) {
console.warn(`High TTL query count: ${ttlQueryCount}`);
}
});
// Monitor keys approaching expiration
const expiringKeys: Array<{ key: string; expiresAt: number }> = [];
cache.on('ttl', ({ key, value }) => {
if (value && typeof value === 'number') {
const remaining = value - Date.now();
if (remaining > 0 && remaining < 60000) { // Less than 1 minute
expiringKeys.push({ key, expiresAt: value });
console.log(`${key} expires in ${Math.floor(remaining / 1000)}s`);
}
}
});

type Events = {
get<T>(data: { key: string; value?: T; store?: string; error?: unknown }): void;
mget<T>(data: { keys: string[]; values?: T[]; error?: unknown }): void;
set<T>(data: { key: string; value: T; store?: string; error?: unknown }): void;
mset<T>(data: {
list: Array<{ key: string; value: T; ttl?: number }>;
error?: unknown;
}): void;
del(data: { key: string; error?: unknown }): void;
mdel(data: { keys: string[]; error?: unknown }): void;
clear(error?: unknown): void;
ttl(data: { key: string; value?: any; error?: unknown }): void;
refresh<T>(data: { key: string; value: T; error?: unknown }): void;
};

import { createCache } from 'cache-manager';
const cache = createCache();
const metrics = {
get: { total: 0, hits: 0, misses: 0, errors: 0 },
set: { total: 0, errors: 0 },
del: { total: 0, errors: 0 },
clear: { total: 0, errors: 0 },
refresh: { total: 0, success: 0, failed: 0 },
};
cache.on('get', ({ key, value, error }) => {
metrics.get.total++;
if (error) {
metrics.get.errors++;
} else if (value !== undefined) {
metrics.get.hits++;
} else {
metrics.get.misses++;
}
});
cache.on('set', ({ error }) => {
metrics.set.total++;
if (error) metrics.set.errors++;
});
cache.on('del', ({ error }) => {
metrics.del.total++;
if (error) metrics.del.errors++;
});
cache.on('clear', (error) => {
metrics.clear.total++;
if (error) metrics.clear.errors++;
});
cache.on('refresh', ({ error }) => {
metrics.refresh.total++;
if (error) {
metrics.refresh.failed++;
} else {
metrics.refresh.success++;
}
});
// Expose metrics endpoint
function getMetrics() {
return {
...metrics,
hitRate: metrics.get.total > 0
? ((metrics.get.hits / (metrics.get.hits + metrics.get.misses)) * 100).toFixed(2) + '%'
: 'N/A',
refreshSuccessRate: metrics.refresh.total > 0
? ((metrics.refresh.success / metrics.refresh.total) * 100).toFixed(2) + '%'
: 'N/A',
};
}

In multi-store setups, events include store information showing which tier handled the operation.
import { createCache } from 'cache-manager';
import { Keyv } from 'keyv';
import KeyvRedis from '@keyv/redis';
import { CacheableMemory } from 'cacheable';
const cache = createCache({
stores: [
new Keyv({ store: new CacheableMemory({ ttl: 60000 }) }),
new Keyv({ store: new KeyvRedis('redis://localhost:6379') }),
],
});
// Track which tier serves requests
const tierMetrics = {
memory: 0,
redis: 0,
miss: 0,
};
cache.on('get', ({ key, value, store }) => {
if (value !== undefined && store) {
if (store === 'primary') {
tierMetrics.memory++;
console.log(`Cache hit for ${key} from memory (L1)`);
} else if (store.startsWith('secondary')) {
tierMetrics.redis++;
console.log(`Cache hit for ${key} from Redis (L2)`);
}
} else if (value === undefined) {
tierMetrics.miss++;
console.log(`Cache miss for ${key}`);
}
});
// Monitor tier effectiveness
function getTierEffectiveness() {
const total = tierMetrics.memory + tierMetrics.redis + tierMetrics.miss;
return {
memoryHitRate: total > 0 ? (tierMetrics.memory / total) * 100 : 0,
redisHitRate: total > 0 ? (tierMetrics.redis / total) * 100 : 0,
overallHitRate: total > 0
? ((tierMetrics.memory + tierMetrics.redis) / total) * 100
: 0,
};
}
cache.on('set', ({ key, store }) => {
if (store) {
console.log(`Set ${key} in ${store}`);
}
});

import { createCache } from 'cache-manager';
const cache = createCache();
let consecutiveErrors = 0;
const ERROR_THRESHOLD = 5;
let lastErrorTime = 0;
function handleCacheError(operation: string, key: string, error: unknown) {
consecutiveErrors++;
const now = Date.now();
console.error(`Cache ${operation} error for ${key} (${consecutiveErrors}):`, error);
// Alert on threshold
if (consecutiveErrors >= ERROR_THRESHOLD) {
console.error('ALERT: Cache errors exceeded threshold!');
// sendAlert('Cache system experiencing issues');
}
// Alert on error spike
if (now - lastErrorTime < 1000) {
console.error('ALERT: Rapid error spike detected!');
}
lastErrorTime = now;
}
function resetErrorCount() {
if (consecutiveErrors > 0) {
console.log(`Error streak ended after ${consecutiveErrors} errors`);
consecutiveErrors = 0;
}
}
cache.on('get', ({ key, error }) => {
if (error) {
handleCacheError('get', key, error);
} else {
resetErrorCount();
}
});
cache.on('set', ({ key, error }) => {
if (error) {
handleCacheError('set', key, error);
} else {
resetErrorCount();
}
});
cache.on('refresh', ({ key, error }) => {
if (error) {
handleCacheError('refresh', key, error);
} else {
resetErrorCount();
}
});
// Error rate monitoring
const errorWindow = {
startTime: Date.now(),
errorCount: 0,
totalOps: 0,
};
function trackOperation(hasError: boolean) {
errorWindow.totalOps++;
if (hasError) errorWindow.errorCount++;
const now = Date.now();
if (now - errorWindow.startTime > 60000) { // Every minute
const errorRate = errorWindow.totalOps > 0
? (errorWindow.errorCount / errorWindow.totalOps) * 100
: 0;
console.log(`Error rate: ${errorRate.toFixed(2)}%`);
if (errorRate > 5) {
console.error('ALERT: Error rate exceeds 5%!');
}
// Reset window
errorWindow.startTime = now;
errorWindow.errorCount = 0;
errorWindow.totalOps = 0;
}
}
cache.on('get', ({ error }) => trackOperation(!!error));
cache.on('set', ({ error }) => trackOperation(!!error));

import { createCache } from 'cache-manager';
const cache = createCache();
// Define listener function
const logGet = ({ key, value }: { key: string; value?: any }) => {
console.log(`Get: ${key} = ${value}`);
};
// Add listener
cache.on('get', logGet);
await cache.set('key', 'value');
await cache.get('key'); // Logs: "Get: key = value"
// Remove listener
cache.off('get', logGet);
await cache.get('key'); // No log output

Since on() and off() return the EventEmitter, you can chain multiple listener registrations.
import { createCache } from 'cache-manager';
const cache = createCache();
cache
.on('set', ({ key, value }) => {
console.log(`Set ${key}:`, value);
})
.on('get', ({ key, value }) => {
console.log(`Get ${key}:`, value);
})
.on('del', ({ key }) => {
console.log(`Delete ${key}`);
})
.on('refresh', ({ key, error }) => {
if (error) {
console.error(`Refresh failed for ${key}:`, error);
} else {
console.log(`Refreshed ${key}`);
}
});

import { createCache } from 'cache-manager';
const cache = createCache();
// Measure event handler overhead
const handlerTimes: number[] = [];
cache.on('get', ({ key }) => {
const start = performance.now();
// Your logging/metrics code
console.log(`Get: ${key}`);
const duration = performance.now() - start;
handlerTimes.push(duration);
// Keep last 1000 measurements
if (handlerTimes.length > 1000) {
handlerTimes.shift();
}
});
function getHandlerOverhead() {
if (handlerTimes.length === 0) return 0;
const avg = handlerTimes.reduce((a, b) => a + b, 0) / handlerTimes.length;
const max = Math.max(...handlerTimes);
return { avg, max };
}
// Warn if handlers are slow
setInterval(() => {
const overhead = getHandlerOverhead();
if (overhead.avg > 1) { // 1ms average
console.warn(`Event handlers are slow: ${overhead.avg.toFixed(2)}ms avg, ${overhead.max.toFixed(2)}ms max`);
}
}, 60000);

Events are emitted at specific points during operations:
In multi-store configurations:
get and set events may include a store field indicating which store handled the operation ("primary" for the first store, "secondary:N" for subsequent stores)
Use the store field to track which tier handled each operation