or run

tessl search
Log in

Version

Workspace
tessl
Visibility
Public
Created
Last updated
Describes
npmpkg:npm/cache-manager@7.2.x

docs

examples

edge-cases.md
integration-patterns.md
real-world-scenarios.md
index.md
tile.json

tessl/npm-cache-manager

tessl install tessl/npm-cache-manager@7.2.0

Cache Manager for Node.js with support for multi-store caching, background refresh, and Keyv-compatible storage adapters

docs/examples/real-world-scenarios.md

Real-World Scenarios

Production-ready implementation examples for common use cases.

Express.js API Caching

Basic Endpoint Caching

import express from 'express';
import { createCache } from 'cache-manager';

const app = express();
const cache = createCache({ ttl: 60000 });

// Cache-aside read: wrap() returns the cached user when present,
// otherwise runs the loader and stores its result for 60s. Loader
// errors propagate out of wrap() to the catch below.
app.get('/api/users/:id', async (req, res) => {
  try {
    const user = await cache.wrap(
      `user:${req.params.id}`,
      async () => {
        const result = await db.users.findById(req.params.id);
        if (!result) throw new Error('User not found');
        return result;
      },
      60000 // 60 second cache
    );

    res.json(user);
  } catch (error) {
    // FIX: the catch binding is `unknown` under strict TS — narrow with
    // instanceof before reading .message (also avoids a crash if a
    // non-Error value is thrown).
    if (error instanceof Error && error.message === 'User not found') {
      res.status(404).json({ error: 'User not found' });
    } else {
      res.status(500).json({ error: 'Internal server error' });
    }
  }
});

app.listen(3000);

With Cache Headers

// Serve a product with HTTP cache headers derived from the cache entry's
// own expiry timestamp (raw: true returns { value, expires }).
app.get('/api/products/:id', async (req, res) => {
  const cacheKey = `product:${req.params.id}`;

  const rawData = await cache.wrap(
    cacheKey,
    () => db.products.findById(req.params.id),
    { ttl: 300000, raw: true }
  );

  // FIX: clamp to 0 — an entry at or just past expiry would otherwise
  // produce a negative Age or max-age, which is invalid per RFC 9111.
  // (expires - 300000 reconstructs the time the entry was written.)
  const age = Math.max(0, Math.floor((Date.now() - (rawData.expires - 300000)) / 1000));
  const maxAge = Math.max(0, Math.floor((rawData.expires - Date.now()) / 1000));

  res.set({
    'Cache-Control': `public, max-age=${maxAge}`,
    'Age': age.toString(),
    'X-Cache': 'HIT',
  });

  res.json(rawData.value);
});

Invalidation on Update

// Write-through invalidation: update the database first, then evict
// every cache entry that could now be stale.
app.put('/api/users/:id', async (req, res) => {
  const { id: userId } = req.params;

  // Persist the change before touching the cache.
  const updated = await db.users.update(userId, req.body);

  // Drop the primary entry.
  await cache.del(`user:${userId}`);

  // Drop derived views of this user in a single batch call.
  const relatedKeys = ['profile', 'posts', 'settings'].map(
    suffix => `user:${userId}:${suffix}`
  );
  await cache.mdel(relatedKeys);

  res.json(updated);
});

GraphQL DataLoader Integration

Basic DataLoader with Cache

import DataLoader from 'dataloader';
import { createCache } from 'cache-manager';

const cache = createCache({ ttl: 60000 });

// Batches user lookups per tick (DataLoader) and serves them
// cache-first, only hitting the database for IDs the cache misses.
const userLoader = new DataLoader(async (ids: readonly number[]) => {
  const cacheKeys = ids.map(id => `user:${id}`);
  const cached = await cache.mget(cacheKeys);

  // Positions in `ids` that the cache did not have.
  const missingIndices = cached
    .map((val, idx) => val === undefined ? idx : null)
    .filter((idx): idx is number => idx !== null);

  if (missingIndices.length > 0) {
    const missingIds = missingIndices.map(idx => ids[idx]);
    const users = await db.users.findByIds(missingIds);

    // Cache the freshly fetched rows.
    await cache.mset(
      users.map(user => ({
        key: `user:${user.id}`,
        value: user,
        ttl: 60000,
      }))
    );

    // FIX: don't assume findByIds returns rows in missingIds order (or
    // returns every ID at all) — positional assignment `users[i]` would
    // silently attach the wrong user. Index the rows by ID and place
    // each one at its own slot; IDs not found stay undefined.
    const byId = new Map(users.map(user => [user.id, user]));
    for (const idx of missingIndices) {
      cached[idx] = byId.get(ids[idx]);
    }
  }

  return cached;
});

// GraphQL resolver
const resolvers = {
  Query: {
    user: (_, { id }) => userLoader.load(id),
  },
  Post: {
    author: (post) => userLoader.load(post.authorId),
  },
};

Metrics Collection

Comprehensive Metrics

import { createCache } from 'cache-manager';

const cache = createCache({ ttl: 60000 });

// In-process counters updated from cache events and exposed at /metrics.
// (The latency arrays are placeholders for timing samples; nothing in
// this example records into them.)
const metrics = {
  get: { total: 0, hits: 0, misses: 0, errors: 0, latency: [] as number[] },
  set: { total: 0, errors: 0, latency: [] as number[] },
  refresh: { total: 0, success: 0, failed: 0 },
};

// A defined value means the key was found; undefined is a miss.
cache.on('get', ({ key, value, error }) => {
  metrics.get.total += 1;
  if (error) {
    metrics.get.errors += 1;
  } else if (value === undefined) {
    metrics.get.misses += 1;
  } else {
    metrics.get.hits += 1;
  }
});

cache.on('set', ({ error }) => {
  metrics.set.total += 1;
  if (error) {
    metrics.set.errors += 1;
  }
});

cache.on('refresh', ({ error }) => {
  metrics.refresh.total += 1;
  if (error) {
    metrics.refresh.failed += 1;
  } else {
    metrics.refresh.success += 1;
  }
});

// Expose metrics endpoint
app.get('/metrics', (req, res) => {
  const g = metrics.get;

  const hitRate = g.total > 0
    ? ((g.hits / (g.hits + g.misses)) * 100).toFixed(2)
    : '0.00';

  const errorRate = g.total > 0
    ? ((g.errors / g.total) * 100).toFixed(2)
    : '0.00';

  const refreshSuccessRate = metrics.refresh.total > 0
    ? `${((metrics.refresh.success / metrics.refresh.total) * 100).toFixed(2)}%`
    : '0.00%';

  res.json({
    cache: {
      get: { ...g, hitRate: `${hitRate}%`, errorRate: `${errorRate}%` },
      set: metrics.set,
      refresh: { ...metrics.refresh, successRate: refreshSuccessRate },
    },
  });
});

Prometheus Integration

import { createCache } from 'cache-manager';
import { register, Counter, Histogram } from 'prom-client';

const cache = createCache({ ttl: 60000 });

// Prometheus collectors for cache activity. The histogram is declared
// here for operation timing; this example only wires up the counters.
const cacheHits = new Counter({
  name: 'cache_hits_total',
  help: 'Total number of cache hits',
  labelNames: ['operation'],
});

const cacheMisses = new Counter({
  name: 'cache_misses_total',
  help: 'Total number of cache misses',
});

const cacheErrors = new Counter({
  name: 'cache_errors_total',
  help: 'Total number of cache errors',
  labelNames: ['operation'],
});

const cacheOperationDuration = new Histogram({
  name: 'cache_operation_duration_seconds',
  help: 'Cache operation duration',
  labelNames: ['operation'],
  buckets: [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1],
});

// Classify each get: error, then miss (undefined value), then hit.
cache.on('get', ({ value, error }) => {
  if (error) {
    cacheErrors.inc({ operation: 'get' });
    return;
  }
  if (value === undefined) {
    cacheMisses.inc();
    return;
  }
  cacheHits.inc({ operation: 'get' });
});

// Prometheus endpoint
app.get('/prometheus', async (req, res) => {
  res.set('Content-Type', register.contentType);
  res.end(await register.metrics());
});

Session Management

User Session Cache

import { createCache } from 'cache-manager';
import { randomUUID } from 'crypto';

// Per-login session payload stored in the cache.
interface Session {
  userId: number;
  email: string;
  role: string;
  createdAt: number;
}

const sessionCache = createCache({
  ttl: 3600000, // 1 hour
});

// Issues a fresh session ID and caches a snapshot of the user for 1 hour.
async function createSession(userId: number): Promise<string> {
  const sessionId = randomUUID();
  const user = await db.users.findById(userId);

  const session: Session = {
    userId: user.id,
    email: user.email,
    role: user.role,
    createdAt: Date.now(),
  };

  await sessionCache.set(`session:${sessionId}`, session, 3600000);
  return sessionId;
}

// Returns the session, or null when it is missing or expired.
async function getSession(sessionId: string): Promise<Session | null> {
  const found = await sessionCache.get<Session>(`session:${sessionId}`);
  return found || null;
}

// Logs the session out by evicting it.
async function deleteSession(sessionId: string): Promise<void> {
  await sessionCache.del(`session:${sessionId}`);
}

// Express middleware: rejects requests without a valid session cookie
// and attaches the session to req for downstream handlers.
async function requireAuth(req, res, next) {
  const sessionId = req.cookies.sessionId;
  if (!sessionId) {
    return res.status(401).json({ error: 'Not authenticated' });
  }

  try {
    const session = await getSession(sessionId);
    if (!session) {
      return res.status(401).json({ error: 'Invalid session' });
    }
    req.session = session;
    next();
  } catch (err) {
    next(err);
  }
}

Rate Limiting

Simple Rate Limiter

import { createCache } from 'cache-manager';

const rateLimitCache = createCache();

// Fixed-counter limiter: allows `limit` calls per `window` ms for a key.
// NOTE(review): get-then-set is not atomic, and every allowed call
// resets the window TTL — fine for a single-process example; confirm
// before relying on this behind multiple workers.
async function checkRateLimit(
  key: string,
  limit: number,
  window: number
): Promise<boolean> {
  const count = await rateLimitCache.get<number>(key) || 0;

  if (count >= limit) {
    return false; // Rate limit exceeded
  }

  await rateLimitCache.set(key, count + 1, window);
  return true; // OK
}

// Express middleware
app.use('/api', async (req, res, next) => {
  const key = `ratelimit:${req.ip}`;

  const allowed = await checkRateLimit(key, 100, 60000); // 100 requests per minute
  if (!allowed) {
    return res.status(429).json({ error: 'Too many requests' });
  }

  next();
});

Token Bucket Algorithm

// Persisted state for one token bucket.
interface TokenBucket {
  tokens: number;
  lastRefill: number;
}

// Token-bucket limiter: refills `refillRate` tokens/second up to
// `capacity`; each allowed call spends one token. The bucket is
// persisted on every call so the refill clock keeps advancing.
async function checkTokenBucket(
  key: string,
  capacity: number,
  refillRate: number // tokens per second
): Promise<boolean> {
  const now = Date.now();
  const stored = await rateLimitCache.get<TokenBucket>(key);

  // New callers start with a full bucket; returning callers are topped
  // up in proportion to the time elapsed since the last refill.
  let bucket: TokenBucket;
  if (stored) {
    const elapsedSec = (now - stored.lastRefill) / 1000;
    bucket = {
      tokens: Math.min(capacity, stored.tokens + elapsedSec * refillRate),
      lastRefill: now,
    };
  } else {
    bucket = { tokens: capacity, lastRefill: now };
  }

  const allowed = bucket.tokens >= 1;
  if (allowed) {
    bucket.tokens -= 1;
  }

  await rateLimitCache.set(key, bucket, 3600000);
  return allowed;
}

Background Jobs

Job Result Caching

import { createCache } from 'cache-manager';

const jobCache = createCache({ ttl: 300000 }); // 5 minutes

// Runs a job at most once per cache window; repeat calls within the
// window return the cached result instead of recomputing.
async function processJob(jobId: string): Promise<any> {
  const cached = await jobCache.get(`job:${jobId}`);
  // FIX: check against null/undefined rather than truthiness, so a
  // legitimately falsy result (0, '', false) is not recomputed on
  // every call.
  if (cached != null) {
    return cached; // Return cached result
  }

  // Process job
  const result = await heavyComputation(jobId);

  // Cache result
  await jobCache.set(`job:${jobId}`, result, 300000);

  return result;
}

// Queue worker: polls forever, backing off 1s when the queue is empty
// and re-queueing any job whose processing fails.
async function processQueue() {
  while (true) {
    const job = await queue.pop();
    if (!job) {
      await sleep(1000);
      continue;
    }

    try {
      await processJob(job.id);
    } catch (error) {
      console.error(`Job ${job.id} failed:`, error);
      await queue.push(job); // Re-queue
    }
  }
}

Cache Warming

Startup Cache Warming

import { createCache } from 'cache-manager';

const cache = createCache({ ttl: 300000 });

// Preloads hot/critical entries so the first requests after boot
// don't all miss at once.
async function warmCache() {
  console.log('Warming cache...');

  // Critical configuration: small and long-lived.
  const config = await db.config.findAll();
  const configEntries = config.map(item => ({
    key: `config:${item.key}`,
    value: item.value,
    ttl: 3600000, // 1 hour
  }));
  await cache.mset(configEntries);

  // Top-100 products: hot, but refreshed more often.
  const popularProducts = await db.products.findPopular(100);
  const productEntries = popularProducts.map(product => ({
    key: `product:${product.id}`,
    value: product,
    ttl: 600000, // 10 minutes
  }));
  await cache.mset(productEntries);

  console.log('Cache warmed');
}

// Warm on startup
warmCache().catch(console.error);

// Re-warm periodically
setInterval(warmCache, 3600000); // Every hour

Microservices Communication

Service-to-Service Caching

import { createCache } from 'cache-manager';
import fetch from 'node-fetch';

const serviceCache = createCache({ ttl: 60000 });

// Shared helper: fetch JSON from a sibling service, throwing on any
// non-2xx response with the service name in the message.
async function fetchServiceJson(url: string, label: string) {
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`${label} error: ${response.status}`);
  }
  return await response.json();
}

// Cached user-service client: 60s TTL, refreshed in the background
// once 15s remain.
async function callUserService(userId: number): Promise<User> {
  return await serviceCache.wrap(
    `user-service:${userId}`,
    () => fetchServiceJson(`http://user-service/api/users/${userId}`, 'User service'),
    60000,
    15000 // Background refresh at 15s remaining
  );
}

// Cached product-service client: 5min TTL, background refresh once
// 60s remain.
async function callProductService(productId: number): Promise<Product> {
  return await serviceCache.wrap(
    `product-service:${productId}`,
    () => fetchServiceJson(`http://product-service/api/products/${productId}`, 'Product service'),
    300000,
    60000
  );
}

Testing Support

Mock Cache for Tests

import { createCache } from 'cache-manager';

// Test helper: wraps a real cache so tests can assert on exactly which
// keys were read, written, and deleted.
function createMockCache() {
  const cache = createCache();

  // Recorded invocations, exposed for assertions.
  const calls = {
    get: [] as string[],
    set: [] as Array<{ key: string; value: any }>,
    del: [] as string[],
  };

  // Keep bound references to the real methods before patching them.
  const real = {
    get: cache.get.bind(cache),
    set: cache.set.bind(cache),
    del: cache.del.bind(cache),
  };

  cache.get = async (key: string) => {
    calls.get.push(key);
    return real.get(key);
  };

  cache.set = async (key: string, value: any, ttl?: number) => {
    calls.set.push({ key, value });
    return real.set(key, value, ttl);
  };

  cache.del = async (key: string) => {
    calls.del.push(key);
    return real.del(key);
  };

  return { cache, calls };
}

// Usage in tests
describe('User Service', () => {
  it('should cache user fetches', async () => {
    const { cache, calls } = createMockCache();
    const service = new UserService(cache);

    await service.getUser(123);
    await service.getUser(123);

    expect(calls.get).toEqual(['user:123', 'user:123']);
    expect(calls.set).toHaveLength(1); // Only set once
  });
});

Next Steps

  • Edge Cases - Advanced scenarios and corner cases
  • Integration Patterns - More framework integrations
  • Core Operations Reference - Complete API documentation