or run

tessl search
Log in

Version

Workspace
tessl
Visibility
Public
Created
Last updated
Describes
npmpkg:npm/cache-manager@7.2.x

docs

examples

edge-cases.md · integration-patterns.md · real-world-scenarios.md
index.md
tile.json

tessl/npm-cache-manager

tessl install tessl/npm-cache-manager@7.2.0

Cache Manager for Node.js with support for multi-store caching, background refresh, and Keyv-compatible storage adapters

integration-patterns.md — docs/examples/

Integration Patterns

Framework-specific integration examples and deployment patterns.

Next.js Integration

API Routes Caching

// pages/api/users/[id].ts
import type { NextApiRequest, NextApiResponse } from 'next';
import { createCache } from 'cache-manager';

// Module-level cache shared by all invocations of this route.
const cache = createCache({ ttl: 60000 });

/**
 * GET /api/users/[id] — returns the user, cached for 60 seconds.
 * Each user id gets its own cache entry.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // Next.js query values may be string | string[]; normalize so the
  // cache key is stable (an array id previously produced "user:a,b").
  const raw = req.query.id;
  const id = Array.isArray(raw) ? raw[0] : raw;

  try {
    const user = await cache.wrap(
      `user:${id}`,
      async () => {
        const response = await fetch(`${process.env.API_URL}/users/${id}`);
        // Fail loudly on upstream errors instead of caching an error body.
        if (!response.ok) {
          throw new Error(`Upstream responded with ${response.status}`);
        }
        return await response.json();
      },
      60000 // per-entry TTL (ms)
    );

    res.setHeader('Cache-Control', 'public, max-age=60');
    res.status(200).json(user);
  } catch (error) {
    // Log before responding — the original swallowed the failure silently.
    console.error('Failed to fetch user', error);
    res.status(500).json({ error: 'Failed to fetch user' });
  }
}

Server-Side Rendering with Cache

// pages/products/[id].tsx
import { GetServerSideProps } from 'next';
import { createCache } from 'cache-manager';

// NOTE(review): `db` is assumed to be a data-access module imported
// elsewhere in the real app — only the caching wiring is shown here.
const cache = createCache({ ttl: 300000 });

/**
 * Server-side fetch with a 5-minute cache per product id. The database
 * is hit only on a cache miss; subsequent renders reuse the cached row.
 */
export const getServerSideProps: GetServerSideProps = async (context) => {
  const { id } = context.params!;

  const product = await cache.wrap(
    `product:${id}`,
    async () => {
      return await db.products.findById(id as string);
    },
    300000
  );

  if (!product) {
    return { notFound: true };
  }

  return {
    props: { product },
  };
};

// Typed props fix the implicit-any parameter that fails under `strict`.
export default function ProductPage({ product }: { product: { name: string } }) {
  return <div>{product.name}</div>;
}

NestJS Integration

Injectable Cache Service

// cache.service.ts
import { Injectable, OnModuleDestroy } from '@nestjs/common';
import { createCache, Cache } from 'cache-manager';

/**
 * Thin injectable wrapper around a cache-manager instance so the cache
 * can be provided through Nest's DI container and torn down with the app.
 */
@Injectable()
export class CacheService implements OnModuleDestroy {
  // 5-minute default TTL; individual calls may override it.
  private readonly cache: Cache = createCache({ ttl: 300000 });

  /** Resolve the stored value for `key`, or undefined on a miss. */
  async get<T>(key: string): Promise<T | undefined> {
    return this.cache.get<T>(key);
  }

  /** Store `value` under `key`; resolves to the stored value. */
  async set<T>(key: string, value: T, ttl?: number): Promise<T> {
    return this.cache.set(key, value, ttl);
  }

  /** Read-through helper: return the cached value or compute and store it. */
  async wrap<T>(key: string, fn: () => Promise<T>, ttl?: number): Promise<T> {
    return this.cache.wrap(key, fn, ttl);
  }

  /** Nest lifecycle hook: release store connections on shutdown. */
  async onModuleDestroy() {
    await this.cache.disconnect();
  }
}

Controller with Caching

// users.controller.ts
import { Controller, Get, Param } from '@nestjs/common';
import { CacheService } from './cache.service';

/** Minimal structural type for the repository this example reads from. */
interface UsersRepository {
  findOne(id: string): Promise<unknown>;
}

@Controller('users')
export class UsersController {
  // `usersRepository` was referenced but never declared/injected in the
  // original snippet; declare it here so the example compiles under strict.
  constructor(
    private readonly cacheService: CacheService,
    private readonly usersRepository: UsersRepository,
  ) {}

  /** GET /users/:id — cached for 60 seconds per user id. */
  @Get(':id')
  async getUser(@Param('id') id: string) {
    return await this.cacheService.wrap(
      `user:${id}`,
      () => this.fetchUser(id),
      60000
    );
  }

  // Runs only on a cache miss.
  private async fetchUser(id: string) {
    // Database query
    return await this.usersRepository.findOne(id);
  }
}

Fastify Integration

Plugin Registration

import Fastify from 'fastify';
// `Cache` is needed for the module augmentation below; the original
// snippet used the type without importing it.
import { createCache, type Cache } from 'cache-manager';

const fastify = Fastify();

// Register cache as decorator so every route can reach fastify.cache.
const cache = createCache({ ttl: 60000 });
fastify.decorate('cache', cache);

// Add TypeScript types for the decorator
declare module 'fastify' {
  interface FastifyInstance {
    cache: Cache;
  }
}

// Use in routes — read-through cache keyed per user id.
fastify.get('/users/:id', async (request, reply) => {
  const { id } = request.params as { id: string };

  const user = await fastify.cache.wrap(
    `user:${id}`,
    async () => {
      // NOTE(review): `db` is assumed to be imported elsewhere.
      return await db.users.findById(id);
    },
    60000
  );

  return user;
});

// Cleanup on close — release store connections with the server.
fastify.addHook('onClose', async () => {
  await cache.disconnect();
});

Koa Integration

Middleware Pattern

import Koa from 'koa';
import { createCache } from 'cache-manager';

const app = new Koa();
const cache = createCache({ ttl: 60000 });

// Add cache to context so middleware can reach it as ctx.cache.
app.context.cache = cache;

/**
 * Response-caching middleware: serves a stored body when present,
 * otherwise runs the route and stores a successful response for 60s.
 */
app.use(async (ctx, next) => {
  const cacheKey = `route:${ctx.path}:${JSON.stringify(ctx.query)}`;

  // Try cache. Compare against null/undefined rather than truthiness so
  // legitimately falsy bodies (empty string, 0) still count as hits.
  const cached = await ctx.cache.get(cacheKey);
  if (cached != null) {
    ctx.body = cached;
    ctx.set('X-Cache', 'HIT');
    return;
  }

  // Execute route
  await next();

  // Cache response
  if (ctx.status === 200 && ctx.body) {
    await ctx.cache.set(cacheKey, ctx.body, 60000);
    ctx.set('X-Cache', 'MISS');
  }
});

// Routes
app.use(async (ctx) => {
  ctx.body = { message: 'Hello World' };
});

Apollo GraphQL Integration

DataSource with Caching

// NOTE(review): this example assumes `ApolloServer`, `typeDefs` and
// `resolvers` are imported/defined elsewhere; only the caching wiring is
// shown. `apollo-datasource` is a legacy package — confirm it matches
// the Apollo Server version in use.
import { DataSource } from 'apollo-datasource';
import { createCache } from 'cache-manager';

// Per-datasource in-memory cache with a 60-second default TTL.
class UserDataSource extends DataSource {
  private cache = createCache({ ttl: 60000 });

  // Read-through: returns the cached user or fetches and stores it.
  async getUser(id: number) {
    return await this.cache.wrap(
      `user:${id}`,
      () => this.fetchUser(id),
      60000
    );
  }

  // NOTE(review): a relative URL only works where fetch has a base URL
  // (browser or a configured polyfill) — verify in a Node environment.
  private async fetchUser(id: number) {
    const response = await fetch(`/api/users/${id}`);
    return await response.json();
  }

  // Call after mutations so the next read re-fetches fresh data.
  async invalidateUser(id: number) {
    await this.cache.del(`user:${id}`);
  }
}

// Apollo Server setup
const server = new ApolloServer({
  typeDefs,
  resolvers,
  dataSources: () => ({
    users: new UserDataSource(),
  }),
});

tRPC Integration

Procedure with Caching

import { initTRPC } from '@trpc/server';
import { createCache } from 'cache-manager';

const t = initTRPC.create();
const cache = createCache({ ttl: 60000 });

const appRouter = t.router({
  getUser: t.procedure
    .input(z.object({ id: z.number() }))
    .query(async ({ input }) => {
      return await cache.wrap(
        `user:${input.id}`,
        async () => {
          return await db.users.findById(input.id);
        },
        60000
      );
    }),
  
  updateUser: t.procedure
    .input(z.object({ id: z.number(), data: z.any() }))
    .mutation(async ({ input }) => {
      const updated = await db.users.update(input.id, input.data);
      
      // Invalidate cache
      await cache.del(`user:${input.id}`);
      
      return updated;
    }),
});

Bull Queue Integration

Job Results Caching

import Queue from 'bull';
import { createCache } from 'cache-manager';

const jobQueue = new Queue('jobs', 'redis://localhost:6379');
const resultCache = createCache({ ttl: 600000 }); // 10 minutes

// Process jobs, memoizing results by job-data id so a re-submitted job
// with the same id returns the cached result instead of recomputing.
jobQueue.process(async (job) => {
  const { id } = job.data;

  // Check cache first — null/undefined means miss; a falsy result
  // (0, '', false) is still a valid cached value, so avoid truthiness.
  const cached = await resultCache.get(`job:${id}`);
  if (cached != null) {
    return cached;
  }

  // Process job. NOTE(review): `processJob` is assumed to be defined
  // elsewhere in the real application.
  const result = await processJob(job.data);

  // Cache result
  await resultCache.set(`job:${id}`, result, 600000);

  return result;
});

// Submit job and block until the worker finishes it.
async function submitAndWait(data: any) {
  const job = await jobQueue.add(data);
  const result = await job.finished();
  return result;
}

Socket.IO Integration

Room State Caching

import { Server } from 'socket.io';
import { createCache } from 'cache-manager';

const io = new Server(3000);
const roomCache = createCache({ ttl: 300000 });

io.on('connection', (socket) => {
  // Joining a room replays its (possibly cached) state to the new member.
  socket.on('join-room', async (roomId) => {
    await socket.join(roomId);

    const key = `room:${roomId}`;
    let roomState = await roomCache.get(key);

    if (!roomState) {
      // Cache miss: load from the database and remember for 5 minutes.
      roomState = await db.rooms.findById(roomId);
      await roomCache.set(key, roomState, 300000);
    }

    socket.emit('room-state', roomState);
  });

  // Writes go to the database first; the stale cache entry is then
  // dropped so the next join re-reads fresh state.
  socket.on('update-room', async (roomId, updates) => {
    await db.rooms.update(roomId, updates);

    await roomCache.del(`room:${roomId}`);

    io.to(roomId).emit('room-updated', updates);
  });
});

Docker Compose Setup

Multi-Container with Redis

# docker-compose.yml
# App container plus a Redis instance used as the shared cache store.
version: '3.8'
services:
  app:
    build: .
    ports:
      - "3000:3000"
    environment:
      # The hostname `redis` resolves via the compose network to the
      # service below.
      - REDIS_URL=redis://redis:6379
    depends_on:
      - redis

  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    volumes:
      # Persist Redis data across container restarts.
      - redis_data:/data

volumes:
  redis_data:

Application Configuration

// config.ts
import { createCache } from 'cache-manager';
import { Keyv } from 'keyv';
import KeyvRedis from '@keyv/redis';
import { CacheableMemory } from 'cacheable';

const redisUrl = process.env.REDIS_URL || 'redis://localhost:6379';

// Two-tier cache: a bounded in-process LRU plus a shared Redis store.
// NOTE(review): store-array order presumably means memory is consulted
// before Redis — confirm against cache-manager's multi-store docs.
export const cache = createCache({
  stores: [
    new Keyv({ 
      store: new CacheableMemory({ ttl: 60000, lruSize: 1000 })
    }),
    new Keyv({ 
      store: new KeyvRedis(redisUrl)
    }),
  ],
  ttl: 300000, // default TTL (ms) when a call doesn't pass its own
});

// Graceful shutdown
// Close store connections before exiting so Redis sockets aren't leaked.
process.on('SIGTERM', async () => {
  console.log('Disconnecting cache...');
  await cache.disconnect();
  process.exit(0);
});

Kubernetes Deployment

Deployment with Redis StatefulSet

# k8s/redis.yaml
# Single-replica Redis with a persistent volume; the headless Service
# below gives it the stable DNS name `redis` that the app connects to.
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: redis
spec:
  serviceName: redis
  replicas: 1
  selector:
    matchLabels:
      app: redis
  template:
    metadata:
      labels:
        app: redis
    spec:
      containers:
      - name: redis
        image: redis:7-alpine
        ports:
        - containerPort: 6379
        volumeMounts:
        - name: redis-data
          mountPath: /data
  # One PersistentVolumeClaim per replica, so data survives pod restarts.
  volumeClaimTemplates:
  - metadata:
      name: redis-data
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 10Gi
---
apiVersion: v1
kind: Service
metadata:
  name: redis
spec:
  ports:
  - port: 6379
  # Headless service: DNS resolves directly to the pod.
  clusterIP: None
  selector:
    app: redis

Application Deployment

# k8s/app.yaml
# Three stateless application replicas; each connects to the shared
# Redis service for caching via REDIS_URL.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: app
spec:
  replicas: 3
  selector:
    matchLabels:
      app: myapp
  template:
    metadata:
      labels:
        app: myapp
    spec:
      containers:
      - name: app
        image: myapp:latest
        env:
        # `redis` resolves to the headless Service defined in redis.yaml.
        - name: REDIS_URL
          value: "redis://redis:6379"
        ports:
        - containerPort: 3000

AWS Lambda Integration

Lambda with Elasticache

// `Cache` is used as a type annotation below; the original snippet
// referenced it without importing it.
import { createCache, type Cache } from 'cache-manager';
import { Keyv } from 'keyv';
import KeyvRedis from '@keyv/redis';

// Reuse connection across Lambda invocations — module scope survives
// warm starts, so a cold start builds the Redis client only once.
let cache: Cache | null = null;

/** Lazily build the Redis-backed cache on first use. */
function getCache(): Cache {
  if (!cache) {
    cache = createCache({
      stores: [
        new Keyv({
          store: new KeyvRedis(process.env.REDIS_URL!),
        }),
      ],
      ttl: 300000,
    });
  }
  return cache;
}

export const handler = async (event: any) => {
  // Renamed from `cache` to avoid shadowing the module-level variable.
  const store = getCache();

  // NOTE(review): `processEvent` is assumed to be defined elsewhere.
  const result = await store.wrap(
    `lambda:${event.id}`,
    async () => {
      return await processEvent(event);
    },
    60000
  );

  return {
    statusCode: 200,
    body: JSON.stringify(result),
  };
};

Testing Patterns

Jest with In-Memory Cache

// `Cache` is used as a type annotation below; the original snippet
// referenced it without importing it.
import { createCache, type Cache } from 'cache-manager';

describe('UserService', () => {
  let cache: Cache;
  let service: UserService;

  beforeEach(() => {
    // Fresh cache for each test keeps tests independent of one another.
    cache = createCache();
    service = new UserService(cache);
  });

  afterEach(async () => {
    await cache.clear();
  });

  it('should cache user lookups', async () => {
    const user = { id: 1, name: 'Alice' };
    jest.spyOn(db.users, 'findById').mockResolvedValue(user);

    // First call - hits database
    const result1 = await service.getUser(1);
    // The original computed result1 but never asserted it.
    expect(result1).toEqual(user);
    expect(db.users.findById).toHaveBeenCalledTimes(1);

    // Second call - uses cache (no additional database hit)
    const result2 = await service.getUser(1);
    expect(db.users.findById).toHaveBeenCalledTimes(1);
    expect(result2).toEqual(user);
  });
});

Next Steps

  • Real-World Scenarios - More production examples
  • Quick Start Guide - Getting started
  • Core Operations Reference - Complete API