A high-performance Least Recently Used (LRU) cache implementation for JavaScript and TypeScript applications.
npx @tessl/cli install tessl/npm-lru-cache@11.2.0

LRU Cache is a high-performance Least Recently Used (LRU) cache implementation for JavaScript and TypeScript applications. It maintains a cache that automatically evicts the least recently used items when capacity limits are reached, supporting flexible configuration including maximum item count, total storage size limits, and time-to-live (TTL) expiration.
Install:

npm install lru-cache

Import (ESM):

import { LRUCache } from "lru-cache";

For CommonJS:

const { LRUCache } = require("lru-cache");

For minified build:

import { LRUCache } from "lru-cache/min";

For browser use (via CDN):

import { LRUCache } from "https://unpkg.com/lru-cache@11/dist/mjs/index.min.mjs";

import { LRUCache } from "lru-cache";
// Create cache with max entries limit
const cache = new LRUCache<string, any>({ max: 500 });
// Basic operations
cache.set("key", "value");
const value = cache.get("key"); // "value"
const exists = cache.has("key"); // true
cache.delete("key");
cache.clear();
// TTL-based caching: entries expire after the configured time-to-live
const ttlCache = new LRUCache<string, any>({
max: 100,
ttl: 1000 * 60 * 5, // 5 minutes
});
ttlCache.set("session", { userId: 123 });

LRU Cache is built around several key components:
LRUCache<K, V, FC> class with efficient doubly-linked list for LRU tracking

Essential cache operations for storing, retrieving, and managing cache entries.
class LRUCache<K extends {}, V extends {}, FC = unknown> {
/** Create a cache; options must include at least one of max, maxSize, or ttl (see LRUCache.Options) */
constructor(options: LRUCache.Options<K, V, FC>);
/** Add or update a cache entry, making it the most recently used */
set(key: K, value: V | undefined, options?: LRUCache.SetOptions<K, V, FC>): this;
/** Retrieve a cache entry, updating its recency; returns undefined on miss */
get(key: K, options?: LRUCache.GetOptions<K, V, FC>): V | undefined;
/** Check if a key exists without updating recency */
has(key: K, options?: LRUCache.HasOptions<K, V, FC>): boolean;
/** Remove a cache entry; returns true if the key was present */
delete(key: K): boolean;
/** Remove all cache entries */
clear(): void;
/** Get a value without updating recency or deleting stale items */
peek(key: K, options?: LRUCache.PeekOptions<K, V, FC>): V | undefined;
/** Remove and return the least recently used item (undefined when the cache is empty) */
pop(): V | undefined;
}Access cache state and configuration.
class LRUCache<K extends {}, V extends {}, FC = unknown> {
/** Maximum number of entries the cache will hold (read-only) */
readonly max: LRUCache.Count;
/** Maximum total calculated size across all entries (read-only) */
readonly maxSize: LRUCache.Size;
/** Current number of entries (read-only) */
readonly size: LRUCache.Count;
/** Current total calculated size of all entries (read-only) */
readonly calculatedSize: LRUCache.Size;
/** Async function used by fetch(), if configured (read-only) */
readonly fetchMethod?: LRUCache.Fetcher<K, V, FC>;
/** Function used by memo(), if configured (read-only) */
readonly memoMethod?: LRUCache.Memoizer<K, V, FC>;
/** Callback invoked when an entry is removed (read-only) */
readonly dispose?: LRUCache.Disposer<K, V>;
/** Callback invoked after an entry is completely removed (read-only) */
readonly disposeAfter?: LRUCache.Disposer<K, V>;
/** Callback invoked when an entry is added or updated (read-only) */
readonly onInsert?: LRUCache.Inserter<K, V>;
/** Timer object providing now(), used for TTL tracking (read-only) */
readonly perf: Perf;
}Configure and manage time-based cache expiration.
class LRUCache<K extends {}, V extends {}, FC = unknown> {
/** Default TTL in milliseconds applied to new entries */
ttl: LRUCache.Milliseconds;
/** Minimum time between staleness checks, in milliseconds */
ttlResolution: LRUCache.Milliseconds;
/** Automatically purge entries as they expire */
ttlAutopurge: boolean;
/** Reset an entry's age on get() operations */
updateAgeOnGet: boolean;
/** Reset an entry's age on has() operations */
updateAgeOnHas: boolean;
/** Allow get()/fetch() to return stale values */
allowStale: boolean;
/** Don't delete stale items when they are retrieved with get() */
noDeleteOnStaleGet: boolean;
/** Get the remaining TTL for a key, in milliseconds */
getRemainingTTL(key: K): LRUCache.Milliseconds;
/** Remove all stale entries; returns true if any were removed */
purgeStale(): boolean;
}Control cache size and entry size calculations.
class LRUCache<K extends {}, V extends {}, FC = unknown> {
/** Maximum calculated size allowed for any single entry */
maxEntrySize: LRUCache.Size;
/** Function used to calculate entry sizes when no explicit size is given to set() */
sizeCalculation?: LRUCache.SizeCalculator<K, V>;
/** Skip the dispose callback when set() overwrites an existing value */
noDisposeOnSet: boolean;
/** Don't reset the TTL when set() overwrites an existing entry */
noUpdateTTL: boolean;
}Iterate over cache entries in various orders.
class LRUCache<K extends {}, V extends {}, FC = unknown> {
/** Iterator for [key, value] pairs (most to least recent) */
entries(): IterableIterator<[K, V]>;
/** Reverse iterator for [key, value] pairs (least to most recent) */
rentries(): IterableIterator<[K, V]>;
/** Iterator for keys (most to least recent) */
keys(): IterableIterator<K>;
/** Reverse iterator for keys (least to most recent) */
rkeys(): IterableIterator<K>;
/** Iterator for values (most to least recent) */
values(): IterableIterator<V>;
/** Reverse iterator for values (least to most recent) */
rvalues(): IterableIterator<V>;
/** Default iterator (same as entries()) */
[Symbol.iterator](): IterableIterator<[K, V]>;
/** Apply callback to each entry (most to least recent) */
forEach(fn: (value: V, key: K, cache: this) => any, thisArg?: any): void;
/** Apply callback to each entry (least to most recent) */
rforEach(fn: (value: V, key: K, cache: this) => any, thisArg?: any): void;
/** Return the value of the first entry matching the predicate; accepts get()-style options */
find(fn: (value: V, key: K, cache: this) => boolean, options?: LRUCache.GetOptions<K, V, FC>): V | undefined;
}Asynchronous cache operations with stale-while-revalidate patterns.
class LRUCache<K extends {}, V extends {}, FC = unknown> {
/** Async fetch with stale-while-revalidate support; uses the configured fetchMethod */
fetch(key: K, options?: LRUCache.FetchOptions<K, V, FC>): Promise<V | undefined>;
/** Like fetch(), but throws if the resolved value is undefined */
forceFetch(key: K, options?: LRUCache.FetchOptions<K, V, FC>): Promise<V>;
/** Synchronous memoization using the configured memoMethod */
memo(key: K, options?: LRUCache.MemoOptions<K, V, FC>): V;
/** Don't delete an existing entry when fetchMethod rejects */
noDeleteOnFetchRejection: boolean;
/** Allow the stale value to be returned when fetchMethod rejects */
allowStaleOnFetchRejection: boolean;
/** Allow the stale value to be returned when a fetch is aborted */
allowStaleOnFetchAbort: boolean;
/** Continue fetch operations despite abort signals */
ignoreFetchAbort: boolean;
}

Export and import cache state for persistence.
class LRUCache<K extends {}, V extends {}, FC = unknown> {
/** Export all entries (value plus ttl/size/start metadata) for serialization */
dump(): [K, LRUCache.Entry<V>][];
/** Import entries previously produced by dump() */
load(entries: [K, LRUCache.Entry<V>][]): void;
/** Get detailed information about a cache entry, or undefined if absent */
info(key: K): LRUCache.Entry<V> | undefined;
}Advanced cache management and debugging features.
class LRUCache<K extends {}, V extends {}, FC = unknown> {
/** Default iterator (same as entries()) */
[Symbol.iterator](): IterableIterator<[K, V]>;
/** String representation for debugging */
readonly [Symbol.toStringTag]: string;
/** Expose internal cache structure for debugging/testing only (unsafe; no stability guarantee) */
static unsafeExposeInternals<K extends {}, V extends {}, FC = unknown>(
cache: LRUCache<K, V, FC>
): object;
}interface LRUCache.OptionsBase<K, V, FC> {
/** Maximum number of items (at least one of max, maxSize, or ttl required) */
max?: LRUCache.Count;
/** Maximum total calculated size of all items */
maxSize?: LRUCache.Size;
/** Maximum size allowed for any single item */
maxEntrySize?: LRUCache.Size;
/** Time to live in milliseconds (at least one of max, maxSize, or ttl required) */
ttl?: LRUCache.Milliseconds;
/** Minimum time between staleness checks in milliseconds */
ttlResolution?: LRUCache.Milliseconds;
/** Automatically remove stale items */
ttlAutopurge?: boolean;
/** Reset item age when accessed with get() */
updateAgeOnGet?: boolean;
/** Reset item age when checked with has() */
updateAgeOnHas?: boolean;
/** Allow get() and fetch() to return stale values */
allowStale?: boolean;
/** Don't delete stale items when retrieved with get() */
noDeleteOnStaleGet?: boolean;
/** Function to calculate item sizes */
sizeCalculation?: LRUCache.SizeCalculator<K, V>;
/** Function called when items are removed from cache */
dispose?: LRUCache.Disposer<K, V>;
/** Function called after items are completely removed from cache */
disposeAfter?: LRUCache.Disposer<K, V>;
/** Function called when items are added to cache */
onInsert?: LRUCache.Inserter<K, V>;
/** Skip dispose function when overwriting existing values */
noDisposeOnSet?: boolean;
/** Don't update TTL when overwriting existing values */
noUpdateTTL?: boolean;
/** Async function for fetch() operations */
fetchMethod?: LRUCache.Fetcher<K, V, FC>;
/** Function for memo() operations */
memoMethod?: LRUCache.Memoizer<K, V, FC>;
/** Don't delete stale items when fetchMethod fails */
noDeleteOnFetchRejection?: boolean;
/** Return stale values when fetchMethod fails */
allowStaleOnFetchRejection?: boolean;
/** Return stale values when fetch is aborted */
allowStaleOnFetchAbort?: boolean;
/** Continue fetch operations despite abort signals */
ignoreFetchAbort?: boolean;
/** Custom performance timer object (must provide now()) */
perf?: Perf;
}
/** Constructor options: at least one bound (max, maxSize, or ttl+ttlAutopurge) must be provided */
type LRUCache.Options<K, V, FC> =
| LRUCache.OptionsMaxLimit<K, V, FC>
| LRUCache.OptionsSizeLimit<K, V, FC>
| LRUCache.OptionsTTLLimit<K, V, FC>;
/** Options variant that bounds the cache by entry count */
interface LRUCache.OptionsMaxLimit<K, V, FC> extends LRUCache.OptionsBase<K, V, FC> {
max: LRUCache.Count;
}
/** Options variant that bounds the cache by total calculated size */
interface LRUCache.OptionsSizeLimit<K, V, FC> extends LRUCache.OptionsBase<K, V, FC> {
maxSize: LRUCache.Size;
}
/** Options variant that bounds the cache by TTL alone (requires ttlAutopurge) */
interface LRUCache.OptionsTTLLimit<K, V, FC> extends LRUCache.OptionsBase<K, V, FC> {
ttl: LRUCache.Milliseconds;
ttlAutopurge: boolean;
}interface LRUCache.GetOptions<K, V, FC> {
/** Allow returning a stale value */
allowStale?: boolean;
/** Reset the entry's age on this get */
updateAgeOnGet?: boolean;
/** Don't delete the entry if it is stale */
noDeleteOnStaleGet?: boolean;
/** Object populated with operation status details */
status?: LRUCache.Status<V>;
}
/** Per-call overrides for set() */
interface LRUCache.SetOptions<K, V, FC> {
/** TTL for this entry, in milliseconds */
ttl?: LRUCache.Milliseconds;
/** Timestamp to treat as the entry's start time */
start?: LRUCache.Milliseconds;
/** Explicit size for this entry (bypasses sizeCalculation) */
size?: LRUCache.Size;
/** Size calculator to use for this entry */
sizeCalculation?: LRUCache.SizeCalculator<K, V>;
/** Skip the dispose callback if overwriting an existing value */
noDisposeOnSet?: boolean;
/** Don't reset the TTL if overwriting an existing entry */
noUpdateTTL?: boolean;
/** Object populated with operation status details */
status?: LRUCache.Status<V>;
}
/** Per-call overrides for has() */
interface LRUCache.HasOptions<K, V, FC> {
/** Reset the entry's age on this has() */
updateAgeOnHas?: boolean;
/** Object populated with operation status details */
status?: LRUCache.Status<V>;
}
/** Per-call overrides for peek() */
interface LRUCache.PeekOptions<K, V, FC> {
/** Allow returning a stale value */
allowStale?: boolean;
}
/** Per-call overrides for fetch(); combines get() and set() options */
interface LRUCache.FetchOptions<K, V, FC> extends LRUCache.GetOptions<K, V, FC>, LRUCache.SetOptions<K, V, FC> {
/** Call fetchMethod even if a fresh value is cached */
forceRefresh?: boolean;
/** External signal to abort the fetch */
signal?: AbortSignal;
/** Context value passed through to fetchMethod */
context?: FC;
/** Don't delete the entry if fetchMethod rejects */
noDeleteOnFetchRejection?: boolean;
/** Return the stale value if fetchMethod rejects */
allowStaleOnFetchRejection?: boolean;
/** Return the stale value if the fetch is aborted */
allowStaleOnFetchAbort?: boolean;
/** Continue the fetch despite abort signals */
ignoreFetchAbort?: boolean;
}
/** Per-call overrides for memo() */
interface LRUCache.MemoOptions<K, V, FC> {
/** Recompute even if a fresh value is cached */
forceRefresh?: boolean;
/** Context value passed through to memoMethod */
context?: FC;
/** Allow returning a stale value */
allowStale?: boolean;
/** Reset the entry's age on the internal get */
updateAgeOnGet?: boolean;
/** Don't delete the entry if it is stale */
noDeleteOnStaleGet?: boolean;
/** Size calculator to use for the stored result */
sizeCalculation?: LRUCache.SizeCalculator<K, V>;
/** TTL for the stored result, in milliseconds */
ttl?: LRUCache.Milliseconds;
/** Skip the dispose callback if overwriting */
noDisposeOnSet?: boolean;
/** Don't reset the TTL if overwriting */
noUpdateTTL?: boolean;
/** Don't delete the entry if the computation fails */
noDeleteOnFetchRejection?: boolean;
/** Return the stale value if the computation fails */
allowStaleOnFetchRejection?: boolean;
/** Continue despite abort signals */
ignoreFetchAbort?: boolean;
/** Return the stale value on abort */
allowStaleOnFetchAbort?: boolean;
/** Object populated with operation status details */
status?: LRUCache.Status<V>;
}type LRUCache.Disposer<K, V> = (
value: V,
key: K,
reason: LRUCache.DisposeReason
) => void;
/** Callback invoked when an entry is inserted or updated */
type LRUCache.Inserter<K, V> = (
value: V,
key: K,
reason: LRUCache.InsertReason
) => void;
/** Computes the size of an entry for maxSize accounting */
type LRUCache.SizeCalculator<K, V> = (value: V, key: K) => LRUCache.Size;
/** Async (or sync) producer used by fetch(); receives the stale value, if any */
type LRUCache.Fetcher<K, V, FC = unknown> = (
key: K,
staleValue: V | undefined,
options: LRUCache.FetcherOptions<K, V, FC>
) => Promise<V | undefined | void> | V | undefined | void;
/** Synchronous producer used by memo(); receives the stale value, if any */
type LRUCache.Memoizer<K, V, FC = unknown> = (
key: K,
staleValue: V | undefined,
options: LRUCache.MemoizerOptions<K, V, FC>
) => V;interface LRUCache.FetcherOptions<K, V, FC = unknown> {
/** Signal aborted if the fetch is cancelled or superseded */
signal: AbortSignal;
/** Effective options for this fetch call */
options: LRUCache.FetcherFetchOptions<K, V, FC>;
/** Caller-supplied context value */
context: FC;
}
/** Options visible to fetchMethod implementations */
interface LRUCache.FetcherFetchOptions<K, V, FC = unknown> {
allowStale?: boolean;
updateAgeOnGet?: boolean;
noDeleteOnStaleGet?: boolean;
sizeCalculation?: LRUCache.SizeCalculator<K, V>;
ttl?: LRUCache.Milliseconds;
noDisposeOnSet?: boolean;
noUpdateTTL?: boolean;
noDeleteOnFetchRejection?: boolean;
allowStaleOnFetchRejection?: boolean;
ignoreFetchAbort?: boolean;
allowStaleOnFetchAbort?: boolean;
/** Status object the fetch populates */
status?: LRUCache.Status<V>;
/** Size to record for the resulting entry */
size?: LRUCache.Size;
}
/** Third argument passed to memoMethod */
interface LRUCache.MemoizerOptions<K, V, FC = unknown> {
/** Effective options for this memo call */
options: LRUCache.MemoizerMemoOptions<K, V, FC>;
/** Caller-supplied context value */
context: FC;
}
/** Options visible to memoMethod implementations */
interface LRUCache.MemoizerMemoOptions<K, V, FC = unknown> {
allowStale?: boolean;
updateAgeOnGet?: boolean;
noDeleteOnStaleGet?: boolean;
sizeCalculation?: LRUCache.SizeCalculator<K, V>;
ttl?: LRUCache.Milliseconds;
noDisposeOnSet?: boolean;
noUpdateTTL?: boolean;
/** Status object the memo call populates */
status?: LRUCache.Status<V>;
/** Size to record for the resulting entry */
size?: LRUCache.Size;
/** Timestamp to treat as the entry's start time */
start?: LRUCache.Milliseconds;
}type LRUCache.Size = number;
/** Duration or timestamp expressed in milliseconds */
type LRUCache.Milliseconds = number;
/** Non-negative entry count */
type LRUCache.Count = number;
/** Why an entry was removed from the cache */
type LRUCache.DisposeReason = 'evict' | 'set' | 'delete' | 'expire' | 'fetch';
type LRUCache.InsertReason = 'add' | 'update' | 'replace';interface LRUCache.Status<V> {
/** Result of the set portion of the operation */
set?: 'add' | 'update' | 'replace' | 'miss';
/** Result of the has portion of the operation */
has?: 'hit' | 'stale' | 'miss';
/** Result of the get portion of the operation */
get?: 'stale' | 'hit' | 'miss';
/** How fetch() satisfied the request */
fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh';
ttl?: LRUCache.Milliseconds;
start?: LRUCache.Milliseconds;
now?: LRUCache.Milliseconds;
remainingTTL?: LRUCache.Milliseconds;
entrySize?: LRUCache.Size;
totalCalculatedSize?: LRUCache.Size;
/** Set when the entry exceeded maxEntrySize and was not stored */
maxEntrySizeExceeded?: true;
/** Previous value when an entry was replaced */
oldValue?: V;
returnedStale?: true;
fetchDispatched?: true;
fetchUpdated?: true;
fetchResolved?: true;
fetchRejected?: true;
fetchAborted?: true;
fetchAbortIgnored?: true;
/** Error raised by fetchMethod, if it rejected */
fetchError?: Error;
}
/** Serialized entry shape used by dump(), load(), and info() */
interface LRUCache.Entry<V> {
value: V;
ttl?: LRUCache.Milliseconds;
size?: LRUCache.Size;
start?: LRUCache.Milliseconds;
}interface Perf {
/** Current timestamp in milliseconds (Date.now/performance.now compatible) */
now(): number;
}type BackgroundFetch<V> = Promise<V | undefined> & {
/** The promise handed back to the caller, once returned (internal) */
__returned: BackgroundFetch<V> | undefined;
/** Controller used to abort this in-flight fetch (internal) */
__abortController: AbortController;
/** Stale value available while the fetch is in flight (internal) */
__staleWhileFetching: V | undefined;
};type PosInt = number & { [TYPE]: 'Positive Integer' };
/** Branded number used as an index into the internal entry storage */
type Index = number & { [TYPE]: 'LRUCache Index' };
/** Typed-array variants used for internal index storage */
type UintArray = Uint8Array | Uint16Array | Uint32Array;
type NumberArray = UintArray | number[];
/** Free-index container: a Stack instance or a plain index array */
type StackLike = Stack | Index[];
class Stack {
/** Create a stack sized for max entries; NOTE(review): return type suggests it may yield a plain array instead — confirm */
static create(max: number): StackLike;
push(n: Index): void;
pop(): Index;
}
/** Number array pre-initialized to zeroes at the given size */
class ZeroArray extends Array<number> {
constructor(size: number);
}
/** Pending disposal record: [value, key, reason] */
type DisposeTask<K, V> = [
value: V,
key: K,
reason: LRUCache.DisposeReason,
];import { LRUCache } from "lru-cache";
// Simple cache with max entries
const cache = new LRUCache<string, string>({ max: 100 });
cache.set("user:123", "John Doe");
cache.set("user:456", "Jane Smith");
console.log(cache.get("user:123")); // "John Doe"
console.log(cache.has("user:456")); // true
console.log(cache.size); // 2
cache.delete("user:123");
console.log(cache.get("user:123")); // undefinedimport { LRUCache } from "lru-cache";
// TTL cache: entries expire after 5 minutes; stale values may still be returned
const cache = new LRUCache<string, any>({
max: 500,
ttl: 1000 * 60 * 5, // 5 minutes
allowStale: true,
updateAgeOnGet: true,
});
cache.set("session:abc", { userId: 123, role: "admin" });
// Check remaining TTL (milliseconds until expiration)
console.log(cache.getRemainingTTL("session:abc")); // ~300000ms
// Get with status tracking (status object is populated by the call)
const status = {};
const session = cache.get("session:abc", { status });
console.log(status); // { get: 'hit', ttl: 300000, ... }import { LRUCache } from "lru-cache";
// Size-bounded cache: evicts based on total calculated size
const cache = new LRUCache<string, Buffer>({
max: 100,
maxSize: 1024 * 1024, // 1MB total
sizeCalculation: (value, key) => value.length + key.length,
});
const largeBuffer = Buffer.alloc(500000);
cache.set("large-file", largeBuffer);
console.log(cache.calculatedSize); // ~500000
console.log(cache.size); // 1import { LRUCache } from "lru-cache";
// Async fetching with stale-while-revalidate via fetchMethod
const cache = new LRUCache<string, any>({
max: 100,
ttl: 1000 * 60 * 10, // 10 minutes
fetchMethod: async (key, staleValue, { signal }) => {
const response = await fetch(`/api/data/${key}`, { signal });
return response.json();
},
});
// First call fetches from API
const data1 = await cache.fetch("user-profile");
// Second call returns cached value
const data2 = await cache.fetch("user-profile");
// Force refresh
const data3 = await cache.fetch("user-profile", { forceRefresh: true });
// Fetch that throws if undefined (useful for required data)
try {
const data4 = await cache.forceFetch("user-profile");
console.log("Data:", data4); // Always has a value
} catch (error) {
console.error("Failed to fetch required data:", error);
}import { LRUCache } from "lru-cache";
// Synchronous memoization of expensive computations via memoMethod
const cache = new LRUCache<string, number>({
max: 1000,
memoMethod: (key) => {
// Expensive computation
return expensiveCalculation(key);
},
});
// Computes and caches result
const result1 = cache.memo("fibonacci-100");
// Returns cached result
const result2 = cache.memo("fibonacci-100");
// Force recomputation
const result3 = cache.memo("fibonacci-100", { forceRefresh: true });import { LRUCache } from "lru-cache";
// Lifecycle hooks: dispose and onInsert callbacks
const cache = new LRUCache<string, any>({
max: 100,
dispose: (value, key, reason) => {
console.log(`Disposing ${key} (${reason})`);
if (value.cleanup) value.cleanup();
},
onInsert: (value, key, reason) => {
console.log(`Inserted ${key} (${reason})`);
},
});
cache.set("resource", { data: "...", cleanup: () => {} });
// Logs: "Inserted resource (add)"
cache.set("resource", { data: "updated" });
// Logs: "Inserted resource (replace)"
// Logs: "Disposing resource (set)"import { LRUCache } from "lru-cache";
// Persistence: dump() and load() round-trip the cache state
const cache = new LRUCache<string, any>({ max: 100, ttl: 60000 });
cache.set("user:1", { name: "Alice" });
cache.set("user:2", { name: "Bob" });
// Export cache state
const exported = cache.dump();
console.log(exported); // [[key, {value, ttl, start, size}], ...]
// Create new cache and import state
const newCache = new LRUCache<string, any>({ max: 100, ttl: 60000 });
newCache.load(exported);
console.log(newCache.get("user:1")); // { name: "Alice" }import { LRUCache } from "lru-cache";
// Iteration in recency order
const cache = new LRUCache<string, number>({ max: 5 });
cache.set("a", 1);
cache.set("b", 2);
cache.set("c", 3);
// Iterate entries (most to least recent)
for (const [key, value] of cache.entries()) {
console.log(key, value); // c 3, b 2, a 1
}
// Iterate in reverse (least to most recent)
for (const [key, value] of cache.rentries()) {
console.log(key, value); // a 1, b 2, c 3
}
// Use forEach
cache.forEach((value, key) => {
console.log(`${key}: ${value}`);
});
// Find first matching entry
const found = cache.find((value, key) => value > 1);
console.log(found); // 3 (from key "c")