# LRU Cache

LRU Cache is a high-performance Least Recently Used (LRU) cache implementation for JavaScript and TypeScript applications. It automatically evicts the least recently used items when capacity limits are reached, and supports flexible configuration including a maximum item count, total storage size limits, and time-to-live (TTL) expiration.

## Package Information

- **Package Name**: lru-cache
- **Package Type**: npm
- **Language**: TypeScript
- **Installation**: `npm install lru-cache`
- **Node Version**: 20 || >=22

## Core Imports

```typescript
import { LRUCache } from "lru-cache";
```

For CommonJS:

```javascript
const { LRUCache } = require("lru-cache");
```

For the minified build:

```typescript
import { LRUCache } from "lru-cache/min";
```

For browser use (via CDN):

```javascript
import { LRUCache } from "http://unpkg.com/lru-cache@11/dist/mjs/index.min.mjs";
```

## Basic Usage

```typescript
import { LRUCache } from "lru-cache";

// Create cache with max entries limit
const cache = new LRUCache<string, any>({ max: 500 });

// Basic operations
cache.set("key", "value");
const value = cache.get("key"); // "value"
const exists = cache.has("key"); // true
cache.delete("key");
cache.clear();

// TTL-based caching
const ttlCache = new LRUCache<string, any>({
  max: 100,
  ttl: 1000 * 60 * 5, // 5 minutes
});

ttlCache.set("session", { userId: 123 });
```

## Architecture

LRU Cache is built around several key components:

- **Core Cache**: Generic `LRUCache<K, V, FC>` class with an efficient doubly-linked list for LRU tracking
- **Storage Options**: Support for max entry count, total size limits, and TTL-based expiration
- **Lifecycle Hooks**: Disposal callbacks, insertion callbacks, and size calculation functions
- **Async Operations**: Built-in fetch method for stale-while-revalidate patterns and memoization
- **Type Safety**: Full TypeScript generics with branded types for internal safety

## Capabilities

### Core Cache Operations

Essential cache operations for storing, retrieving, and managing cache entries.

```typescript { .api }
class LRUCache<K extends {}, V extends {}, FC = unknown> {
  constructor(options: LRUCache.Options<K, V, FC>);

  /** Add or update a cache entry */
  set(key: K, value: V | undefined, options?: LRUCache.SetOptions<K, V, FC>): this;

  /** Retrieve a cache entry, updating its recency */
  get(key: K, options?: LRUCache.GetOptions<K, V, FC>): V | undefined;

  /** Check if a key exists without updating recency */
  has(key: K, options?: LRUCache.HasOptions<K, V, FC>): boolean;

  /** Remove a cache entry */
  delete(key: K): boolean;

  /** Remove all cache entries */
  clear(): void;

  /** Get a value without updating recency or deleting stale items */
  peek(key: K, options?: LRUCache.PeekOptions<K, V, FC>): V | undefined;

  /** Remove and return the least recently used item */
  pop(): V | undefined;
}
```
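
For illustration, a short sketch (not part of the upstream docs) of how `peek()` and `pop()` differ from `get()`; the keys and values are arbitrary:

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, number>({ max: 3 });
cache.set("a", 1);
cache.set("b", 2);

// peek() reads without marking the entry as recently used
const peeked = cache.peek("a"); // 1, but "a" stays least recently used

// pop() evicts and returns the least recently used value
const evicted = cache.pop(); // 1 (the value stored under "a")
console.log(peeked, evicted, cache.size); // 1 1 1
```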

### Cache Information and Properties

Access cache state and configuration.

```typescript { .api }
class LRUCache<K extends {}, V extends {}, FC = unknown> {
  /** Maximum number of entries (read-only) */
  readonly max: LRUCache.Count;

  /** Maximum total calculated size (read-only) */
  readonly maxSize: LRUCache.Size;

  /** Current number of entries (read-only) */
  readonly size: LRUCache.Count;

  /** Current total calculated size (read-only) */
  readonly calculatedSize: LRUCache.Size;

  /** Fetch method function (read-only) */
  readonly fetchMethod?: LRUCache.Fetcher<K, V, FC>;

  /** Memoization method function (read-only) */
  readonly memoMethod?: LRUCache.Memoizer<K, V, FC>;

  /** Disposal callback function (read-only) */
  readonly dispose?: LRUCache.Disposer<K, V>;

  /** Post-disposal callback function (read-only) */
  readonly disposeAfter?: LRUCache.Disposer<K, V>;

  /** Item insertion callback function (read-only) */
  readonly onInsert?: LRUCache.Inserter<K, V>;

  /** Performance timer object (read-only) */
  readonly perf: Perf;
}
```
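
A minimal sketch of reading these properties; the numbers in the comments assume exactly the configuration shown:

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, string>({
  max: 10,
  maxSize: 1000,
  sizeCalculation: (value) => value.length,
});

cache.set("greeting", "hello world");

console.log(cache.max); // 10
console.log(cache.maxSize); // 1000
console.log(cache.size); // 1 (number of entries)
console.log(cache.calculatedSize); // 11 (sum of calculated entry sizes)
```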

### TTL and Age Management

Configure and manage time-based cache expiration.

```typescript { .api }
class LRUCache<K extends {}, V extends {}, FC = unknown> {
  /** Default TTL in milliseconds */
  ttl: LRUCache.Milliseconds;

  /** TTL resolution in milliseconds */
  ttlResolution: LRUCache.Milliseconds;

  /** Automatically purge expired items */
  ttlAutopurge: boolean;

  /** Reset age on get operations */
  updateAgeOnGet: boolean;

  /** Reset age on has operations */
  updateAgeOnHas: boolean;

  /** Allow returning stale values */
  allowStale: boolean;

  /** Don't delete stale items on get */
  noDeleteOnStaleGet: boolean;

  /** Get remaining TTL for a key in milliseconds */
  getRemainingTTL(key: K): LRUCache.Milliseconds;

  /** Remove all stale entries */
  purgeStale(): boolean;
}
```
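
A small sketch of per-entry TTL overrides together with `getRemainingTTL()` and `purgeStale()`; the timings below are illustrative only:

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, string>({
  max: 100,
  ttl: 1000, // 1 second default TTL
  allowStale: false,
});

cache.set("token", "abc123");
console.log(cache.getRemainingTTL("token")); // roughly 1000 (ms)

// Per-entry TTL override at set() time
cache.set("short-lived", "xyz", { ttl: 100 });

setTimeout(() => {
  // purgeStale() walks the cache and deletes expired entries
  const removedSomething = cache.purgeStale();
  console.log(removedSomething, cache.has("short-lived")); // true false
}, 200);
```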

### Size Management

Control cache size and entry size calculations.

```typescript { .api }
class LRUCache<K extends {}, V extends {}, FC = unknown> {
  /** Maximum size allowed for individual entries */
  maxEntrySize: LRUCache.Size;

  /** Function to calculate entry sizes */
  sizeCalculation?: LRUCache.SizeCalculator<K, V>;

  /** Skip dispose callbacks on set operations */
  noDisposeOnSet: boolean;

  /** Don't update TTL on existing entries */
  noUpdateTTL: boolean;
}
```
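
A sketch of `maxEntrySize` combined with `sizeCalculation`: entries whose calculated size exceeds `maxEntrySize` are simply not stored. The limits below are arbitrary:

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, string>({
  maxSize: 100,
  maxEntrySize: 10,
  sizeCalculation: (value) => value.length,
});

cache.set("ok", "tiny"); // size 4, stored
cache.set("too-big", "x".repeat(50)); // size 50 > maxEntrySize, not stored

console.log(cache.has("ok")); // true
console.log(cache.has("too-big")); // false
```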

### Iteration and Traversal

Iterate over cache entries in various orders.

```typescript { .api }
class LRUCache<K extends {}, V extends {}, FC = unknown> {
  /** Iterator for [key, value] pairs (most to least recent) */
  entries(): IterableIterator<[K, V]>;

  /** Iterator for [key, value] pairs (least to most recent) */
  rentries(): IterableIterator<[K, V]>;

  /** Iterator for keys (most to least recent) */
  keys(): IterableIterator<K>;

  /** Iterator for keys (least to most recent) */
  rkeys(): IterableIterator<K>;

  /** Iterator for values (most to least recent) */
  values(): IterableIterator<V>;

  /** Iterator for values (least to most recent) */
  rvalues(): IterableIterator<V>;

  /** Default iterator (same as entries()) */
  [Symbol.iterator](): IterableIterator<[K, V]>;

  /** Apply callback to each entry (most to least recent) */
  forEach(fn: (value: V, key: K, cache: this) => any, thisArg?: any): void;

  /** Apply callback to each entry (least to most recent) */
  rforEach(fn: (value: V, key: K, cache: this) => any, thisArg?: any): void;

  /** Find first entry matching predicate */
  find(fn: (value: V, key: K, cache: this) => boolean, options?: LRUCache.GetOptions<K, V, FC>): V | undefined;
}
```

### Async Operations and Fetch

Asynchronous cache operations with stale-while-revalidate patterns.

```typescript { .api }
class LRUCache<K extends {}, V extends {}, FC = unknown> {
  /** Async fetch with stale-while-revalidate support */
  fetch(key: K, options?: LRUCache.FetchOptions<K, V, FC>): Promise<V | undefined>;

  /** Fetch that throws if result is undefined */
  forceFetch(key: K, options?: LRUCache.FetchOptions<K, V, FC>): Promise<V>;

  /** Memoization with computation function */
  memo(key: K, options?: LRUCache.MemoOptions<K, V, FC>): V;

  /** Control fetch rejection behavior */
  noDeleteOnFetchRejection: boolean;

  /** Allow stale values on fetch rejection */
  allowStaleOnFetchRejection: boolean;

  /** Allow stale values on fetch abort */
  allowStaleOnFetchAbort: boolean;

  /** Ignore fetch abort signals */
  ignoreFetchAbort: boolean;
}
```

### Serialization and Persistence

Export and import cache state for persistence.

```typescript { .api }
class LRUCache<K extends {}, V extends {}, FC = unknown> {
  /** Export cache entries for serialization */
  dump(): [K, LRUCache.Entry<V>][];

  /** Import cache entries from serialization */
  load(entries: [K, LRUCache.Entry<V>][]): void;

  /** Get detailed information about a cache entry */
  info(key: K): LRUCache.Entry<V> | undefined;
}
```
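
A short sketch of `info()`, which reports a single entry in the same shape used by `dump()`/`load()` (see the full dump/load example under Usage Examples):

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, string>({ max: 10, ttl: 60_000 });
cache.set("user:1", "Alice");

// info() returns the entry for one key, or undefined if it is missing or stale
const entry = cache.info("user:1");
console.log(entry?.value); // "Alice"
// entry may also carry ttl, size, and start metadata when those are tracked
```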

### Advanced Functionality

Advanced cache management and debugging features.

```typescript { .api }
class LRUCache<K extends {}, V extends {}, FC = unknown> {
  /** Default iterator (same as entries()) */
  [Symbol.iterator](): IterableIterator<[K, V]>;

  /** String representation for debugging */
  readonly [Symbol.toStringTag]: string;

  /** Expose internal cache structure for debugging (unsafe) */
  static unsafeExposeInternals<K extends {}, V extends {}, FC = unknown>(
    cache: LRUCache<K, V, FC>
  ): object;
}
```
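
For illustration, a brief sketch of the debugging helpers; the exact tag string in the comment is an assumption based on the class name:

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, number>({ max: 10 });
cache.set("answer", 42);

// The default iterator yields [key, value] pairs, most recent first
console.log([...cache]); // [["answer", 42]]

// Symbol.toStringTag gives the cache a readable tag in debugging output
console.log(Object.prototype.toString.call(cache)); // e.g. "[object LRUCache]"
```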

## Configuration Options

### Constructor Options

```typescript { .api }
interface LRUCache.OptionsBase<K, V, FC> {
  /** Maximum number of items (at least one of max, maxSize, or ttl required) */
  max?: LRUCache.Count;

  /** Maximum total calculated size of all items */
  maxSize?: LRUCache.Size;

  /** Maximum size allowed for any single item */
  maxEntrySize?: LRUCache.Size;

  /** Time to live in milliseconds (at least one of max, maxSize, or ttl required) */
  ttl?: LRUCache.Milliseconds;

  /** Minimum time between staleness checks in milliseconds */
  ttlResolution?: LRUCache.Milliseconds;

  /** Automatically remove stale items */
  ttlAutopurge?: boolean;

  /** Reset item age when accessed with get() */
  updateAgeOnGet?: boolean;

  /** Reset item age when checked with has() */
  updateAgeOnHas?: boolean;

  /** Allow get() and fetch() to return stale values */
  allowStale?: boolean;

  /** Don't delete stale items when retrieved with get() */
  noDeleteOnStaleGet?: boolean;

  /** Function to calculate item sizes */
  sizeCalculation?: LRUCache.SizeCalculator<K, V>;

  /** Function called when items are removed from cache */
  dispose?: LRUCache.Disposer<K, V>;

  /** Function called after items are completely removed from cache */
  disposeAfter?: LRUCache.Disposer<K, V>;

  /** Function called when items are added to cache */
  onInsert?: LRUCache.Inserter<K, V>;

  /** Skip dispose function when overwriting existing values */
  noDisposeOnSet?: boolean;

  /** Don't update TTL when overwriting existing values */
  noUpdateTTL?: boolean;

  /** Async function for fetch() operations */
  fetchMethod?: LRUCache.Fetcher<K, V, FC>;

  /** Function for memo() operations */
  memoMethod?: LRUCache.Memoizer<K, V, FC>;

  /** Don't delete stale items when fetchMethod fails */
  noDeleteOnFetchRejection?: boolean;

  /** Return stale values when fetchMethod fails */
  allowStaleOnFetchRejection?: boolean;

  /** Return stale values when fetch is aborted */
  allowStaleOnFetchAbort?: boolean;

  /** Continue fetch operations despite abort signals */
  ignoreFetchAbort?: boolean;

  /** Custom performance timer object */
  perf?: Perf;
}

type LRUCache.Options<K, V, FC> =
  | LRUCache.OptionsMaxLimit<K, V, FC>
  | LRUCache.OptionsSizeLimit<K, V, FC>
  | LRUCache.OptionsTTLLimit<K, V, FC>;

interface LRUCache.OptionsMaxLimit<K, V, FC> extends LRUCache.OptionsBase<K, V, FC> {
  max: LRUCache.Count;
}

interface LRUCache.OptionsSizeLimit<K, V, FC> extends LRUCache.OptionsBase<K, V, FC> {
  maxSize: LRUCache.Size;
}

interface LRUCache.OptionsTTLLimit<K, V, FC> extends LRUCache.OptionsBase<K, V, FC> {
  ttl: LRUCache.Milliseconds;
  ttlAutopurge: boolean;
}
```
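
The union above means every cache must be bounded by at least one of `max`, `maxSize`, or `ttl` (with `ttlAutopurge`). A sketch of the three shapes, with arbitrary limits:

```typescript
import { LRUCache } from "lru-cache";

// Bounded by entry count
const byCount = new LRUCache<string, string>({ max: 100 });

// Bounded by total calculated size
const bySize = new LRUCache<string, string>({
  maxSize: 5000,
  sizeCalculation: (value) => value.length,
});

// Bounded by TTL only; ttlAutopurge ensures expired entries are actually removed
const byTtl = new LRUCache<string, string>({ ttl: 60_000, ttlAutopurge: true });

// A cache with none of max, maxSize, or ttl is rejected by the type union above.
```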

### Method Options

```typescript { .api }
interface LRUCache.GetOptions<K, V, FC> {
  allowStale?: boolean;
  updateAgeOnGet?: boolean;
  noDeleteOnStaleGet?: boolean;
  status?: LRUCache.Status<V>;
}

interface LRUCache.SetOptions<K, V, FC> {
  ttl?: LRUCache.Milliseconds;
  start?: LRUCache.Milliseconds;
  size?: LRUCache.Size;
  sizeCalculation?: LRUCache.SizeCalculator<K, V>;
  noDisposeOnSet?: boolean;
  noUpdateTTL?: boolean;
  status?: LRUCache.Status<V>;
}

interface LRUCache.HasOptions<K, V, FC> {
  updateAgeOnHas?: boolean;
  status?: LRUCache.Status<V>;
}

interface LRUCache.PeekOptions<K, V, FC> {
  allowStale?: boolean;
}

interface LRUCache.FetchOptions<K, V, FC> extends LRUCache.GetOptions<K, V, FC>, LRUCache.SetOptions<K, V, FC> {
  forceRefresh?: boolean;
  signal?: AbortSignal;
  context?: FC;
  noDeleteOnFetchRejection?: boolean;
  allowStaleOnFetchRejection?: boolean;
  allowStaleOnFetchAbort?: boolean;
  ignoreFetchAbort?: boolean;
}

interface LRUCache.MemoOptions<K, V, FC> {
  forceRefresh?: boolean;
  context?: FC;
  allowStale?: boolean;
  updateAgeOnGet?: boolean;
  noDeleteOnStaleGet?: boolean;
  sizeCalculation?: LRUCache.SizeCalculator<K, V>;
  ttl?: LRUCache.Milliseconds;
  noDisposeOnSet?: boolean;
  noUpdateTTL?: boolean;
  noDeleteOnFetchRejection?: boolean;
  allowStaleOnFetchRejection?: boolean;
  ignoreFetchAbort?: boolean;
  allowStaleOnFetchAbort?: boolean;
  status?: LRUCache.Status<V>;
}
```
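
A sketch combining per-call options with status tracking; the keys, TTLs, and expected status values are illustrative:

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, string>({ max: 100, ttl: 60_000 });

// Per-entry overrides at set() time
cache.set("banner", "hello", { ttl: 5_000, noUpdateTTL: false });

// Per-call overrides and status tracking at get()/has() time
const status: LRUCache.Status<string> = {};
const value = cache.get("banner", { updateAgeOnGet: true, status });
const present = cache.has("banner", { updateAgeOnHas: false, status });
console.log(value, present, status.get); // "hello" true "hit"
```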

## Function Types

### Callback Function Types

```typescript { .api }
type LRUCache.Disposer<K, V> = (
  value: V,
  key: K,
  reason: LRUCache.DisposeReason
) => void;

type LRUCache.Inserter<K, V> = (
  value: V,
  key: K,
  reason: LRUCache.InsertReason
) => void;

type LRUCache.SizeCalculator<K, V> = (value: V, key: K) => LRUCache.Size;

type LRUCache.Fetcher<K, V, FC = unknown> = (
  key: K,
  staleValue: V | undefined,
  options: LRUCache.FetcherOptions<K, V, FC>
) => Promise<V | undefined | void> | V | undefined | void;

type LRUCache.Memoizer<K, V, FC = unknown> = (
  key: K,
  staleValue: V | undefined,
  options: LRUCache.MemoizerOptions<K, V, FC>
) => V;
```
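
A sketch (with a made-up endpoint and context shape) of how a typed `Fetcher` receives the stale value, abort signal, and `FC` context:

```typescript
import { LRUCache } from "lru-cache";

// The third type parameter (FC) types the `context` passed through fetch()
type FetchContext = { authToken: string };

const cache = new LRUCache<string, string, FetchContext>({
  max: 100,
  fetchMethod: async (key, staleValue, { signal, context }) => {
    // `context` is typed as FetchContext here; `staleValue` is the previous value, if any
    const res = await fetch(`https://api.example.com/${key}`, {
      signal,
      headers: { authorization: context.authToken },
    });
    return res.text();
  },
});

// context is forwarded to the fetchMethod and type-checked against FC
const data = await cache.fetch("profile", { context: { authToken: "secret" } });
```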

### Callback Parameter Types

```typescript { .api }
interface LRUCache.FetcherOptions<K, V, FC = unknown> {
  signal: AbortSignal;
  options: LRUCache.FetcherFetchOptions<K, V, FC>;
  context: FC;
}

interface LRUCache.FetcherFetchOptions<K, V, FC = unknown> {
  allowStale?: boolean;
  updateAgeOnGet?: boolean;
  noDeleteOnStaleGet?: boolean;
  sizeCalculation?: LRUCache.SizeCalculator<K, V>;
  ttl?: LRUCache.Milliseconds;
  noDisposeOnSet?: boolean;
  noUpdateTTL?: boolean;
  noDeleteOnFetchRejection?: boolean;
  allowStaleOnFetchRejection?: boolean;
  ignoreFetchAbort?: boolean;
  allowStaleOnFetchAbort?: boolean;
  status?: LRUCache.Status<V>;
  size?: LRUCache.Size;
}

interface LRUCache.MemoizerOptions<K, V, FC = unknown> {
  options: LRUCache.MemoizerMemoOptions<K, V, FC>;
  context: FC;
}

interface LRUCache.MemoizerMemoOptions<K, V, FC = unknown> {
  allowStale?: boolean;
  updateAgeOnGet?: boolean;
  noDeleteOnStaleGet?: boolean;
  sizeCalculation?: LRUCache.SizeCalculator<K, V>;
  ttl?: LRUCache.Milliseconds;
  noDisposeOnSet?: boolean;
  noUpdateTTL?: boolean;
  status?: LRUCache.Status<V>;
  size?: LRUCache.Size;
  start?: LRUCache.Milliseconds;
}
```

## Data Types

### Core Value Types

```typescript { .api }
type LRUCache.Size = number;
type LRUCache.Milliseconds = number;
type LRUCache.Count = number;

type LRUCache.DisposeReason = 'evict' | 'set' | 'delete' | 'expire' | 'fetch';
type LRUCache.InsertReason = 'add' | 'update' | 'replace';
```

### Status and Entry Types

```typescript { .api }
interface LRUCache.Status<V> {
  set?: 'add' | 'update' | 'replace' | 'miss';
  has?: 'hit' | 'stale' | 'miss';
  get?: 'stale' | 'hit' | 'miss';
  fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh';

  ttl?: LRUCache.Milliseconds;
  start?: LRUCache.Milliseconds;
  now?: LRUCache.Milliseconds;
  remainingTTL?: LRUCache.Milliseconds;
  entrySize?: LRUCache.Size;
  totalCalculatedSize?: LRUCache.Size;
  maxEntrySizeExceeded?: true;
  oldValue?: V;
  returnedStale?: true;

  fetchDispatched?: true;
  fetchUpdated?: true;
  fetchResolved?: true;
  fetchRejected?: true;
  fetchAborted?: true;
  fetchAbortIgnored?: true;
  fetchError?: Error;
}

interface LRUCache.Entry<V> {
  value: V;
  ttl?: LRUCache.Milliseconds;
  size?: LRUCache.Size;
  start?: LRUCache.Milliseconds;
}
```

### Performance Timer Interface

```typescript { .api }
interface Perf {
  now(): number;
}
```
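
To make the shape concrete, a minimal sketch of two timers that satisfy this interface; per the Constructor Options above, such an object can be supplied via the `perf` option:

```typescript
// Perf, as documented above: anything exposing a now() method returning milliseconds
interface Perf {
  now(): number;
}

// The global performance object satisfies it in modern Node and browsers...
const highRes: Perf = performance;

// ...and a Date-based timer is a valid, lower-resolution substitute
const dateBased: Perf = { now: () => Date.now() };

console.log(highRes.now(), dateBased.now());
```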

### Advanced Promise Type

```typescript { .api }
type BackgroundFetch<V> = Promise<V | undefined> & {
  __returned: BackgroundFetch<V> | undefined;
  __abortController: AbortController;
  __staleWhileFetching: V | undefined;
};
```

### Internal Utility Types

```typescript { .api }
type PosInt = number & { [TYPE]: 'Positive Integer' };
type Index = number & { [TYPE]: 'LRUCache Index' };
type UintArray = Uint8Array | Uint16Array | Uint32Array;
type NumberArray = UintArray | number[];
type StackLike = Stack | Index[];

class Stack {
  static create(max: number): StackLike;
  push(n: Index): void;
  pop(): Index;
}

class ZeroArray extends Array<number> {
  constructor(size: number);
}

type DisposeTask<K, V> = [
  value: V,
  key: K,
  reason: LRUCache.DisposeReason,
];
```

## Usage Examples

### Basic Caching

```typescript
import { LRUCache } from "lru-cache";

// Simple cache with max entries
const cache = new LRUCache<string, string>({ max: 100 });

cache.set("user:123", "John Doe");
cache.set("user:456", "Jane Smith");

console.log(cache.get("user:123")); // "John Doe"
console.log(cache.has("user:456")); // true
console.log(cache.size); // 2

cache.delete("user:123");
console.log(cache.get("user:123")); // undefined
```

### TTL-Based Caching

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, any>({
  max: 500,
  ttl: 1000 * 60 * 5, // 5 minutes
  allowStale: true,
  updateAgeOnGet: true,
});

cache.set("session:abc", { userId: 123, role: "admin" });

// Check remaining TTL
console.log(cache.getRemainingTTL("session:abc")); // ~300000ms

// Get with status tracking
const status = {};
const session = cache.get("session:abc", { status });
console.log(status); // { get: 'hit', ttl: 300000, ... }
```

### Size-Based Caching

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, Buffer>({
  max: 100,
  maxSize: 1024 * 1024, // 1MB total
  sizeCalculation: (value, key) => value.length + key.length,
});

const largeBuffer = Buffer.alloc(500000);
cache.set("large-file", largeBuffer);

console.log(cache.calculatedSize); // ~500000
console.log(cache.size); // 1
```

### Async Fetch with Stale-While-Revalidate

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, any>({
  max: 100,
  ttl: 1000 * 60 * 10, // 10 minutes
  fetchMethod: async (key, staleValue, { signal }) => {
    const response = await fetch(`/api/data/${key}`, { signal });
    return response.json();
  },
});

// First call fetches from API
const data1 = await cache.fetch("user-profile");

// Second call returns cached value
const data2 = await cache.fetch("user-profile");

// Force refresh
const data3 = await cache.fetch("user-profile", { forceRefresh: true });

// Fetch that throws if undefined (useful for required data)
try {
  const data4 = await cache.forceFetch("user-profile");
  console.log("Data:", data4); // Always has a value
} catch (error) {
  console.error("Failed to fetch required data:", error);
}
```

### Memoization

```typescript
import { LRUCache } from "lru-cache";

// Placeholder for a costly pure function
const expensiveCalculation = (key: string): number => key.length ** 2;

const cache = new LRUCache<string, number>({
  max: 1000,
  memoMethod: (key) => {
    // Expensive computation
    return expensiveCalculation(key);
  },
});

// Computes and caches result
const result1 = cache.memo("fibonacci-100");

// Returns cached result
const result2 = cache.memo("fibonacci-100");

// Force recomputation
const result3 = cache.memo("fibonacci-100", { forceRefresh: true });
```

### Lifecycle Callbacks

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, any>({
  max: 100,
  dispose: (value, key, reason) => {
    console.log(`Disposing ${key} (${reason})`);
    if (value.cleanup) value.cleanup();
  },
  onInsert: (value, key, reason) => {
    console.log(`Inserted ${key} (${reason})`);
  },
});

cache.set("resource", { data: "...", cleanup: () => {} });
// Logs: "Inserted resource (add)"

cache.set("resource", { data: "updated" });
// Logs: "Inserted resource (replace)"
// Logs: "Disposing resource (set)"
```

### Serialization and Persistence

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, any>({ max: 100, ttl: 60000 });

cache.set("user:1", { name: "Alice" });
cache.set("user:2", { name: "Bob" });

// Export cache state
const exported = cache.dump();
console.log(exported); // [[key, {value, ttl, start, size}], ...]

// Create new cache and import state
const newCache = new LRUCache<string, any>({ max: 100, ttl: 60000 });
newCache.load(exported);

console.log(newCache.get("user:1")); // { name: "Alice" }
```

### Iteration

```typescript
import { LRUCache } from "lru-cache";

const cache = new LRUCache<string, number>({ max: 5 });

cache.set("a", 1);
cache.set("b", 2);
cache.set("c", 3);

// Iterate entries (most to least recent)
for (const [key, value] of cache.entries()) {
  console.log(key, value); // c 3, b 2, a 1
}

// Iterate in reverse (least to most recent)
for (const [key, value] of cache.rentries()) {
  console.log(key, value); // a 1, b 2, c 3
}

// Use forEach
cache.forEach((value, key) => {
  console.log(`${key}: ${value}`);
});

// Find first matching entry
const found = cache.find((value, key) => value > 1);
console.log(found); // 3 (from key "c")
```