Debugs native module crashes, optimizes V8 performance, configures node-gyp builds, writes N-API/node-addon-api bindings, and diagnoses libuv event loop issues in Node.js. Use when working with C++ addons, native modules, binding.gyp, node-gyp errors, segfaults, memory leaks in native code, V8 optimization/deoptimization, libuv thread pool tuning, N-API or NAN bindings, build system failures, or any Node.js internals below the JavaScript layer.
99
99%
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
Passed
No known issues
Memory issues in Node.js can manifest as leaks, excessive consumption, or crashes. This guide covers tools and techniques for diagnosing and fixing memory problems.
Process Memory
├── V8 Heap (JavaScript objects)
│ ├── New Space (Young Generation)
│ ├── Old Space (Old Generation)
│ ├── Large Object Space
│ └── Code Space
├── V8 External Memory (Buffers, ArrayBuffers)
├── Native Memory (C++ allocations)
└── Stack Memory

# Default old space limit (approximately)
# ~1.4GB on 64-bit systems
# Increase limit
node --max-old-space-size=4096 app.js # 4GB
# Check current limits
node -e "console.log(v8.getHeapStatistics())"const v8 = require('node:v8');
const fs = require('node:fs');
// Method 1: Write to file
// Write a V8 heap snapshot to disk and return the path it was written to.
// When no filename is supplied (or a falsy one is), a timestamped
// `heap-<ms>.heapsnapshot` name is generated instead.
function takeSnapshot(filename) {
  const target = filename || `heap-${Date.now()}.heapsnapshot`;
  v8.writeHeapSnapshot(target);
  console.log(`Heap snapshot written to ${target}`);
  return target;
}
// Method 2: Using inspector
const inspector = require('node:inspector');
// Capture a heap snapshot through the Inspector protocol and save it as
// heap.heapsnapshot. Resolves with the output filename.
async function takeSnapshotWithInspector() {
  const session = new inspector.Session();
  session.connect();
  return new Promise((resolve, reject) => {
    const parts = [];
    // Snapshot data is streamed as chunk events while the post() below runs.
    session.on('HeapProfiler.addHeapSnapshotChunk', (msg) => {
      parts.push(msg.params.chunk);
    });
    session.post('HeapProfiler.takeHeapSnapshot', null, (err) => {
      session.disconnect();
      if (err) {
        reject(err);
        return;
      }
      fs.writeFileSync('heap.heapsnapshot', parts.join(''));
      resolve('heap.heapsnapshot');
    });
  });
}
// Take baseline
const snapshot1 = takeSnapshot('before.heapsnapshot');
// Run operation that may leak
await runPotentiallyLeakyOperation();
// Force GC if available (global.gc only exists when node runs with
// --expose-gc) so the "after" snapshot is not polluted by collectable garbage
if (global.gc) {
global.gc();
}
// Take comparison snapshot
const snapshot2 = takeSnapshot('after.heapsnapshot');
// Compare in Chrome DevTools:
// 1. Open DevTools -> Memory tab
// 2. Load both snapshots
// 3. Select "Comparison" view
// 4. Look for objects with positive delta

Key terms:
Common patterns to look for:
High Retained Size + Low Shallow Size = Holding references to large objects
Growing Object Count = Likely leak
Many (string) or (array) = Possible unbounded collection
Detached DOM nodes = Event listener leaks (in browser-like environments)

const v8 = require('node:v8');
/**
 * Periodically samples V8 heap usage and warns when the heap has grown on
 * every one of the last 10 samples AND total growth since the start()
 * baseline exceeds `thresholdMb`.
 *
 * Options: intervalMs (sample period, default 30000),
 *          thresholdMb (growth threshold in MB, default 100).
 */
class MemoryMonitor {
  constructor(options = {}) {
    // ?? instead of || so an explicit 0 is not silently replaced by the default.
    this.intervalMs = options.intervalMs ?? 30000;
    this.thresholdMb = options.thresholdMb ?? 100;
    this.history = [];
    this.baseline = null;
    this.timer = null;
  }
  start() {
    // Guard against double-start, which would orphan the previous interval
    // (itself a leak). Also record the baseline BEFORE arming the timer.
    if (this.timer) return;
    this.baseline = this.getHeapUsed();
    this.timer = setInterval(() => this.check(), this.intervalMs);
    console.log(`Memory monitor started. Baseline: ${this.baseline.toFixed(2)}MB`);
  }
  stop() {
    clearInterval(this.timer);
    this.timer = null;
  }
  // Current V8 heap usage in megabytes.
  getHeapUsed() {
    const stats = v8.getHeapStatistics();
    return stats.used_heap_size / 1024 / 1024;
  }
  // One sampling tick: record usage, trim history, and warn on
  // sustained growth.
  check() {
    const current = this.getHeapUsed();
    const delta = current - this.baseline;
    this.history.push({
      timestamp: Date.now(),
      used: current,
      delta
    });
    // Keep last 100 measurements
    if (this.history.length > 100) {
      this.history.shift();
    }
    // Warn only when the heap grew on each of the last 10 samples and the
    // cumulative growth since baseline crossed the threshold.
    if (this.history.length >= 10) {
      const recent = this.history.slice(-10);
      const allGrowing = recent.every((m, i) =>
        i === 0 || m.used > recent[i - 1].used
      );
      if (allGrowing && delta > this.thresholdMb) {
        console.warn(`[MEMORY WARNING] Heap grew by ${delta.toFixed(2)}MB`);
        console.warn(` Current: ${current.toFixed(2)}MB`);
        console.warn(` Baseline: ${this.baseline.toFixed(2)}MB`);
      }
    }
  }
  // Snapshot of the monitor's state for reporting.
  getStats() {
    return {
      baseline: this.baseline,
      current: this.getHeapUsed(),
      history: this.history
    };
  }
}
const async_hooks = require('node:async_hooks');
// Track every async resource at creation, with the stack that created it,
// so long-lived (possibly leaked) handles can be reported later.
const resources = new Map();
const hook = async_hooks.createHook({
init(asyncId, type, triggerAsyncId) {
// NOTE(review): capturing a stack trace on every async init is expensive;
// intended for debugging sessions, not production use.
const stack = new Error().stack;
resources.set(asyncId, {
type,
triggerAsyncId,
stack,
timestamp: Date.now()
});
},
destroy(asyncId) {
// Resource was torn down normally - forget it.
resources.delete(asyncId);
}
});
hook.enable();
// Periodically check for long-lived resources
setInterval(() => {
const now = Date.now();
const longLived = [];
for (const [id, resource] of resources) {
if (now - resource.timestamp > 60000) { // Older than 1 minute
longLived.push({ id, ...resource });
}
}
if (longLived.length > 0) {
console.log(`Long-lived resources: ${longLived.length}`);
// Group by type
const byType = {};
for (const r of longLived) {
byType[r.type] = (byType[r.type] || 0) + 1;
}
console.log(byType);
}
}, 30000);// LEAK: Cache grows forever
const cache = new Map();
// Anti-pattern: every distinct key adds an entry that is never evicted, so
// the cache (and everything its values reference) grows without bound.
function getData(key) {
if (!cache.has(key)) {
cache.set(key, fetchData(key));
}
return cache.get(key);
}
// FIX: Use LRU cache
// Bounded cache: at most 500 entries, each expiring after 5 minutes,
// so memory use stays capped.
// NOTE(review): lru-cache >= 7 exports a named class ({ LRUCache });
// this bare-require form matches older versions - verify against the
// installed version.
const LRU = require('lru-cache');
const cache = new LRU({
max: 500,
ttl: 1000 * 60 * 5 // 5 minutes
});// LEAK: Listeners added but never removed
// Anti-pattern: the listener is attached with no way to remove it, so it
// (and anything its closure retains) lives as long as the emitter does.
function subscribe(emitter, handler) {
emitter.on('data', handler);
// Never cleaned up!
}
// FIX: Return cleanup function
// Attach `handler` to the emitter's 'data' event and hand back an
// unsubscribe function, so the caller controls the listener's lifetime.
function subscribe(emitter, handler) {
  emitter.on('data', handler);
  const unsubscribe = () => {
    emitter.off('data', handler);
  };
  return unsubscribe;
}
// Or use AbortController
// Same subscription, but tied to an AbortSignal: aborting the signal
// detaches the handler. `signal` is optional.
function subscribe(emitter, handler, signal) {
  emitter.on('data', handler);
  const detach = () => emitter.off('data', handler);
  signal?.addEventListener('abort', detach);
}
// LEAK: Closure retains large data
function createProcessor(largeData) {
// Process data
const summary = processData(largeData);
// This closure retains largeData even though it only needs summary
// NOTE(review): whether largeData is actually retained depends on the
// engine's closure-scope analysis; modern V8 typically frees unreferenced
// captures, but a shared scope (another closure or eval using largeData)
// can pin it.
return function() {
return summary;
};
}
// FIX: Don't capture unnecessary variables
// Bug in the original guide: this "fixed" version was byte-identical to the
// leaky one above. The actual fix is to drop the reference explicitly.
function createProcessor(largeData) {
  const summary = processData(largeData);
  // Drop the only remaining reference so largeData is eligible for GC
  // regardless of how the engine scopes the returned closure.
  largeData = null;
  return function() {
    return summary;
  };
}
// LEAK: Timers holding references
// Anti-pattern: the interval callback closes over `this`, so the timer
// keeps the Service instance (and everything it references) alive until
// the interval is cleared - which never happens here.
class Service {
start() {
this.timer = setInterval(() => {
this.doWork(); // 'this' keeps Service alive
}, 1000);
}
// stop() never called
}
// FIX: Clean up timers
// Pair every setInterval with a stop() that clears it, so the Service
// (captured via `this` in the callback) can be garbage collected.
class Service {
  start() {
    // Guard against double-start, which would orphan the earlier timer
    // (the very leak this example is meant to fix).
    if (this.timer) return;
    this.timer = setInterval(() => this.doWork(), 1000);
  }
  stop() {
    clearInterval(this.timer);
    this.timer = null;
  }
}
const v8 = require('node:v8');
// V8 tracks external memory (Buffers, etc.)
const stats = v8.getHeapStatistics();
console.log('External memory:', stats.external_memory / 1024 / 1024, 'MB');function logMemory() {
// Print a snapshot of process.memoryUsage(), formatted in MB.
const usage = process.memoryUsage();
console.log({
rss: (usage.rss / 1024 / 1024).toFixed(2) + ' MB',
heapTotal: (usage.heapTotal / 1024 / 1024).toFixed(2) + ' MB',
heapUsed: (usage.heapUsed / 1024 / 1024).toFixed(2) + ' MB',
external: (usage.external / 1024 / 1024).toFixed(2) + ' MB',
arrayBuffers: (usage.arrayBuffers / 1024 / 1024).toFixed(2) + ' MB'
});
}
// RSS (Resident Set Size) = Total memory allocated to process
// heapTotal = V8 heap size
// heapUsed = V8 heap used
// external = V8 external memory (Buffers, etc.)
// arrayBuffers = ArrayBuffer and SharedArrayBuffer allocations# Build Node.js with debug symbols
./configure --debug
make -j$(nproc)
# Run with Valgrind
valgrind --leak-check=full \
--show-leak-kinds=all \
--track-origins=yes \
./node --expose-gc script.js# Build with ASan
./configure --enable-asan
make -j$(nproc)
# Run
ASAN_OPTIONS=detect_leaks=1 ./node script.jsnpm install -g clinic
clinic heapprofiler -- node app.jsconst memwatch = require('@airbnb/node-memwatch');
// Fires when memwatch's heuristic detects sustained heap growth.
memwatch.on('leak', (info) => {
console.log('Memory leak detected:', info);
});
// Fires after garbage collection with aggregate statistics.
memwatch.on('stats', (stats) => {
console.log('GC stats:', stats);
});
// Take heap diff
const hd = new memwatch.HeapDiff();
// ... run code ...
const diff = hd.end();
console.log('Heap diff:', JSON.stringify(diff, null, 2));# Take heap snapshot on signal
node --heapsnapshot-signal=SIGUSR2 app.js
# In another terminal
kill -USR2 $(pgrep -f "node app.js")# Generate heap snapshot before crashing
node --heapsnapshot-near-heap-limit=3 app.js# Start with low memory to trigger OOM faster during debugging
node --max-old-space-size=256 app.js# Enable core dumps
ulimit -c unlimited
# Run until crash
node --abort-on-uncaught-exception app.js
# Analyze with lldb/gdb
lldb node -c core.12345
(lldb) bt// Use WeakRef for caches that shouldn't prevent GC
// Cache that holds its values weakly: entries never prevent garbage
// collection of the cached objects.
const cache = new Map();
// Store `obj` under `key` behind a WeakRef.
function cacheObject(key, obj) {
  cache.set(key, new WeakRef(obj));
}
// Return the cached object for `key`, or undefined if it was never cached
// or has since been garbage collected (stale entries are pruned lazily).
function getCached(key) {
  const ref = cache.get(key);
  if (!ref) return undefined;
  const target = ref.deref();
  if (target !== undefined) {
    return target;
  }
  // Object was GC'd - drop the dead entry.
  cache.delete(key);
  return undefined;
}
// FinalizationRegistry for cleanup
// After a registered object is collected, the callback removes its (now
// dead) Map entry so the cache itself does not accumulate stale WeakRefs.
const registry = new FinalizationRegistry((key) => {
console.log(`Object with key ${key} was garbage collected`);
cache.delete(key);
});
// Cache `obj` weakly AND register it for automatic entry removal.
// NOTE(review): finalization callbacks run at the engine's discretion,
// possibly long after collection - don't rely on them for prompt cleanup.
function cacheWithCleanup(key, obj) {
cache.set(key, new WeakRef(obj));
registry.register(obj, key);
}// BAD: Load entire file into memory
const data = await fs.promises.readFile('huge-file.json');
const parsed = JSON.parse(data);
// GOOD: Stream processing
const { pipeline } = require('node:stream/promises');
const JSONStream = require('JSONStream');
// Stream the file through an incremental JSON parser and an async generator
// transform, so peak memory is roughly one item at a time, not the whole file.
await pipeline(
fs.createReadStream('huge-file.json'),
JSONStream.parse('items.*'),
async function* (source) {
for await (const item of source) {
yield processItem(item);
}
},
fs.createWriteStream('output.json')
);class ObjectPool {
// Reusable-object pool to avoid allocation/GC churn in hot paths.
// `factory` creates a new object; `reset` returns a used object to a
// clean state before it re-enters the pool.
constructor(factory, reset, initialSize = 10) {
this.factory = factory;
this.reset = reset;
this.pool = [];
for (let i = 0; i < initialSize; i++) {
this.pool.push(factory());
}
}
// Take an object from the pool, creating a fresh one if the pool is empty.
acquire() {
return this.pool.length > 0 ? this.pool.pop() : this.factory();
}
// Reset a used object and return it to the pool.
// NOTE(review): the pool has no upper bound - releasing more objects than
// were acquired lets it grow indefinitely; consider capping its size.
release(obj) {
this.reset(obj);
this.pool.push(obj);
}
}
// Usage
const bufferPool = new ObjectPool(
() => Buffer.allocUnsafe(1024),
(buf) => buf.fill(0)
);rules