Debugs native module crashes, optimizes V8 performance, configures node-gyp builds, writes N-API/node-addon-api bindings, and diagnoses libuv event loop issues in Node.js. Use when working with C++ addons, native modules, binding.gyp, node-gyp errors, segfaults, memory leaks in native code, V8 optimization/deoptimization, libuv thread pool tuning, N-API or NAN bindings, build system failures, or any Node.js internals below the JavaScript layer.
99
99%
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
Passed
No known issues
Understanding V8's garbage collection is critical for writing performant Node.js applications and debugging memory issues. V8 uses a generational garbage collector with different strategies for young and old objects.
V8 divides the heap into several spaces:
+------------------+
| New Space | <- Young generation (Scavenger)
| (Semi-spaces) |
+------------------+
| Old Space | <- Old generation (Mark-Sweep/Compact)
+------------------+
| Large Object | <- Objects > 512KB
| Space |
+------------------+
| Code Space | <- Compiled code
+------------------+
| Map Space | <- Hidden classes (Maps)
+------------------+
// Get current heap statistics
const v8 = require('node:v8');
const stats = v8.getHeapStatistics();
// All values from getHeapStatistics() are byte counts; divide by 1024^2
// to display them as MB.
console.log({
heapTotal: stats.total_heap_size / 1024 / 1024 + ' MB',
heapUsed: stats.used_heap_size / 1024 / 1024 + ' MB',
heapLimit: stats.heap_size_limit / 1024 / 1024 + ' MB',
mallocedMemory: stats.malloced_memory / 1024 / 1024 + ' MB',
externalMemory: stats.external_memory / 1024 / 1024 + ' MB',
});
Configure heap limits:
# Set max old space size in MB (on modern Node the default limit scales with available system memory; historically ~1.5GB on 64-bit)
node --max-old-space-size=4096 app.js
# Set max semi-space size (affects young generation)
node --max-semi-space-size=64 app.js
# Set initial heap size
node --initial-heap-size=256 app.js
The Scavenger handles young generation collection using a semi-space copying algorithm. It's fast but requires 2x the memory for the young generation.
Before GC:
+----------------+ +----------------+
| From Space | | To Space |
| [A][B][C][D] | | (empty) |
+----------------+ +----------------+
After GC (B and D are dead):
+----------------+ +----------------+
| From Space | | To Space |
| (empty) | | [A][C] |
+----------------+ +----------------+
// BAD: Creating many short-lived objects triggers frequent Scavenger runs
function processData(items) {
  return items.map((item) => {
    // Each iteration allocates a fresh copy, a Date instance, and a meta
    // object — all short-lived garbage for the Scavenger to chase.
    const copy = { ...item };
    copy.processed = true;
    copy.timestamp = new Date(); // New object each time
    copy.meta = { source: 'api' }; // New object each time
    return copy;
  });
}
// BETTER: Reuse objects where possible
const META = Object.freeze({ source: 'api' });
function processData(items) {
const results = new Array(items.length);
for (let i = 0; i < items.length; i++) {
results[i] = {
...items[i],
processed: true,
timestamp: Date.now(), // Primitive, not object
meta: META // Shared reference
};
}
return results;
}
class ObjectPool {
// Build a pool that pre-allocates `initialSize` objects up front.
// factory: () => T — creates a new pooled object.
// reset: (T) => void — restores an object to a clean state before reuse.
constructor(factory, reset, initialSize = 100) {
this.factory = factory;
this.reset = reset;
this.pool = [];
// Pre-allocate objects
for (let i = 0; i < initialSize; i++) {
this.pool.push(factory());
}
}
// Hand out a pooled object, or build a fresh one when the pool is empty.
acquire() {
return this.pool.length > 0
? this.pool.pop()
: this.factory();
}
// Reset an object and return it to the pool for reuse.
// NOTE(review): the pool is unbounded — releasing more objects than were
// ever acquired grows it without limit; consider capping its size.
release(obj) {
this.reset(obj);
this.pool.push(obj);
}
}
// Usage
const bufferPool = new ObjectPool(
() => Buffer.allocUnsafe(4096),
(buf) => buf.fill(0)
);
const buf = bufferPool.acquire();
// ... use buffer
bufferPool.release(buf);
Mark-Sweep handles old generation collection. It's a stop-the-world collector that marks live objects and then sweeps (frees) dead ones.
// Roots include:
// - Global objects (globalThis)
// - Stack variables
// - Active handles (timers, I/O)
// - Persistent handles from native addons
// Objects reachable from roots survive
globalThis.cache = largeObject; // Keeps largeObject alive
// Removing reference allows collection
delete globalThis.cache; // largeObject can now be collected
V8 uses incremental marking to reduce pause times:
# Trace GC events
node --trace-gc app.js
# Example output:
# [12345:0x...] 100 ms: Scavenge 4.2 (6.0) -> 3.8 (7.0) MB, 1.2 / 0.0 ms
# [12345:0x...] 500 ms: Mark-sweep 15.2 (20.0) -> 12.1 (20.0) MB, 50.3 / 0.0 ms (+ 10.2 ms in 5 steps)
The (+ 10.2 ms in 5 steps) indicates incremental marking.
V8 uses write barriers to track cross-generation references:
// When old object references young object,
// V8 must remember to scan old object during Scavenge
const oldObject = {}; // Promoted to old space
// ... later
oldObject.child = {}; // New object in young space
// Write barrier records this reference
When heap fragmentation is high, V8 performs Mark-Compact instead of Mark-Sweep:
Before Compaction:
[LIVE][ ][LIVE][ ][ ][LIVE][ ]
After Compaction:
[LIVE][LIVE][LIVE][ ]
^ Free space consolidated
Compaction is expensive because it requires updating all pointers. V8 avoids it when possible:
# Force GC (for debugging only)
node --expose-gc -e "
global.gc(); // Major (full) GC — the default when called with no options
global.gc({ type: 'minor' }); // Minor GC (Scavenge only)
global.gc({ type: 'major', execution: 'sync' }); // Synchronous major GC
"
V8's GC is based on the generational hypothesis: most objects die young.
// Short-lived objects (ideal case)
// Parse, transform, and serialize per request: every intermediate object
// becomes garbage as soon as the response is returned — exactly the
// short-lived allocation pattern the Scavenger handles cheaply.
function handleRequest(req) {
const data = JSON.parse(req.body); // Dies quickly
const result = processData(data); // Dies quickly
return JSON.stringify(result); // Dies quickly
}
// Long-lived objects (cache, connections)
const connectionPool = new Pool(); // Lives forever
const cache = new LRUCache(); // Lives forever
V8 tracks allocation sites to optimize object placement:
// V8 learns that objects from this function live long
function createLongLivedConfig() {
  // A single object literal gives V8 one allocation site and a stable
  // object shape; after profiling, allocation-site pretenuring may place
  // objects from this site directly in old space.
  const settings = {
    setting1: 'value1',
    setting2: 'value2',
  };
  return settings;
}
// Called once at startup
const config = createLongLivedConfig();
# Basic GC tracing
node --trace-gc app.js
# Detailed GC tracing
node --trace-gc-verbose app.js
# GC statistics at exit
node --trace-gc-object-stats app.js
# GC details as machine-readable name=value pairs
node --trace-gc-nvp app.js
const v8 = require('node:v8');
const fs = require('node:fs');
// Write heap snapshot
// Dump the V8 heap to a .heapsnapshot file (openable in Chrome DevTools)
// and return the generated filename.
function writeHeapSnapshot() {
  const filename = v8.writeHeapSnapshot();
  // Bug fix: the original interpolated `$(unknown)` — shell command
  // substitution syntax, printed literally by a JS template literal —
  // instead of the actual filename.
  console.log(`Heap snapshot written to ${filename}`);
  return filename;
}
// Stream heap snapshot (lower memory overhead)
// NOTE(review): `stream` is created but never written to or closed —
// v8.writeHeapSnapshot(filename) writes the file itself, so this function
// buffers the whole snapshot anyway AND leaks a file handle. For actual
// streaming, pipe v8.getHeapSnapshot() into the stream instead and drop
// the writeHeapSnapshot call — TODO confirm intended behavior.
function streamHeapSnapshot() {
const filename = `heap-${Date.now()}.heapsnapshot`;
const stream = fs.createWriteStream(filename);
v8.writeHeapSnapshot(filename);
return filename;
}
const v8 = require('node:v8');
class MemoryMonitor {
// Sample heap usage every `intervalMs` ms (default 30s).
// NOTE(review): the interval handle is neither stored nor unref()'d, so
// the monitor keeps the event loop alive and can never be stopped —
// confirm this is intended for the example.
constructor(intervalMs = 30000) {
this.baseline = null;
this.history = [];
setInterval(() => this.check(), intervalMs);
}
// Record current heap usage and warn when the last 10 samples are
// monotonically non-decreasing — a crude leak heuristic.
check() {
const stats = v8.getHeapStatistics();
const used = stats.used_heap_size;
// First sample only establishes the baseline; no delta is recorded.
if (!this.baseline) {
this.baseline = used;
return;
}
this.history.push({
timestamp: Date.now(),
used,
delta: used - this.baseline
});
// Keep last 100 measurements
if (this.history.length > 100) {
this.history.shift();
}
// Check for consistent growth
if (this.history.length >= 10) {
const recent = this.history.slice(-10);
// NOTE(review): `>=` counts perfectly flat usage as "growing", so a
// steady heap can trigger false leak warnings — consider strict `>`.
const allGrowing = recent.every((m, i) =>
i === 0 || m.used >= recent[i-1].used
);
if (allGrowing) {
console.warn('Possible memory leak detected');
console.warn(`Heap grew from ${this.baseline} to ${used}`);
}
}
}
}
// BAD: Closure retains large array
function createHandler(largeData) {
return function handler() {
// Even if we don't use largeData, it's retained
// NOTE(review): modern V8 context-allocates only variables that some
// inner function actually references, so largeData may in fact be
// collectable in this exact form; the retention risk is real when ANY
// closure in the scope captures it (or eval is present) — confirm the
// example against current engine behavior.
return 'done';
};
}
// GOOD: Don't capture unnecessary variables
function createHandler(largeData) {
const result = processData(largeData);
// largeData can be collected now
return function handler() {
return result;
};
}
// BAD: Accidental global
// Assigning to an undeclared name creates a property on globalThis in
// sloppy mode, so the mapped array is retained for the process lifetime.
// In strict mode / ES modules the same line throws ReferenceError instead.
function processData(data) {
results = data.map(transform); // Missing 'const' - creates global
return results;
}
// GOOD: Use strict mode and proper declarations
'use strict';
function processData(data) {
const results = data.map(transform);
return results;
}
// BAD: Timer keeps callback and its closure alive
// The interval callback captures `data`, and the timer handle is never
// returned or stored, so neither the closure nor `data` can ever be
// released and the interval keeps the event loop alive indefinitely.
function startMonitoring(data) {
setInterval(() => {
console.log(data.value); // data is retained forever
}, 1000);
}
// GOOD: Store timer reference and clear when done
class Monitor {
// Keep both the monitored data and the timer handle as fields so stop()
// can release them later.
constructor(data) {
this.data = data;
this.timer = setInterval(() => this.check(), 1000);
}
// Periodic work; reads this.data, which stop() later nulls out.
check() {
console.log(this.data.value);
}
// Cancel the timer and drop both references so the interval closure and
// the monitored data become eligible for collection.
stop() {
clearInterval(this.timer);
this.timer = null;
this.data = null; // Allow GC
}
}
// 1. Pre-allocate arrays when size is known
const results = new Array(items.length);
for (let i = 0; i < items.length; i++) {
results[i] = transform(items[i]);
}
// 2. Reuse buffers
const sharedBuffer = Buffer.allocUnsafe(65536);
function processChunk(data) {
data.copy(sharedBuffer);
// Process in-place
}
// 3. Use TypedArrays for numeric data
const data = new Float64Array(1000);
// Much more GC-friendly than Array of Numbers
For long-lived data structures:
// Pre-allocate and fill immediately
// This helps V8 understand the object shape
const cache = Object.create(null);
const INITIAL_KEYS = ['user:', 'session:', 'token:'];
INITIAL_KEYS.forEach(k => { cache[k] = undefined; });
// Use Map for dynamic keys (better for old space)
const dynamicCache = new Map();
deps/v8/src/heap/ in Node.js source
node --v8-options | grep gc for all GC-related flags
rules