Node.js bindings for the FoundationDB database, providing ACID transactions, tuple encoding, and directory-layer support.

Watch operations support reactive programming through key-change notifications: a watch lets an application be notified when a specific key changes in the database.

The core watch structure pairs a promise with a cancel function.
/**
* Watch on a key for change notifications
*/
interface Watch {
/**
* Promise that resolves when the key changes or watch is cancelled
* Resolves to true if key changed, false if cancelled or error occurred
*/
promise: Promise<boolean>;
/**
* Cancel the watch
* Causes the promise to resolve to false
*/
cancel(): void;
}
/**
* Watch with associated value
* Returned by getAndWatch operations
*/
interface WatchWithValue<Value> extends Watch {
/** The current value of the watched key (undefined if key doesn't exist) */
value: Value | undefined;
}

Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Create a watch (must be in a transaction)
const watch = await db.doTransaction(async (tn) => {
return tn.watch("config");
});
// Wait for change
console.log("Watching for config changes...");
const changed = await watch.promise;
if (changed) {
console.log("Config was modified!");
} else {
console.log("Watch was cancelled");
}

Create a watch within a transaction.
/**
* Watch a key for changes within a transaction
* @param key - Key to watch
* @param opts - Watch options
* @returns Watch object with promise and cancel function
*/
watch(key: KeyIn, opts?: WatchOptions): Watch;
interface WatchOptions {
/**
* Throw all errors instead of resolving to false
* Default: false (errors resolve promise to false)
*/
throwAllErrors?: boolean;
}Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Watch within transaction
const watch = await db.doTransaction(async (tn) => {
// Read current value
const value = await tn.get("config");
console.log("Current value:", value?.toString());
// Create watch
return tn.watch("config");
});
// Wait outside transaction
await watch.promise;
console.log("Config changed!");
// With error handling
const watch2 = await db.doTransaction(async (tn) => {
return tn.watch("config", { throwAllErrors: true });
});
try {
await watch2.promise;
} catch (error) {
console.error("Watch error:", error);
}Get a value and watch it in a single operation.
/**
* Get a value and watch it for changes
* Atomically reads the value and creates a watch
* @param key - Key to get and watch
* @returns Promise resolving to WatchWithValue containing current value and watch
*/
getAndWatch(key: KeyIn): Promise<WatchWithValue<ValOut>>;Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Get current value and watch for changes
const watch = await db.getAndWatch("config");
console.log("Current value:", watch.value?.toString());
console.log("Waiting for change...");
const changed = await watch.promise;
if (changed) {
// Read new value
const newValue = await db.get("config");
console.log("New value:", newValue?.toString());
}Set a value and watch it for changes.
/**
* Set a value and watch it for changes
* Atomically writes the value and creates a watch
* @param key - Key to set
* @param value - Value to set
* @returns Promise resolving to Watch object
*/
setAndWatch(key: KeyIn, value: ValIn): Promise<Watch>;
/**
* Clear a key and watch it for changes
* Atomically deletes the key and creates a watch
* @param key - Key to clear
* @returns Promise resolving to Watch object
*/
clearAndWatch(key: KeyIn): Promise<Watch>;Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Set and watch
const watch = await db.setAndWatch("config", "initial");
console.log("Config set, watching for changes...");
await watch.promise;
console.log("Someone modified the config!");
// Clear and watch
const watch2 = await db.clearAndWatch("temp");
await watch2.promise;
console.log("Someone recreated the temp key!");Cancel watches to stop waiting for changes.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Create watch with timeout
const watch = await db.getAndWatch("config");
// Cancel after 5 seconds
setTimeout(() => {
console.log("Timeout - cancelling watch");
watch.cancel();
}, 5000);
const changed = await watch.promise;
if (!changed) {
console.log("Watch was cancelled (timeout)");
}Use watches to implement reactive configuration updates.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Configuration manager with watches
class ConfigManager {
private config: any = {};
private watching = false;
constructor(private db: typeof db, private configKey: string) {}
async start() {
// Load initial config
await this.reload();
// Start watching
this.watching = true;
this.watchLoop();
}
stop() {
this.watching = false;
}
async reload() {
const value = await this.db.get(this.configKey);
if (value) {
this.config = JSON.parse(value.toString());
console.log("Config loaded:", this.config);
}
}
get(key: string) {
return this.config[key];
}
private async watchLoop() {
while (this.watching) {
try {
const watch = await this.db.getAndWatch(this.configKey);
const changed = await watch.promise;
if (changed && this.watching) {
await this.reload();
}
} catch (error) {
console.error("Watch error:", error);
await new Promise((resolve) => setTimeout(resolve, 1000));
}
}
}
}
// Usage
const config = new ConfigManager(db, "app:config");
await config.start();
// Config automatically updates when database changes
setInterval(() => {
console.log("Theme:", config.get("theme"));
}, 1000);
// Update config (will trigger reload)
await db.set("app:config", JSON.stringify({ theme: "dark", lang: "en" }));
// Stop watching
// config.stop();

Important constraints and limitations of watches.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Limitation 1: Watches must be created in a transaction
// This WILL NOT work:
// const watch = db.watch("key"); // Error!
// Correct:
const watch = await db.doTransaction(async (tn) => {
return tn.watch("key");
});
// Limitation 2: Maximum outstanding watches
// FDB limits the number of concurrent watches per database connection
// Default is usually 10,000 - configure with database options
db.setNativeOptions({
max_watches: 20000, // Increase limit if needed
});
// Limitation 3: Watch promise must be awaited outside transaction
await db.doTransaction(async (tn) => {
const watch = tn.watch("key");
// WRONG: Don't await inside transaction
// await watch.promise; // Would deadlock!
// RIGHT: Return and await outside
return watch;
}).then(async (watch) => {
// Await outside transaction
await watch.promise;
});
// Limitation 4: Watches don't provide the new value
const watch2 = await db.getAndWatch("key");
await watch2.promise;
// Must read again to get new value
const newValue = await db.get("key");Watch multiple keys simultaneously.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Watch multiple keys
/**
 * Watch several keys and resolve with the first one that changes.
 * @param keys - Keys to watch
 * @returns The first key whose value changed
 */
async function watchMultiple(keys: string[]): Promise<string> {
  // Create all watches in one transaction and return the Watch objects,
  // NOT their promises: awaiting watch.promise inside the transaction
  // deadlocks (see "Limitation 3" earlier in this document).
  const watches = await db.doTransaction(async (tn) => {
    return keys.map((key) => tn.watch(key));
  });
  // Wait outside the transaction for any key to change.
  const index = await Promise.race(
    watches.map((watch, i) => watch.promise.then(() => i))
  );
  // Cancel the losers so they don't linger against the max_watches limit.
  watches.forEach((w, i) => {
    if (i !== index) w.cancel();
  });
  return keys[index];
}
// Usage
const changedKey = await watchMultiple([
"config:database",
"config:cache",
"config:features",
]);
console.log("Changed key:", changedKey);Implement notification systems using watches.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Notification queue with watches
/**
 * Single-slot notification mailbox built on getAndWatch.
 * send() writes a message; receive() blocks until one is present.
 * NOTE(review): a send() while a previous message is still unread
 * overwrites it, and the read-then-clear in receive() happens in two
 * separate operations — a message written in between could be lost.
 * Confirm this at-most-once delivery is acceptable for callers.
 */
class NotificationQueue {
constructor(
private db: typeof db,
private queueKey: string
) {}
/** Publish a message by overwriting the queue key. */
async send(message: any): Promise<void> {
await this.db.set(this.queueKey, JSON.stringify(message));
}
/** Block until a message exists, consume it, and return it. */
async receive(): Promise<any> {
while (true) {
const watch = await this.db.getAndWatch(this.queueKey);
if (watch.value !== undefined) {
// A message is already present: parse it...
const message = JSON.parse(watch.value.toString());
// ...then clear the slot so the next receive() blocks again.
await this.db.clear(this.queueKey);
return message;
}
// Slot empty: sleep until the key changes, then loop to re-read.
await watch.promise;
}
}
}
// Usage
const queue = new NotificationQueue(db, "notifications:user:123");
// Sender
await queue.send({ type: "message", text: "Hello!" });
// Receiver
const notification = await queue.receive();
console.log("Received:", notification);Common patterns for using watches effectively.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Pattern 1: Persistent watch loop
/**
 * Invoke `callback` every time `key` changes, retrying forever on errors.
 * Never resolves; intended to run for the lifetime of the process.
 */
async function watchForever(key: string, callback: () => void) {
  for (;;) {
    try {
      const watch = await db.doTransaction(async (tn) => tn.watch(key));
      await watch.promise;
      callback();
    } catch (error) {
      console.error("Watch error:", error);
      // Brief pause so a persistent failure doesn't spin the CPU.
      await new Promise((resolve) => setTimeout(resolve, 100));
    }
  }
}
// Pattern 2: Watch with timeout
/**
 * Watch a key, giving up after `timeoutMs` milliseconds.
 * @param key - Key to watch
 * @param timeoutMs - Maximum time to wait, in milliseconds
 * @returns true if the key changed before the deadline, false on timeout
 */
async function watchWithTimeout(
  key: string,
  timeoutMs: number
): Promise<boolean> {
  const watch = await db.doTransaction(async (tn) => tn.watch(key));
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<boolean>((resolve) => {
    timer = setTimeout(() => {
      watch.cancel(); // cancellation resolves watch.promise to false
      resolve(false);
    }, timeoutMs);
  });
  try {
    return await Promise.race([watch.promise, timeout]);
  } finally {
    // Fix: clear the timer when the watch wins the race; otherwise the
    // pending timeout keeps the event loop alive for up to timeoutMs.
    if (timer !== undefined) clearTimeout(timer);
  }
}
// Pattern 3: Conditional watch
/**
 * Block until the watched key's value satisfies `predicate`.
 * Re-checks the value each time the key changes.
 * @param key - Key to observe
 * @param predicate - Test applied to the current value (undefined if absent)
 */
async function watchUntilCondition(
  key: string,
  predicate: (value: Buffer | undefined) => boolean
): Promise<void> {
  for (;;) {
    const current = await db.getAndWatch(key);
    if (predicate(current.value)) {
      return; // condition met
    }
    // Sleep until the key changes, then loop around and re-check.
    await current.promise;
  }
}
// Pattern 4: Batch watching
/**
 * Watch a set of keys and report, per key, whether it changed.
 * Resolves only once every watch has settled (changed or cancelled).
 * @param keys - Keys to watch
 * @returns Map of key -> true if the key changed, false if cancelled/error
 */
async function watchBatch(keys: string[]): Promise<Map<string, boolean>> {
  // Register every watch inside a single transaction.
  const watches = await db.doTransaction(async (tn) => {
    return new Map(keys.map((key) => [key, tn.watch(key)]));
  });
  // Await all outcomes in parallel, collecting [key, changed] pairs.
  const entries = await Promise.all(
    Array.from(watches, async ([key, watch]) => {
      const didChange = await watch.promise;
      return [key, didChange] as [string, boolean];
    })
  );
  return new Map(entries);
}
// Usage examples
watchForever("config", () => console.log("Config changed!"));
const changed = await watchWithTimeout("key", 5000);
console.log("Changed within 5s:", changed);
await watchUntilCondition("status", (val) => val?.toString() === "ready");
console.log("Status is ready!");
const batchResults = await watchBatch(["key1", "key2", "key3"]);Handle watch errors appropriately.
Usage Example:
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Robust watch with error handling
/**
 * Create a watch on `key` with throwAllErrors enabled and await it.
 * On FDB error 1007 the whole operation is retried recursively.
 * NOTE(review): the retry recursion is unbounded — a persistently
 * failing cluster would recurse indefinitely; consider a retry cap.
 */
async function robustWatch(key: string): Promise<boolean> {
try {
// throwAllErrors makes watch failures reject the promise instead of
// resolving it to false, so they can be inspected in the catch below.
const watch = await db.doTransaction(async (tn) => {
return tn.watch(key, { throwAllErrors: true });
});
return await watch.promise;
} catch (error) {
if (error instanceof FDBError) {
console.error("FDB Error:", error.code, error.message);
// Handle specific errors
if (error.code === 1007) {
// Error code 1007 ("transaction too old") — start over from scratch
console.log("Transaction conflict, retrying...");
return await robustWatch(key); // Retry
}
}
throw error; // Rethrow other errors
}
}
// Watch with retry logic
/**
 * Run robustWatch with up to `maxRetries` attempts and exponential backoff.
 * @param key - Key to watch
 * @param maxRetries - Maximum number of attempts (default 3)
 * @returns Result of the first successful watch
 * @throws The last error if every attempt fails
 */
async function watchWithRetry(
  key: string,
  maxRetries: number = 3
): Promise<boolean> {
  let attempt = 0;
  while (attempt < maxRetries) {
    try {
      return await robustWatch(key);
    } catch (error) {
      console.error(`Attempt ${attempt + 1} failed:`, error);
      if (attempt === maxRetries - 1) throw error;
      // Exponential backoff: 100ms, 200ms, 400ms, ...
      await new Promise((resolve) =>
        setTimeout(resolve, Math.pow(2, attempt) * 100)
      );
    }
    attempt++;
  }
  return false;
}

Guidelines for effective watch usage.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Practice 1: Always create watches in transactions
// ✓ Correct
const watch1 = await db.doTransaction(async (tn) => tn.watch("key"));
// ✗ Incorrect - will error
// const watch2 = db.watch("key");
// Practice 2: Await watches outside transactions
const watch3 = await db.doTransaction(async (tn) => {
const w = tn.watch("key");
// ✗ Don't await inside: await w.promise;
return w;
});
// ✓ Await outside
await watch3.promise;
// Practice 3: Handle cancellation gracefully
const watch4 = await db.getAndWatch("key");
setTimeout(() => watch4.cancel(), 5000);
const changed = await watch4.promise;
if (changed) {
console.log("Key changed");
} else {
console.log("Watch cancelled or error");
}
// Practice 4: Limit concurrent watches
// Be mindful of max_watches database option
const activeWatches = new Set<Watch>();
async function createLimitedWatch(key: string, maxWatches: number) {
if (activeWatches.size >= maxWatches) {
throw new Error("Too many active watches");
}
const watch = await db.getAndWatch(key);
activeWatches.add(watch);
watch.promise.finally(() => {
activeWatches.delete(watch);
});
return watch;
}
// Practice 5: Implement retry logic for long-running watches
async function reliableWatch(key: string) {
while (true) {
try {
const watch = await db.getAndWatch(key);
const changed = await watch.promise;
if (changed) {
console.log("Key changed!");
return;
}
} catch (error) {
console.error("Watch failed, retrying:", error);
await new Promise((resolve) => setTimeout(resolve, 1000));
}
}
}
// Practice 6: Clean up watches on shutdown
const watches: Watch[] = [];
process.on("SIGINT", () => {
console.log("Cancelling watches...");
watches.forEach((w) => w.cancel());
process.exit(0);
});Optimize watch usage for performance.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Consideration 1: Watches have overhead
// Don't watch thousands of keys simultaneously
// Instead, use a notification key or pub/sub pattern
// Consideration 2: Batch watch creation
async function efficientMultiWatch(keys: string[]): Promise<Watch[]> {
// Create all watches in single transaction
return await db.doTransaction(async (tn) => {
return keys.map((key) => tn.watch(key));
});
}
// Consideration 3: Use single notification key for groups
class NotificationBus {
constructor(
private db: typeof db,
private channelKey: string
) {}
async notify() {
// Update notification key to trigger all watchers
await this.db.set(this.channelKey, Date.now().toString());
}
async wait(): Promise<void> {
const watch = await this.db.getAndWatch(this.channelKey);
await watch.promise;
}
}
// Many subscribers can watch single key
const bus = new NotificationBus(db, "notifications:global");
// Multiple watchers
Promise.all([bus.wait(), bus.wait(), bus.wait()]).then(() => {
console.log("All watchers notified");
});
// Single notification triggers all
await bus.notify();Test FoundationDB operations with Jest's async/await support and lifecycle hooks.
import fdb from "foundationdb";
import { describe, beforeAll, afterAll, beforeEach, test, expect } from "@jest/globals";
describe("FoundationDB Tests", () => {
let db: ReturnType<typeof fdb.open>;
beforeAll(() => {
fdb.setAPIVersion(620);
db = fdb.open();
});
afterAll(() => {
db.close();
fdb.stopNetworkSync();
});
beforeEach(async () => {
// Clear test data
await db.clearRangeStartsWith("test:");
});
test("should perform basic operations", async () => {
await db.set("test:key", "value");
const result = await db.get("test:key");
expect(result?.toString()).toBe("value");
});
test("should handle transactions", async () => {
const count = await db.doTransaction(async (tn) => {
tn.set("test:counter", "5");
const val = await tn.get("test:counter");
return parseInt(val?.toString() || "0");
});
expect(count).toBe(5);
});
test("should handle errors gracefully", async () => {
await expect(async () => {
await db.doTransaction(async (tn) => {
throw new Error("Test error");
});
}).rejects.toThrow("Test error");
});
});Use Mocha with Promise-based tests and proper cleanup.
import fdb from "foundationdb";
import { describe, before, after, beforeEach, it } from "mocha";
import { expect } from "chai";
describe("FoundationDB Operations", function() {
this.timeout(10000); // 10 second timeout
let db: ReturnType<typeof fdb.open>;
before(() => {
fdb.setAPIVersion(620);
db = fdb.open();
});
after(() => {
db.close();
fdb.stopNetworkSync();
});
beforeEach(async () => {
await db.clearRangeStartsWith("test:");
});
it("should read and write data", async () => {
await db.set("test:mocha", "data");
const value = await db.get("test:mocha");
expect(value?.toString()).to.equal("data");
});
it("should handle concurrent operations", async () => {
const operations = Array.from({ length: 10 }, (_, i) =>
db.set(`test:item:${i}`, `value${i}`)
);
await Promise.all(operations);
const items = await db.getRangeAllStartsWith("test:item:");
expect(items).to.have.lengthOf(10);
});
it("should support atomic operations", async () => {
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await Promise.all([
db.add("test:atomic", delta),
db.add("test:atomic", delta),
db.add("test:atomic", delta)
]);
const result = await db.get("test:atomic");
expect(result?.readBigInt64LE(0)).to.equal(3n);
});
});Leverage Vitest's fast execution and modern testing features.
import fdb from "foundationdb";
import { describe, beforeAll, afterAll, beforeEach, test, expect } from "vitest";
describe("FoundationDB with Vitest", () => {
let db: ReturnType<typeof fdb.open>;
beforeAll(() => {
fdb.setAPIVersion(620);
db = fdb.open();
});
afterAll(() => {
db.close();
fdb.stopNetworkSync();
});
beforeEach(async () => {
await db.clearRangeStartsWith("test:");
});
test("concurrent writes with snapshot isolation", async () => {
const writes = Array.from({ length: 100 }, (_, i) =>
db.set(`test:concurrent:${i}`, `value${i}`)
);
await Promise.all(writes);
const results = await db.getRangeAllStartsWith("test:concurrent:");
expect(results).toHaveLength(100);
});
test("transaction retry logic", async () => {
let attempts = 0;
const result = await db.doTransaction(async (tn) => {
attempts++;
const value = await tn.get("test:retry");
tn.set("test:retry", (parseInt(value?.toString() || "0") + 1).toString());
return attempts;
});
expect(result).toBeGreaterThanOrEqual(1);
});
test("range queries with limits", async () => {
for (let i = 0; i < 50; i++) {
await db.set(`test:range:${i.toString().padStart(3, "0")}`, `value${i}`);
}
const page1 = await db.getRangeAll("test:range:", "test:range:~", { limit: 10 });
const page2 = await db.getRangeAll("test:range:", "test:range:~", {
limit: 10,
reverse: true
});
expect(page1).toHaveLength(10);
expect(page2).toHaveLength(10);
expect(page1[0][0].toString()).not.toBe(page2[0][0].toString());
});
});Helper functions for common test scenarios.
import fdb, { FDBError } from "foundationdb";
export class FDBTestHelper {
private db: ReturnType<typeof fdb.open>;
private testPrefix: string;
constructor(testPrefix = "test:") {
fdb.setAPIVersion(620);
this.db = fdb.open();
this.testPrefix = testPrefix;
}
getDB() {
return this.db;
}
async cleanup() {
await this.db.clearRangeStartsWith(this.testPrefix);
}
async close() {
this.db.close();
}
async withTransaction<T>(
fn: (tn: any) => Promise<T>,
opts?: any
): Promise<T> {
return await this.db.doTransaction(fn, opts);
}
async expectError(
fn: () => Promise<any>,
errorCode?: number
): Promise<FDBError> {
try {
await fn();
throw new Error("Expected operation to throw");
} catch (error) {
if (error instanceof FDBError) {
if (errorCode !== undefined && error.code !== errorCode) {
throw new Error(`Expected error code ${errorCode}, got ${error.code}`);
}
return error;
}
throw error;
}
}
async populateTestData(count: number, prefix?: string) {
const pfx = prefix || this.testPrefix;
const operations = Array.from({ length: count }, (_, i) =>
this.db.set(`${pfx}${i}`, `value${i}`)
);
await Promise.all(operations);
}
async assertKeyExists(key: string): Promise<Buffer> {
const value = await this.db.get(key);
if (value === undefined) {
throw new Error(`Expected key "${key}" to exist`);
}
return value;
}
async assertKeyNotExists(key: string): Promise<void> {
const value = await this.db.get(key);
if (value !== undefined) {
throw new Error(`Expected key "${key}" to not exist`);
}
}
}
// Usage in tests
import { describe, beforeAll, afterAll, beforeEach, test, expect } from "vitest";
describe("Using FDBTestHelper", () => {
let helper: FDBTestHelper;
beforeAll(() => {
helper = new FDBTestHelper("test:helper:");
});
afterAll(async () => {
await helper.cleanup();
await helper.close();
});
beforeEach(async () => {
await helper.cleanup();
});
test("populate and verify test data", async () => {
await helper.populateTestData(10);
await helper.assertKeyExists("test:helper:0");
await helper.assertKeyExists("test:helper:9");
await helper.assertKeyNotExists("test:helper:10");
});
test("transaction helper", async () => {
const result = await helper.withTransaction(async (tn) => {
tn.set("test:helper:tx", "value");
return "success";
});
expect(result).toBe("success");
});
});Manage database connections efficiently across your application.
import fdb from "foundationdb";
class FDBConnectionPool {
private static instance: FDBConnectionPool;
private db: ReturnType<typeof fdb.open> | null = null;
private initialized = false;
private constructor() {}
static getInstance(): FDBConnectionPool {
if (!FDBConnectionPool.instance) {
FDBConnectionPool.instance = new FDBConnectionPool();
}
return FDBConnectionPool.instance;
}
async initialize(opts?: { clusterFile?: string; trace?: string }) {
if (this.initialized) return;
try {
fdb.setAPIVersion(620);
if (opts?.trace) {
fdb.configNetwork({
trace_enable: opts.trace,
trace_format: "json",
});
}
this.db = fdb.open(opts?.clusterFile);
this.db.setNativeOptions({
transaction_timeout: 10000,
transaction_retry_limit: 100,
max_watches: 20000,
});
this.initialized = true;
} catch (error) {
console.error("Failed to initialize FDB connection:", error);
throw error;
}
}
getDatabase(): ReturnType<typeof fdb.open> {
if (!this.db) {
throw new Error("Database not initialized. Call initialize() first.");
}
return this.db;
}
async shutdown() {
if (this.db) {
this.db.close();
fdb.stopNetworkSync();
this.db = null;
this.initialized = false;
}
}
isInitialized(): boolean {
return this.initialized;
}
}
// Usage across application
export const fdbPool = FDBConnectionPool.getInstance();
// In application startup
await fdbPool.initialize({ trace: "./fdb-traces" });
// In any module
import { fdbPool } from "./fdb-pool";
async function getData(key: string) {
const db = fdbPool.getDatabase();
return await db.get(key);
}
// Graceful shutdown
process.on("SIGTERM", async () => {
await fdbPool.shutdown();
process.exit(0);
});Implement robust retry logic for transient failures.
import fdb, { FDBError } from "foundationdb";
/**
 * Execute an async operation, retrying transient failures with
 * exponential backoff plus jitter.
 * @param operation - The work to attempt
 * @param maxRetries - Number of retries after the first attempt (default 5)
 * @param baseDelay - Initial backoff delay in ms (default 100)
 * @returns The operation's result
 * @throws FDBError code 1007 immediately (treated as non-retryable here);
 *         otherwise a summary error once all attempts are exhausted
 */
async function withRetry<T>(
  operation: () => Promise<T>,
  maxRetries = 5,
  baseDelay = 100
): Promise<T> {
  let lastError: Error | undefined;
  let attempt = 0;
  while (attempt <= maxRetries) {
    try {
      return await operation();
    } catch (error) {
      lastError = error as Error;
      // Rethrow immediately instead of retrying on this error code.
      if (error instanceof FDBError && error.code === 1007) {
        throw error;
      }
      if (attempt < maxRetries) {
        // Backoff doubles each attempt; jitter spreads out stampedes.
        const delay = baseDelay * Math.pow(2, attempt);
        const jitter = Math.random() * delay * 0.1;
        await new Promise((resolve) => setTimeout(resolve, delay + jitter));
      }
    }
    attempt++;
  }
  throw new Error(`Operation failed after ${maxRetries} retries: ${lastError?.message}`);
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const result = await withRetry(async () => {
return await db.doTransaction(async (tn) => {
const value = await tn.get("critical:data");
tn.set("critical:data", "updated");
return value;
});
});Protect against cascading failures with circuit breaker.
import fdb, { FDBError } from "foundationdb";
/**
 * Circuit breaker guarding operations against cascading failures.
 * States: "closed" (normal), "open" (fail fast), "half-open" (probing).
 */
class CircuitBreaker {
  private failureCount = 0;
  private lastFailureAt = 0;
  private state: "closed" | "open" | "half-open" = "closed";
  /**
   * @param threshold - Consecutive failures before the circuit opens
   * @param timeout - Configured timeout value (not consulted internally)
   * @param resetTimeout - ms the circuit stays open before probing again
   */
  constructor(
    private threshold = 5,
    private timeout = 60000,
    private resetTimeout = 30000
  ) {}
  /** Run `operation` through the breaker; throws if the circuit is open. */
  async execute<T>(operation: () => Promise<T>): Promise<T> {
    if (this.state === "open") {
      const sinceLastFailure = Date.now() - this.lastFailureAt;
      if (sinceLastFailure > this.resetTimeout) {
        this.state = "half-open"; // allow one probe attempt through
      } else {
        throw new Error("Circuit breaker is OPEN");
      }
    }
    try {
      const result = await operation();
      this.recordSuccess();
      return result;
    } catch (error) {
      this.recordFailure();
      throw error;
    }
  }
  /** Any success fully closes the circuit and clears the failure count. */
  private recordSuccess() {
    this.failureCount = 0;
    this.state = "closed";
  }
  /** Count a failure; open the circuit once the threshold is reached. */
  private recordFailure() {
    this.failureCount++;
    this.lastFailureAt = Date.now();
    if (this.failureCount >= this.threshold) {
      this.state = "open";
    }
  }
  /** Current breaker state, exposed for monitoring and tests. */
  getState() {
    return this.state;
  }
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const breaker = new CircuitBreaker(5, 60000, 30000);
async function safeQuery(key: string): Promise<Buffer | undefined> {
return await breaker.execute(async () => {
return await db.get(key);
});
}Process large datasets efficiently with automatic chunking.
import fdb from "foundationdb";
/**
 * Stream a key range through `processor` in fixed-size chunks so that no
 * single read pulls an unbounded amount of data.
 * @param db - Open FoundationDB database handle
 * @param prefix - Key prefix delimiting the range to scan
 * @param processor - Receives each chunk of [key, value] pairs
 * @param chunkSize - Maximum pairs per read (default 100)
 * @returns All processor results, concatenated in range order
 */
async function processBatchInChunks<T>(
  db: ReturnType<typeof fdb.open>,
  prefix: string,
  processor: (batch: Array<[Buffer, Buffer]>) => Promise<T[]>,
  chunkSize = 100
): Promise<T[]> {
  const collected: T[] = [];
  let cursor = prefix;
  for (;;) {
    // NOTE(review): prefix + "~" as the end key assumes all keys sort
    // below "~" — confirm for non-ASCII key material.
    const page = await db.getRangeAll(cursor, prefix + "~", {
      limit: chunkSize,
    });
    if (page.length === 0) break;
    const pageResults = await processor(page);
    collected.push(...pageResults);
    if (page.length < chunkSize) break;
    // Resume just past the last key seen ("\x00" is the smallest suffix).
    cursor = page[page.length - 1][0].toString() + "\x00";
  }
  return collected;
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const processed = await processBatchInChunks(
db,
"users:",
async (batch) => {
return batch.map(([key, value]) => ({
key: key.toString(),
parsed: JSON.parse(value.toString()),
}));
},
50
);Implement application-level caching with FDB watches.
import fdb from "foundationdb";
/**
 * Read-through cache whose entries self-invalidate via FDB watches:
 * each cached value carries a watch on its key, and when the key changes
 * the entry is dropped so the next get() re-reads from the database.
 * NOTE(review): concurrent get() calls for the same uncached key each
 * create a watch before any of them populates the cache — consider
 * deduplicating in-flight fetches if watch count matters.
 */
class FDBCache<T> {
// key -> cached value plus the watch that invalidates it
private cache = new Map<string, { value: T; watch: any }>();
constructor(private db: ReturnType<typeof fdb.open>) {}
/**
 * Return the cached value for `key`, or fetch, parse, and cache it.
 * Returns undefined when the key does not exist in the database.
 */
async get(key: string, parser: (buf: Buffer) => T): Promise<T | undefined> {
// Check cache first
const cached = this.cache.get(key);
if (cached) {
return cached.value;
}
// Miss: read the value and register a watch in one operation.
const watch = await this.db.getAndWatch(key);
if (watch.value === undefined) {
return undefined;
}
const value = parser(watch.value);
this.cache.set(key, { value, watch });
// When the key changes (or the watch is cancelled), drop the entry.
watch.promise.then(() => {
this.cache.delete(key);
});
return value;
}
/** Write `value` to the database and drop any stale cached copy. */
async set(key: string, value: T, serializer: (val: T) => Buffer) {
await this.db.set(key, serializer(value));
this.cache.delete(key); // Invalidate cache
}
/** Cancel every outstanding watch and empty the cache. */
clear() {
this.cache.forEach(({ watch }) => watch.cancel());
this.cache.clear();
}
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const cache = new FDBCache(db);
const user = await cache.get(
"user:123",
(buf) => JSON.parse(buf.toString())
);
await cache.set(
"user:123",
{ name: "Alice", email: "alice@example.com" },
(val) => Buffer.from(JSON.stringify(val))
);Implement distributed locking for coordination.
import fdb from "foundationdb";
/**
 * Advisory distributed lock stored under `locks:<name>`.
 * The lock value is a unique token (`<timestamp>-<random>`) so release()
 * only clears the lock if this instance still owns it. Expiry is
 * wall-clock based: a holder that stalls past `ttl` ms can lose the lock.
 * NOTE(review): TTL comparison uses Date.now() on each client — clock
 * skew between machines affects expiry; confirm acceptable for callers.
 */
class DistributedLock {
private lockKey: string;
// Unique ownership token; its leading timestamp doubles as acquire time.
private lockValue: string;
constructor(
private db: ReturnType<typeof fdb.open>,
lockName: string,
private ttl = 30000
) {
this.lockKey = `locks:${lockName}`;
this.lockValue = `${Date.now()}-${Math.random()}`;
}
/**
 * Try to take the lock, polling every 100ms until `timeout` ms elapse.
 * @returns true if acquired, false on timeout
 */
async acquire(timeout = 10000): Promise<boolean> {
const startTime = Date.now();
while (Date.now() - startTime < timeout) {
try {
// Transactional check-and-set: FDB conflict detection guarantees
// at most one contender's write commits per round.
const acquired = await this.db.doTransaction(async (tn) => {
const existing = await tn.get(this.lockKey);
if (existing === undefined) {
// Lock is free
tn.set(this.lockKey, this.lockValue);
return true;
}
// Held: parse the holder's acquire timestamp from its token.
const lockData = existing.toString();
const lockTime = parseInt(lockData.split("-")[0]);
if (Date.now() - lockTime > this.ttl) {
// Holder exceeded its TTL — steal the lock.
tn.set(this.lockKey, this.lockValue);
return true;
}
return false;
});
if (acquired) return true;
} catch (error) {
// NOTE(review): this swallows ALL errors, not just transaction
// conflicts — a persistent failure degrades into a silent timeout.
}
// Wait before retry
await new Promise((resolve) => setTimeout(resolve, 100));
}
return false;
}
/** Release the lock, but only if this instance's token still owns it. */
async release(): Promise<void> {
await this.db.doTransaction(async (tn) => {
const existing = await tn.get(this.lockKey);
if (existing?.toString() === this.lockValue) {
tn.clear(this.lockKey);
}
});
}
/**
 * Run `fn` while holding the lock; always releases afterwards.
 * @throws Error if the lock cannot be acquired within `timeout` ms
 */
async withLock<T>(fn: () => Promise<T>, timeout = 10000): Promise<T> {
const acquired = await this.acquire(timeout);
if (!acquired) {
throw new Error(`Failed to acquire lock: ${this.lockKey}`);
}
try {
return await fn();
} finally {
await this.release();
}
}
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const lock = new DistributedLock(db, "resource:123", 30000);
await lock.withLock(async () => {
// Critical section - only one process can execute this at a time
const value = await db.get("shared:resource");
await db.set("shared:resource", "updated");
});Implement event sourcing with ordered event storage.
import fdb, { tuple } from "foundationdb";
interface Event {
type: string;
data: any;
timestamp: number;
version?: Buffer;
}
class EventStore {
private db: ReturnType<typeof fdb.open>;
constructor() {
fdb.setAPIVersion(620);
this.db = fdb.open()
.withKeyEncoding(fdb.encoders.tuple)
.withValueEncoding(fdb.encoders.json);
}
async appendEvent(
streamId: string,
eventType: string,
data: any
): Promise<Buffer> {
return await this.db.doTransaction(async (tn) => {
const key = ["events", streamId, tuple.unboundVersionstamp()];
const event: Event = {
type: eventType,
data,
timestamp: Date.now(),
};
tn.setVersionstampedKey(key, event);
const versionstamp = tn.getVersionstamp();
return versionstamp.promise;
});
}
async getEvents(
streamId: string,
fromVersion?: Buffer,
limit?: number
): Promise<Event[]> {
const start = fromVersion
? ["events", streamId, fromVersion]
: ["events", streamId];
const events = await this.db.getRangeAll(
start,
["events", streamId, Buffer.from([0xff])],
{ limit }
);
return events.map(([key, value]) => ({
...(value as any),
version: key[2] as Buffer,
}));
}
async replay(
streamId: string,
handler: (event: Event) => void | Promise<void>
): Promise<void> {
const events = await this.getEvents(streamId);
for (const event of events) {
await handler(event);
}
}
async getSnapshot(streamId: string): Promise<any> {
const events = await this.getEvents(streamId);
// Rebuild state from events
let state: any = {};
for (const event of events) {
state = this.applyEvent(state, event);
}
return state;
}
private applyEvent(state: any, event: Event): any {
// Apply event to state based on event type
switch (event.type) {
case "created":
return { ...event.data, created: true };
case "updated":
return { ...state, ...event.data };
case "deleted":
return { ...state, deleted: true };
default:
return state;
}
}
}
// Usage
const store = new EventStore();
// Append events
await store.appendEvent("order:123", "OrderCreated", {
items: ["item1", "item2"],
total: 100,
});
await store.appendEvent("order:123", "ItemAdded", {
item: "item3",
});
await store.appendEvent("order:123", "OrderConfirmed", {
confirmedAt: Date.now(),
});
// Replay events
await store.replay("order:123", (event) => {
console.log(`Event: ${event.type}`, event.data);
});
// Get current snapshot
const currentState = await store.getSnapshot("order:123");
console.log("Current state:", currentState);Understanding and handling common FoundationDB error codes.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function handleFDBErrors() {
try {
await db.doTransaction(async (tn) => {
tn.set("key", "value");
});
} catch (error) {
if (error instanceof FDBError) {
switch (error.code) {
case 1007: // Transaction too old
console.error("Transaction took too long - increase timeout");
break;
case 1009: // Request for future version
console.error("Clock skew detected - check system time");
break;
case 1020: // Not committed (transaction may have succeeded)
console.error("Commit status unknown - check if data was written");
break;
case 1021: // Transaction cancelled
console.error("Transaction was cancelled");
break;
case 1025: // Transaction timed out
console.error("Operation exceeded timeout limit");
break;
case 2017: // Transaction too large
console.error("Transaction size exceeds limit - split into smaller transactions");
break;
default:
console.error(`FDB Error ${error.code}: ${error.message}`);
}
} else {
console.error("Non-FDB error:", error);
}
}
}Handle transaction conflicts with proper retry logic.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
/**
 * Demonstrates manual conflict-retry handling around doTransaction.
 *
 * NOTE: doTransaction already retries retryable errors internally; the
 * explicit loop here exists purely to illustrate the pattern.
 *
 * @returns The incremented counter value from the first successful attempt
 * @throws Error after maxAttempts conflicts, or the original non-retryable error
 */
async function handleConflicts() {
  let attempts = 0;
  const maxAttempts = 5;
  // FDB codes that indicate the transaction can safely be retried:
  // 1020 = not_committed (the actual conflict code), 1007 = transaction_too_old.
  // (The original example tested only 1007, which is a timeout-style error,
  // not a conflict.)
  const retryableCodes = [1020, 1007];
  while (attempts < maxAttempts) {
    try {
      const result = await db.doTransaction(async (tn) => {
        attempts++;
        const value = await tn.get("counter");
        const count = parseInt(value?.toString() || "0");
        // Simulate some processing
        await new Promise(resolve => setTimeout(resolve, 10));
        tn.set("counter", (count + 1).toString());
        return count + 1;
      });
      console.log(`Success after ${attempts} attempts:`, result);
      return result;
    } catch (error) {
      if (error instanceof FDBError && retryableCodes.includes(error.code)) {
        console.log(`Attempt ${attempts} failed with conflict, retrying...`);
        if (attempts >= maxAttempts) {
          throw new Error(`Failed after ${maxAttempts} attempts`);
        }
        // doTransaction handles retry automatically, but showing manual retry for illustration
      } else {
        throw error;
      }
    }
  }
}Handle and prevent timeout errors effectively.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
/**
 * Demonstrates detecting a transaction timeout (FDB error 1025) during a
 * long-running batched range scan, along with mitigation strategies.
 * The error is always rethrown after logging.
 */
async function handleTimeouts() {
try {
await db.doTransaction(async (tn) => {
// Long-running operation
for await (const batch of tn.getRangeBatch("data:", "data:~")) {
// Process batch
await processBatch(batch);
}
}, {
timeout: 30000, // 30 second timeout
});
} catch (error) {
// 1025 = transaction_timed_out
if (error instanceof FDBError && error.code === 1025) {
console.error("Transaction timed out");
// Strategies to fix:
// 1. Increase timeout
// 2. Split into smaller transactions
// 3. Use snapshot reads where possible
// 4. Optimize query performance
}
throw error;
}
}
/** Placeholder for per-batch processing in the example above. */
async function processBatch(batch: any) {
// Batch processing logic
}Handle network-related errors and connectivity issues.
import fdb, { FDBError } from "foundationdb";
/**
 * Demonstrates diagnosing network/cluster connectivity failures by
 * inspecting FDB error codes. Every error is rethrown after logging.
 */
async function handleNetworkErrors() {
  fdb.setAPIVersion(620);
  try {
    const db = fdb.open("/path/to/fdb.cluster");
    await db.doTransaction(async (tn) => {
      tn.set("key", "value");
    });
  } catch (error) {
    if (error instanceof FDBError) {
      switch (error.code) {
        case 1031:
          console.error("Cannot connect to cluster - check network and cluster file");
          break;
        case 1032:
          console.error("Cluster file invalid or corrupted");
          break;
        case 2501:
          console.error("No coordinators available - check FDB cluster health");
          break;
      }
    }
    throw error;
  }
}Handle directory-specific errors.
import fdb, { directory, DirectoryError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
/**
 * Demonstrates recovery from directory-layer errors.
 *
 * On "already exists" the directory is opened instead; on "does not exist"
 * it is created. In both cases the recovered directory is returned.
 * Unrecoverable errors are rethrown.
 *
 * Bug fix vs. the original: the original recovered successfully and then
 * fell through to `throw error` anyway, and never returned the directory.
 */
async function handleDirectoryErrors() {
  try {
    // Try to create existing directory
    return await directory.create(db, ["myapp", "users"]);
  } catch (error) {
    if (error instanceof DirectoryError) {
      console.error("Directory operation failed:", error.message);
      // Check specific error messages
      if (error.message.includes("already exists")) {
        console.log("Directory exists, opening instead");
        return await directory.open(db, ["myapp", "users"]);
      }
      if (error.message.includes("does not exist")) {
        console.log("Directory missing, creating");
        return await directory.create(db, ["myapp", "users"]);
      }
      if (error.message.includes("layer mismatch")) {
        console.error("Directory layer type mismatch");
      }
    }
    // Unhandled: rethrow for the caller.
    throw error;
  }
}Implement graceful error recovery patterns.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
/**
 * Runs an operation once, returning a fallback value on transient FDB
 * errors (1025 = timed out, 1007 = transaction too old) and rethrowing
 * everything else.
 *
 * @param operation - Async operation to attempt
 * @param fallback - Value to return when a transient error occurs
 * @returns The operation result, the fallback, or undefined when no
 *          fallback was supplied
 */
async function safeOperation<T>(
operation: () => Promise<T>,
fallback?: T
): Promise<T | undefined> {
try {
return await operation();
} catch (error) {
if (error instanceof FDBError) {
console.error(`FDB Error ${error.code}:`, error.message);
// Return fallback for specific errors
if (error.code === 1025 || error.code === 1007) {
console.log("Using fallback value due to transient error");
return fallback;
}
}
// Re-throw non-recoverable errors
throw error;
}
}
// Usage
const value = await safeOperation(
async () => await db.get("config:setting"),
Buffer.from("default-value")
);
// With retry wrapper
/**
 * Retries an operation with exponential backoff, giving up immediately on
 * fatal FDB errors (1031/1032 connection & cluster-file failures, 2501 no
 * coordinators).
 *
 * @param operation - Async operation to attempt
 * @param maxRetries - Maximum number of attempts
 * @returns The result of the first successful attempt
 * @throws The original fatal error, or an Error summarizing the last failure
 */
async function withGracefulRetry<T>(
  operation: () => Promise<T>,
  maxRetries = 3
): Promise<T> {
  // Track as unknown: anything can be thrown, not only Error instances.
  let lastError: unknown;
  for (let i = 0; i < maxRetries; i++) {
    try {
      return await operation();
    } catch (error) {
      lastError = error;
      if (error instanceof FDBError) {
        // Don't retry fatal errors
        if ([1031, 1032, 2501].includes(error.code)) {
          throw error;
        }
      }
      // Exponential backoff
      await new Promise(resolve =>
        setTimeout(resolve, Math.pow(2, i) * 100)
      );
    }
  }
  // Bug fix: the original did `lastError!.message`, which throws a
  // TypeError when maxRetries < 1 (lastError never assigned) or when a
  // non-Error value was thrown. Derive the message defensively instead.
  const message = lastError instanceof Error ? lastError.message : String(lastError);
  throw new Error(`Failed after ${maxRetries} retries: ${message}`);
}Diagnose and fix common performance problems.
Issue: Slow Transactions
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Problem: Large transaction with many operations
async function slowTransaction() {
await db.doTransaction(async (tn) => {
// Thousands of operations in single transaction
for (let i = 0; i < 10000; i++) {
tn.set(`key:${i}`, `value:${i}`);
}
});
}
// Solution: Split into smaller transactions
async function fastTransactions() {
const batchSize = 100;
for (let i = 0; i < 10000; i += batchSize) {
await db.doTransaction(async (tn) => {
for (let j = i; j < i + batchSize && j < 10000; j++) {
tn.set(`key:${j}`, `value:${j}`);
}
});
}
}
// Solution: Use appropriate transaction options
await db.doTransaction(async (tn) => {
// Process data
}, {
timeout: 30000,
size_limit: 50000000,
});Issue: High Conflict Rate
// Problem: Many transactions competing for same keys
async function highConflict() {
await Promise.all(
Array.from({ length: 100 }, () =>
db.doTransaction(async (tn) => {
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
tn.set("counter", (count + 1).toString());
})
)
);
}
// Solution: Use atomic operations
async function lowConflict() {
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await Promise.all(
Array.from({ length: 100 }, () =>
db.add("counter", delta)
)
);
}
// Solution: Shard hot keys — spread increments over several keys so
// concurrent writers rarely touch the same key, then sum on read.
class ShardedCounter {
/**
 * @param db - Database handle used for all shard operations
 * @param shards - Number of shard keys to spread increments across
 */
constructor(private db: typeof db, private shards = 10) {}
/** Atomically increments one randomly chosen shard (low conflict rate). */
async increment() {
const shard = Math.floor(Math.random() * this.shards);
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await this.db.add(`counter:${shard}`, delta);
}
/** Sums all shards to produce the logical counter value. */
async getTotal(): Promise<number> {
let total = 0;
for (let i = 0; i < this.shards; i++) {
const value = await this.db.get(`counter:${i}`);
if (value) {
// Shard values are little-endian 64-bit integers written by add().
total += Number(value.readBigInt64LE(0));
}
}
return total;
}
}Issue: Memory Usage
// Problem: Loading too much data at once
async function highMemory() {
const allData = await db.getRangeAll("data:", "data:~");
// Process huge dataset - may cause OOM
}
// Solution: Use streaming with batches
async function lowMemory() {
for await (const batch of db.getRangeBatch("data:", "data:~")) {
// Process one batch at a time
await processBatch(batch);
}
}
// Solution: Use appropriate streaming mode
import { StreamingMode } from "foundationdb";
async function optimizedStreaming() {
for await (const batch of db.getRangeBatch("data:", "data:~", {
streamingMode: StreamingMode.Small, // Smaller batches
})) {
await processBatch(batch);
}
}
async function processBatch(batch: any) {
// Process batch
}Diagnose and resolve connection issues.
Issue: Cannot Connect to Cluster
import fdb from "foundationdb";
// Bug fix: use an ESM import instead of CommonJS require(), which fails
// in an ES module.
import { readFileSync } from "fs";
// Check 1: Verify cluster file
console.log("Checking cluster file...");
try {
  const clusterFile = "/etc/foundationdb/fdb.cluster";
  const content = readFileSync(clusterFile, "utf8");
  console.log("Cluster file content:", content);
} catch (error) {
  console.error("Cannot read cluster file:", error);
}
fdb.setAPIVersion(620);
// Check 2: Configure tracing BEFORE fdb.open() — network options are
// ignored once the client network thread has started, so the original
// ordering (configNetwork after open) silently had no effect.
fdb.configNetwork({
  trace_enable: "./fdb-traces",
  trace_format: "json",
});
// Check 3: Test connection
try {
  const db = fdb.open();
  await db.get("test");
  console.log("Connection successful");
} catch (error) {
  console.error("Connection failed:", error);
}
// Check traces for connection errorsIssue: Transaction Timeouts
// Diagnostic: Check transaction size
await db.doTransaction(async (tn) => {
// Perform operations
const size = tn.getApproximateSize();
console.log("Transaction size:", size);
if (size > 5000000) {
console.warn("Transaction size large, may timeout");
}
});
// Solution: Increase timeout or split transaction
db.setNativeOptions({
transaction_timeout: 30000, // 30 seconds
});
// Or per-transaction
await db.doTransaction(async (tn) => {
// Operations
}, {
timeout: 60000, // 60 seconds
});Issue: Watch Not Triggering
// Problem: Watch created outside transaction
// const watch = db.watch("key"); // ERROR
// Solution: Create watch in transaction
const watch = await db.doTransaction(async (tn) => {
return tn.watch("key");
});
// Problem: Awaiting watch inside transaction
await db.doTransaction(async (tn) => {
const watch = tn.watch("key");
// await watch.promise; // DEADLOCK!
return watch;
}).then(async (watch) => {
await watch.promise; // Correct
});
// Problem: Too many watches
db.setNativeOptions({
max_watches: 20000, // Increase limit
});Prevent and diagnose data integrity problems.
Issue: Encoding Mismatch
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Problem: Writing with one encoder, reading with another
await db.withValueEncoding(fdb.encoders.json)
.set("key", { value: 123 });
const wrong = await db.withValueEncoding(fdb.encoders.string)
.get("key"); // Wrong encoding!
// Solution: Consistent encoder usage
const jsonDb = db.withValueEncoding(fdb.encoders.json);
await jsonDb.set("key", { value: 123 });
const correct = await jsonDb.get("key"); // { value: 123 }
// Solution: Document encoding choices
class UserStore {
private db: ReturnType<typeof fdb.open>;
constructor(db: ReturnType<typeof fdb.open>) {
this.db = db
.at("users:")
.withKeyEncoding(fdb.encoders.string)
.withValueEncoding(fdb.encoders.json);
}
async save(id: string, user: any) {
await this.db.set(id, user);
}
async load(id: string) {
return await this.db.get(id);
}
}Issue: Lost Updates
// Problem: Not using transactions properly
async function lostUpdate() {
const value = await db.get("counter");
const count = parseInt(value?.toString() || "0");
// Another process might update here!
await db.set("counter", (count + 1).toString());
}
// Solution: Use transactions
async function safeUpdate() {
await db.doTransaction(async (tn) => {
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
tn.set("counter", (count + 1).toString());
});
}
// Better: Use atomic operations
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await db.add("counter", delta);Issue: Directory Conflicts
import fdb, { directory } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Problem: Racing directory creation
// Problem: Racing directory creation — when two processes race, the
// second one's create() fails.
async function racingCreation() {
  try {
    const dir = await directory.create(db, ["myapp", "tenant"]);
  } catch (error) {
    // Second process fails (error deliberately swallowed for illustration).
  }
}
// Solution: Use createOrOpen
async function safeCreation() {
  const dir = await directory.createOrOpen(db, ["myapp", "tenant"]);
  // Works for all processes
}
// Solution: Handle errors gracefully
async function robustCreation() {
  try {
    // Bug fix: return the directory on the success path too — the
    // original returned it only after recovery, undefined on success.
    return await directory.create(db, ["myapp", "tenant"]);
  } catch (error) {
    // Bug fix: `error` is `unknown` under strict mode; narrow before
    // reading .message.
    if (error instanceof Error && error.message.includes("already exists")) {
      return await directory.open(db, ["myapp", "tenant"]);
    }
    throw error;
  }
}Tools and techniques for debugging issues.
Enable Tracing
import fdb from "foundationdb";
// Enable detailed tracing
fdb.configNetwork({
trace_enable: "./fdb-traces",
trace_format: "json",
trace_log_group: "myapp",
});
fdb.setAPIVersion(620);
const db = fdb.open();
// Check traces at ./fdb-traces/*.json
// Look for: errors, warnings, slow_task eventsTransaction Debugging
import fdb, { TransactionOptionCode } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Enable transaction logging
tn.setOption(TransactionOptionCode.DebugTransactionIdentifier, "my-tx-123");
tn.setOption(TransactionOptionCode.LogTransaction);
// Perform operations
tn.set("key", "value");
}, {
debug_transaction_identifier: "test-transaction",
log_transaction: true,
});
// Check traces for transaction detailsPerformance Profiling
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function profileOperation() {
const start = Date.now();
await db.doTransaction(async (tn) => {
const opStart = Date.now();
const value = await tn.get("key");
console.log(`Get took ${Date.now() - opStart}ms`);
const setStart = Date.now();
tn.set("key", "value");
console.log(`Set took ${Date.now() - setStart}ms`);
});
console.log(`Total transaction: ${Date.now() - start}ms`);
}
// Monitor transaction sizes
await db.doTransaction(async (tn) => {
for (let i = 0; i < 1000; i++) {
tn.set(`key:${i}`, `value:${i}`);
if (i % 100 === 0) {
console.log(`Size at ${i}:`, tn.getApproximateSize());
}
}
});1. Keep Transactions Short
Minimize transaction duration to reduce conflicts and avoid timeouts.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Bad: Long-running computation in transaction
await db.doTransaction(async (tn) => {
const data = await tn.get("data");
// Expensive computation
const result = await expensiveComputation(data);
tn.set("result", result);
});
// Good: Compute outside transaction
const data = await db.get("data");
const result = await expensiveComputation(data);
await db.doTransaction(async (tn) => {
tn.set("result", result);
});2. Use Atomic Operations for Counters
Avoid read-modify-write patterns for counters.
// Bad: Read-modify-write
await db.doTransaction(async (tn) => {
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
tn.set("counter", (count + 1).toString());
});
// Good: Atomic add
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await db.add("counter", delta);3. Use Snapshot Reads When Possible
Reduce conflicts by using snapshot reads for non-critical data.
await db.doTransaction(async (tn) => {
// Critical read (causes conflicts)
const critical = await tn.get("critical:data");
// Non-critical read (no conflicts)
const metadata = await tn.snapshot().get("metadata");
// Write based on critical data
tn.set("result", processData(critical));
});4. Batch Related Operations
Group related operations in single transactions.
// Bad: Multiple transactions
await db.set("user:alice:name", "Alice");
await db.set("user:alice:email", "alice@example.com");
await db.set("user:alice:age", "30");
// Good: Single transaction
await db.doTransaction(async (tn) => {
tn.set("user:alice:name", "Alice");
tn.set("user:alice:email", "alice@example.com");
tn.set("user:alice:age", "30");
});5. Handle Large Datasets with Chunking
Split large operations into manageable chunks.
/**
 * Processes a large "data:" keyspace in chunks of up to 1000 keys,
 * handling each chunk in its own transaction so no single transaction
 * grows too large or runs too long.
 */
async function processLargeDataset() {
let startKey = "data:";
const chunkSize = 1000;
while (true) {
const chunk = await db.getRangeAll(
startKey,
"data:~",
{ limit: chunkSize }
);
if (chunk.length === 0) break;
await db.doTransaction(async (tn) => {
for (const [key, value] of chunk) {
// Process item
tn.set(key.toString() + ":processed", "true");
}
});
// Fewer results than the limit means the range is exhausted.
if (chunk.length < chunkSize) break;
// Resume just past the last key: appending \x00 produces the smallest
// key strictly greater than it, making the next range start exclusive.
startKey = chunk[chunk.length - 1][0].toString() + "\x00";
}
}6. Use Hierarchical Key Structure
Organize keys hierarchically for efficient queries.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Good: Hierarchical structure
await db.set("app:users:alice:profile", "...");
await db.set("app:users:alice:settings", "...");
await db.set("app:users:bob:profile", "...");
await db.set("app:orders:12345:items", "...");
// Query all user data
const aliceData = await db.getRangeAllStartsWith("app:users:alice:");
// Query all orders
const orders = await db.getRangeAllStartsWith("app:orders:");7. Use Tuple Encoding for Composite Keys
Leverage tuple encoding for structured keys.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open().withKeyEncoding(fdb.encoders.tuple);
// Store with composite keys
await db.set(["user", "alice", "profile"], "data");
await db.set(["user", "alice", "settings"], "data");
await db.set(["order", 12345, "items"], "data");
// Query by prefix
const aliceData = await db.getRangeAllStartsWith(["user", "alice"]);
const orders = await db.getRangeAllStartsWith(["order"]);8. Use Directories for Multi-Tenancy
Isolate tenant data with directory layer.
import fdb, { directory } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Create tenant directories
const tenant1 = await directory.createOrOpen(db, ["tenants", "acme"]);
const tenant2 = await directory.createOrOpen(db, ["tenants", "techcorp"]);
// Scoped databases
const acmeDb = db.at(tenant1);
const techcorpDb = db.at(tenant2);
// Isolated data
await acmeDb.set("data", "Acme data");
await techcorpDb.set("data", "Techcorp data");9. Implement Retry Logic with Backoff
Handle transient failures gracefully.
/**
 * Runs an async operation, retrying with exponential backoff on failure.
 *
 * @param operation - Async operation to run
 * @param maxRetries - Number of retries after the initial attempt
 * @param baseDelay - Backoff base in ms (delay = baseDelay * 2^attempt)
 * @returns The first successful result
 * @throws The last error once all attempts are exhausted
 */
async function withRetry<T>(
  operation: () => Promise<T>,
  maxRetries = 5,
  baseDelay = 100
): Promise<T> {
  let attempt = 0;
  while (true) {
    try {
      return await operation();
    } catch (error) {
      // Out of retries: surface the final failure to the caller.
      if (attempt === maxRetries) throw error;
      const backoff = baseDelay * 2 ** attempt;
      await new Promise(resolve => setTimeout(resolve, backoff));
      attempt++;
    }
  }
}
const result = await withRetry(() => db.get("key"));10. Monitor Transaction Metrics
Track performance and errors for optimization.
/**
 * Collects simple runtime metrics (transaction count, error count, and a
 * running average duration in ms) for operations executed through
 * trackTransaction.
 */
class MetricsCollector {
  private metrics = {
    transactionCount: 0,
    errorCount: 0,
    avgDuration: 0,
  };
  /**
   * Runs an async operation, recording its duration on success and
   * incrementing the error counter on failure. Errors are rethrown.
   */
  async trackTransaction<T>(
    operation: () => Promise<T>
  ): Promise<T> {
    const startedAt = Date.now();
    try {
      const outcome = await operation();
      const elapsed = Date.now() - startedAt;
      const n = this.metrics.transactionCount + 1;
      this.metrics.transactionCount = n;
      // Incremental mean: new_avg = old_avg + (x - old_avg) / n,
      // algebraically equal to (old_avg * (n - 1) + x) / n.
      this.metrics.avgDuration += (elapsed - this.metrics.avgDuration) / n;
      return outcome;
    } catch (err) {
      this.metrics.errorCount++;
      throw err;
    }
  }
  /** Returns a defensive copy of the current metrics. */
  getMetrics() {
    return { ...this.metrics };
  }
}
const metrics = new MetricsCollector();
await metrics.trackTransaction(() =>
db.doTransaction(async (tn) => {
tn.set("key", "value");
})
);
console.log("Metrics:", metrics.getMetrics());11. Close Connections Properly
Ensure clean shutdown of database connections.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Use try-finally for cleanup
try {
await db.set("key", "value");
} finally {
db.close();
fdb.stopNetworkSync();
}
// Handle graceful shutdown
process.on("SIGTERM", () => {
console.log("Shutting down...");
db.close();
fdb.stopNetworkSync();
process.exit(0);
});
process.on("SIGINT", () => {
console.log("Interrupted, cleaning up...");
db.close();
fdb.stopNetworkSync();
process.exit(0);
});12. Use Connection Pooling
Reuse database connections across application.
/**
 * Application-wide singleton wrapper around a single FDB database handle.
 * The FDB client multiplexes all work over one network thread, so one
 * shared handle is the recommended "pool".
 */
class DatabasePool {
// Lazily created shared handle; null until initialize() runs.
private static db: ReturnType<typeof fdb.open> | null = null;
/** Opens the shared database handle; safe to call more than once. */
static initialize() {
if (!this.db) {
fdb.setAPIVersion(620);
this.db = fdb.open();
this.db.setNativeOptions({
transaction_timeout: 10000,
max_watches: 20000,
});
}
}
/** Returns the shared handle; throws if initialize() was never called. */
static getDatabase() {
if (!this.db) {
throw new Error("Database not initialized");
}
return this.db;
}
/** Closes the handle and stops the FDB network thread; idempotent. */
static shutdown() {
if (this.db) {
this.db.close();
fdb.stopNetworkSync();
this.db = null;
}
}
}
// Initialize once at startup
DatabasePool.initialize();
// Use throughout application
const db = DatabasePool.getDatabase();
await db.set("key", "value");
// Shutdown on exit
process.on("exit", () => {
DatabasePool.shutdown();
});Install with Tessl CLI
npx tessl i tessl/npm-foundationdb