Atomic mutations enable lock-free concurrent updates through operations such as arithmetic, bitwise manipulation, and byte comparison. The database applies these mutations atomically on the server, so no client-side read-modify-write cycle is needed.
Core atomic mutation operations available in FoundationDB.
/**
* Mutation types for atomic operations
*/
enum MutationType {
/** Little-endian integer addition */
Add = 2,
/** Bitwise AND operation */
BitAnd = 6,
/** Bitwise OR operation */
BitOr = 7,
/** Bitwise XOR operation */
BitXor = 8,
/** Append data if result fits in value size limit */
AppendIfFits = 9,
/** Little-endian integer maximum */
Max = 12,
/** Little-endian integer minimum */
Min = 13,
/** Set versionstamped key */
SetVersionstampedKey = 14,
/** Set versionstamped value */
SetVersionstampedValue = 15,
/** Lexicographic byte minimum */
ByteMin = 16,
/** Lexicographic byte maximum */
ByteMax = 17,
/** Compare and clear */
CompareAndClear = 20,
}
Perform any atomic mutation by mutation type.
/**
* Generic atomic operation with key and value encoding
* @param opType - The mutation type
* @param key - The key to mutate
* @param oper - The operand value
*/
atomicOp(opType: MutationType, key: KeyIn, oper: ValIn): void;
/**
* Atomic operation with key encoding and Buffer operand
* @param opType - The mutation type
* @param key - The key to mutate
* @param oper - The operand as Buffer
*/
atomicOpKB(opType: MutationType, key: KeyIn, oper: Buffer): void;
/**
* Raw atomic operation with no encoding
* @param opType - The mutation type
* @param key - The key as string or Buffer
* @param oper - The operand as string or Buffer
*/
atomicOpNative(
opType: MutationType,
key: NativeValue,
oper: NativeValue
): void;
type NativeValue = string | Buffer;
Usage Example:
import fdb, { MutationType } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Using generic atomic operation
tn.atomicOp(MutationType.Add, "counter", Buffer.from([1, 0, 0, 0]));
// With Buffer operand
const mask = Buffer.from([0xff, 0x00, 0xff, 0x00]);
tn.atomicOpKB(MutationType.BitAnd, "flags", mask);
// Raw operation
tn.atomicOpNative(MutationType.Add, "raw:counter", Buffer.from([5]));
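// Other mutation types from the enum, such as CompareAndClear and AppendIfFits, can be
// issued the same way. A hedged sketch with illustrative keys: CompareAndClear clears the
// key only when its current value exactly equals the operand, and AppendIfFits appends
// the operand as long as the combined value stays within the value size limit.
tn.atomicOpKB(MutationType.CompareAndClear, "job:owner", Buffer.from("worker-1"));
tn.atomicOpKB(MutationType.AppendIfFits, "audit:log", Buffer.from("entry\n"));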
});
Add to an integer value atomically (little-endian).
/**
* Atomic addition of little-endian integers
* Treats existing value and operand as little-endian integers and adds them
* If key doesn't exist, treats existing value as 0
* @param key - The key to mutate
* @param oper - The value to add (number or Buffer in little-endian format)
*/
add(key: KeyIn, oper: ValIn): void;
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Increment counter
await db.add("counter", Buffer.from([1, 0, 0, 0])); // Add 1 (32-bit LE)
// Note: add() interprets values as little-endian integers, so an encoder like
// fdb.encoders.int32BE (big-endian) won't work here - build an LE Buffer instead
await db.doTransaction(async (tn) => {
// Add 5 to counter
const delta = Buffer.allocUnsafe(4);
delta.writeInt32LE(5, 0);
tn.add("hits", delta);
});
// Concurrent increments (no conflicts!)
await Promise.all([
db.add("total", Buffer.from([1, 0, 0, 0])),
db.add("total", Buffer.from([1, 0, 0, 0])),
db.add("total", Buffer.from([1, 0, 0, 0])),
]);
// Result: total = 3
Set value to maximum or minimum of current and operand (little-endian comparison).
/**
* Atomic maximum operation
* Sets value to max(current_value, operand) using little-endian integer comparison
* If key doesn't exist, sets to operand
* @param key - The key to mutate
* @param oper - The value to compare (number or Buffer in little-endian format)
*/
max(key: KeyIn, oper: ValIn): void;
/**
* Atomic minimum operation
* Sets value to min(current_value, operand) using little-endian integer comparison
* If key doesn't exist, sets to operand
* @param key - The key to mutate
* @param oper - The value to compare (number or Buffer in little-endian format)
*/
min(key: KeyIn, oper: ValIn): void;
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Track maximum value seen
const value = Buffer.allocUnsafe(4);
value.writeInt32LE(100, 0);
tn.max("max_score", value);
// Track minimum value seen
const minValue = Buffer.allocUnsafe(4);
minValue.writeInt32LE(50, 0);
tn.min("min_score", minValue);
});
// Concurrent max updates (no conflicts)
await Promise.all([
db.max("high_water", Buffer.from([100, 0, 0, 0])),
db.max("high_water", Buffer.from([150, 0, 0, 0])),
db.max("high_water", Buffer.from([75, 0, 0, 0])),
]);
// Result: high_water = 150
Perform bitwise AND, OR, XOR atomically.
/**
* Atomic bitwise AND
* Performs bitwise AND between current value and operand
* @param key - The key to mutate
* @param oper - The operand value
*/
bitAnd(key: KeyIn, oper: ValIn): void;
/**
* Atomic bitwise OR
* Performs bitwise OR between current value and operand
* @param key - The key to mutate
* @param oper - The operand value
*/
bitOr(key: KeyIn, oper: ValIn): void;
/**
* Atomic bitwise XOR
* Performs bitwise XOR between current value and operand
* @param key - The key to mutate
* @param oper - The operand value
*/
bitXor(key: KeyIn, oper: ValIn): void;
/**
* Atomic bitwise AND with Buffer operand (for precise bit control)
* @param key - The key to mutate
* @param oper - The operand as Buffer
*/
bitAndBuf(key: KeyIn, oper: Buffer): void;
/**
* Atomic bitwise OR with Buffer operand (for precise bit control)
* @param key - The key to mutate
* @param oper - The operand as Buffer
*/
bitOrBuf(key: KeyIn, oper: Buffer): void;
/**
* Atomic bitwise XOR with Buffer operand (for precise bit control)
* @param key - The key to mutate
* @param oper - The operand as Buffer
*/
bitXorBuf(key: KeyIn, oper: Buffer): void;
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Set flags using bitwise OR
const flagMask = Buffer.from([0x01]); // Set bit 0
tn.bitOrBuf("user:flags", flagMask);
// Clear flags using bitwise AND
const clearMask = Buffer.from([0xfe]); // Clear bit 0 (AND with ~0x01)
tn.bitAndBuf("user:flags", clearMask);
// Toggle flags using bitwise XOR
const toggleMask = Buffer.from([0x04]); // Toggle bit 2
tn.bitXorBuf("user:flags", toggleMask);
});
// Use case: Feature flags
await db.doTransaction(async (tn) => {
// Enable feature A (bit 0)
tn.bitOrBuf("features", Buffer.from([0x01]));
// Enable feature B (bit 1)
tn.bitOrBuf("features", Buffer.from([0x02]));
// Disable feature A (bit 0)
tn.bitAndBuf("features", Buffer.from([0xfe]));
// Check features
const features = await tn.get("features");
if (features && features[0] & 0x02) {
console.log("Feature B is enabled");
}
});
Lexicographic byte comparison for min and max.
/**
* Atomic byte-wise minimum
* Sets value to lexicographic minimum of current value and operand
* Compares bytes lexicographically, not as integers
* If key doesn't exist, sets to operand
* @param key - The key to mutate
* @param value - The value to compare
*/
byteMin(key: KeyIn, value: ValIn): void;
/**
* Atomic byte-wise maximum
* Sets value to lexicographic maximum of current value and operand
* Compares bytes lexicographically, not as integers
* If key doesn't exist, sets to operand
* @param key - The key to mutate
* @param value - The value to compare
*/
byteMax(key: KeyIn, value: ValIn): void;
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Track lexicographically smallest string
tn.byteMin("min_name", "alice");
// Track lexicographically largest string
tn.byteMax("max_name", "bob");
});
// Concurrent byte operations
await Promise.all([
db.byteMin("first", "charlie"),
db.byteMin("first", "alice"), // Will win (lexicographically smaller)
db.byteMin("first", "bob"),
]);
// Result: first = "alice"
await Promise.all([
db.byteMax("last", "charlie"),
db.byteMax("last", "alice"),
db.byteMax("last", "zebra"), // Will win (lexicographically larger)
]);
// Result: last = "zebra"
Implement distributed counters using atomic add.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Simple counter
class Counter {
constructor(private db: ReturnType<typeof fdb.open>, private key: string) {}
async increment(amount: number = 1): Promise<void> {
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(BigInt(amount), 0);
await this.db.add(this.key, delta);
}
async getValue(): Promise<number> {
const value = await this.db.get(this.key);
if (!value) return 0;
return Number(value.readBigInt64LE(0));
}
}
// Usage
const pageViews = new Counter(db, "stats:page_views");
// Multiple concurrent increments (no conflicts!)
await Promise.all([
pageViews.increment(),
pageViews.increment(),
pageViews.increment(5),
]);
const total = await pageViews.getValue();
console.log("Total page views:", total); // 7Use bitwise operations for feature flags and permissions.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Feature flags system
enum Feature {
FeatureA = 1 << 0, // 0x01
FeatureB = 1 << 1, // 0x02
FeatureC = 1 << 2, // 0x04
FeatureD = 1 << 3, // 0x08
}
class FeatureFlags {
constructor(private db: ReturnType<typeof fdb.open>, private key: string) {}
async enable(feature: Feature): Promise<void> {
const mask = Buffer.from([feature]);
await this.db.bitOrBuf(this.key, mask);
}
async disable(feature: Feature): Promise<void> {
const mask = Buffer.from([~feature & 0xff]);
await this.db.bitAndBuf(this.key, mask);
}
async toggle(feature: Feature): Promise<void> {
const mask = Buffer.from([feature]);
await this.db.bitXorBuf(this.key, mask);
}
async isEnabled(feature: Feature): Promise<boolean> {
const flags = await this.db.get(this.key);
if (!flags) return false;
return (flags[0] & feature) !== 0;
}
}
// Usage
const userFlags = new FeatureFlags(db, "user:123:flags");
await userFlags.enable(Feature.FeatureA);
await userFlags.enable(Feature.FeatureB);
console.log(await userFlags.isEnabled(Feature.FeatureA)); // true
await userFlags.toggle(Feature.FeatureA);
console.log(await userFlags.isEnabled(Feature.FeatureA)); // false
Distribute counters across shards to reduce contention.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Sharded counter for high-contention scenarios
class ShardedCounter {
constructor(
private db: ReturnType<typeof fdb.open>,
private baseKey: string,
private numShards: number = 10
) {}
async increment(amount: number = 1): Promise<void> {
// Pick random shard
const shard = Math.floor(Math.random() * this.numShards);
const key = `${this.baseKey}:${shard}`;
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(BigInt(amount), 0);
await this.db.add(key, delta);
}
async getValue(): Promise<number> {
// Sum all shards
let total = 0;
for (let i = 0; i < this.numShards; i++) {
const value = await this.db.get(`${this.baseKey}:${i}`);
if (value) {
total += Number(value.readBigInt64LE(0));
}
}
return total;
}
}
// Usage with high concurrency
const apiCalls = new ShardedCounter(db, "stats:api_calls", 20);
// Many concurrent increments with reduced conflicts
await Promise.all(
Array(1000)
.fill(0)
.map(() => apiCalls.increment())
);
const total = await apiCalls.getValue();
console.log("Total API calls:", total); // 1000Track extremes using atomic max/min operations.
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Track statistics
class Statistics {
constructor(private db: ReturnType<typeof fdb.open>, private prefix: string) {}
async recordValue(value: number): Promise<void> {
const buf = Buffer.allocUnsafe(8);
buf.writeBigInt64LE(BigInt(value), 0);
await this.db.doTransaction(async (tn) => {
// Track maximum
tn.max(`${this.prefix}:max`, buf);
// Track minimum
tn.min(`${this.prefix}:min`, buf);
// Increment count
const one = Buffer.allocUnsafe(8);
one.writeBigInt64LE(1n, 0);
tn.add(`${this.prefix}:count`, one);
// Add to sum
tn.add(`${this.prefix}:sum`, buf);
});
}
async getStats(): Promise<{
min: number;
max: number;
count: number;
average: number;
}> {
const [minVal, maxVal, count, sum] = await Promise.all([
this.db.get(`${this.prefix}:min`),
this.db.get(`${this.prefix}:max`),
this.db.get(`${this.prefix}:count`),
this.db.get(`${this.prefix}:sum`),
]);
const min = minVal ? Number(minVal.readBigInt64LE(0)) : 0;
const max = maxVal ? Number(maxVal.readBigInt64LE(0)) : 0;
const cnt = count ? Number(count.readBigInt64LE(0)) : 0;
const total = sum ? Number(sum.readBigInt64LE(0)) : 0;
return {
min,
max,
count: cnt,
average: cnt > 0 ? total / cnt : 0,
};
}
}
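// Optional variant (a sketch, not part of the Statistics class above): getStats()
// issues four separate implicit transactions, so count and sum may come from different
// snapshots. Reading all four keys inside one doTransaction gives a single consistent
// view; getStatsConsistent is an illustrative name.
async function getStatsConsistent(db: ReturnType<typeof fdb.open>, prefix: string) {
  return await db.doTransaction(async (tn) => {
    const [minVal, maxVal, count, sum] = await Promise.all([
      tn.get(`${prefix}:min`),
      tn.get(`${prefix}:max`),
      tn.get(`${prefix}:count`),
      tn.get(`${prefix}:sum`),
    ]);
    const cnt = count ? Number(count.readBigInt64LE(0)) : 0;
    const total = sum ? Number(sum.readBigInt64LE(0)) : 0;
    return {
      min: minVal ? Number(minVal.readBigInt64LE(0)) : 0,
      max: maxVal ? Number(maxVal.readBigInt64LE(0)) : 0,
      count: cnt,
      average: cnt > 0 ? total / cnt : 0,
    };
  });
}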
// Usage
const responseTime = new Statistics(db, "metrics:response_time");
// Record measurements concurrently (no conflicts!)
await Promise.all([
responseTime.recordValue(100),
responseTime.recordValue(250),
responseTime.recordValue(75),
responseTime.recordValue(300),
responseTime.recordValue(150),
]);
const stats = await responseTime.getStats();
console.log("Response time stats:", stats);
// { min: 75, max: 300, count: 5, average: 175 }
All atomic operations are available on both Database and Transaction classes.
/**
* Database class atomic operations (executed in implicit transactions)
*/
class Database<KeyIn, KeyOut, ValIn, ValOut> {
add(key: KeyIn, oper: ValIn): Promise<void>;
max(key: KeyIn, oper: ValIn): Promise<void>;
min(key: KeyIn, oper: ValIn): Promise<void>;
bitAnd(key: KeyIn, oper: ValIn): Promise<void>;
bitOr(key: KeyIn, oper: ValIn): Promise<void>;
bitXor(key: KeyIn, oper: ValIn): Promise<void>;
bitAndBuf(key: KeyIn, oper: Buffer): Promise<void>;
bitOrBuf(key: KeyIn, oper: Buffer): Promise<void>;
bitXorBuf(key: KeyIn, oper: Buffer): Promise<void>;
byteMin(key: KeyIn, oper: ValIn): Promise<void>;
byteMax(key: KeyIn, oper: ValIn): Promise<void>;
atomicOp(op: MutationType, key: KeyIn, oper: ValIn): Promise<void>;
atomicOpKB(op: MutationType, key: KeyIn, oper: Buffer): Promise<void>;
atomicOpNative(op: MutationType, key: NativeValue, oper: NativeValue): Promise<void>;
}
/**
* Transaction class atomic operations (part of transaction)
*/
class Transaction<KeyIn, KeyOut, ValIn, ValOut> {
add(key: KeyIn, oper: ValIn): void;
max(key: KeyIn, oper: ValIn): void;
min(key: KeyIn, oper: ValIn): void;
bitAnd(key: KeyIn, oper: ValIn): void;
bitOr(key: KeyIn, oper: ValIn): void;
bitXor(key: KeyIn, oper: ValIn): void;
bitAndBuf(key: KeyIn, oper: Buffer): void;
bitOrBuf(key: KeyIn, oper: Buffer): void;
bitXorBuf(key: KeyIn, oper: Buffer): void;
byteMin(key: KeyIn, value: ValIn): void;
byteMax(key: KeyIn, value: ValIn): void;
atomicOp(opType: MutationType, key: KeyIn, oper: ValIn): void;
atomicOpKB(opType: MutationType, key: KeyIn, oper: Buffer): void;
atomicOpNative(opType: MutationType, key: NativeValue, oper: NativeValue): void;
}
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// On database (implicit transaction)
await db.add("counter", Buffer.from([1, 0, 0, 0]));
// In explicit transaction
await db.doTransaction(async (tn) => {
tn.add("counter", Buffer.from([1, 0, 0, 0]));
tn.max("max_value", Buffer.from([100, 0, 0, 0]));
// Both operations are atomic within the transaction
});
Test FoundationDB operations with Jest's async/await support and lifecycle hooks.
import fdb from "foundationdb";
import { describe, beforeAll, afterAll, beforeEach, test, expect } from "@jest/globals";
describe("FoundationDB Tests", () => {
let db: ReturnType<typeof fdb.open>;
beforeAll(() => {
fdb.setAPIVersion(620);
db = fdb.open();
});
afterAll(() => {
db.close();
fdb.stopNetworkSync();
});
beforeEach(async () => {
// Clear test data
await db.clearRangeStartsWith("test:");
});
test("should perform basic operations", async () => {
await db.set("test:key", "value");
const result = await db.get("test:key");
expect(result?.toString()).toBe("value");
});
test("should handle transactions", async () => {
const count = await db.doTransaction(async (tn) => {
tn.set("test:counter", "5");
const val = await tn.get("test:counter");
return parseInt(val?.toString() || "0");
});
expect(count).toBe(5);
});
test("should handle errors gracefully", async () => {
await expect(async () => {
await db.doTransaction(async (tn) => {
throw new Error("Test error");
});
}).rejects.toThrow("Test error");
});
});
Use Mocha with Promise-based tests and proper cleanup.
import fdb from "foundationdb";
import { describe, before, after, beforeEach, it } from "mocha";
import { expect } from "chai";
describe("FoundationDB Operations", function() {
this.timeout(10000); // 10 second timeout
let db: ReturnType<typeof fdb.open>;
before(() => {
fdb.setAPIVersion(620);
db = fdb.open();
});
after(() => {
db.close();
fdb.stopNetworkSync();
});
beforeEach(async () => {
await db.clearRangeStartsWith("test:");
});
it("should read and write data", async () => {
await db.set("test:mocha", "data");
const value = await db.get("test:mocha");
expect(value?.toString()).to.equal("data");
});
it("should handle concurrent operations", async () => {
const operations = Array.from({ length: 10 }, (_, i) =>
db.set(`test:item:${i}`, `value${i}`)
);
await Promise.all(operations);
const items = await db.getRangeAllStartsWith("test:item:");
expect(items).to.have.lengthOf(10);
});
it("should support atomic operations", async () => {
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await Promise.all([
db.add("test:atomic", delta),
db.add("test:atomic", delta),
db.add("test:atomic", delta)
]);
const result = await db.get("test:atomic");
expect(result?.readBigInt64LE(0)).to.equal(3n);
});
});
Leverage Vitest's fast execution and modern testing features.
import fdb from "foundationdb";
import { describe, beforeAll, afterAll, beforeEach, test, expect } from "vitest";
describe("FoundationDB with Vitest", () => {
let db: ReturnType<typeof fdb.open>;
beforeAll(() => {
fdb.setAPIVersion(620);
db = fdb.open();
});
afterAll(() => {
db.close();
fdb.stopNetworkSync();
});
beforeEach(async () => {
await db.clearRangeStartsWith("test:");
});
test("concurrent writes with snapshot isolation", async () => {
const writes = Array.from({ length: 100 }, (_, i) =>
db.set(`test:concurrent:${i}`, `value${i}`)
);
await Promise.all(writes);
const results = await db.getRangeAllStartsWith("test:concurrent:");
expect(results).toHaveLength(100);
});
test("transaction retry logic", async () => {
let attempts = 0;
const result = await db.doTransaction(async (tn) => {
attempts++;
const value = await tn.get("test:retry");
tn.set("test:retry", (parseInt(value?.toString() || "0") + 1).toString());
return attempts;
});
expect(result).toBeGreaterThanOrEqual(1);
});
test("range queries with limits", async () => {
for (let i = 0; i < 50; i++) {
await db.set(`test:range:${i.toString().padStart(3, "0")}`, `value${i}`);
}
const page1 = await db.getRangeAll("test:range:", "test:range:~", { limit: 10 });
const page2 = await db.getRangeAll("test:range:", "test:range:~", {
limit: 10,
reverse: true
});
expect(page1).toHaveLength(10);
expect(page2).toHaveLength(10);
expect(page1[0][0].toString()).not.toBe(page2[0][0].toString());
});
});
Helper functions for common test scenarios.
import fdb, { FDBError } from "foundationdb";
export class FDBTestHelper {
private db: ReturnType<typeof fdb.open>;
private testPrefix: string;
constructor(testPrefix = "test:") {
fdb.setAPIVersion(620);
this.db = fdb.open();
this.testPrefix = testPrefix;
}
getDB() {
return this.db;
}
async cleanup() {
await this.db.clearRangeStartsWith(this.testPrefix);
}
async close() {
this.db.close();
}
async withTransaction<T>(
fn: (tn: any) => Promise<T>,
opts?: any
): Promise<T> {
return await this.db.doTransaction(fn, opts);
}
async expectError(
fn: () => Promise<any>,
errorCode?: number
): Promise<FDBError> {
try {
await fn();
throw new Error("Expected operation to throw");
} catch (error) {
if (error instanceof FDBError) {
if (errorCode !== undefined && error.code !== errorCode) {
throw new Error(`Expected error code ${errorCode}, got ${error.code}`);
}
return error;
}
throw error;
}
}
async populateTestData(count: number, prefix?: string) {
const pfx = prefix || this.testPrefix;
const operations = Array.from({ length: count }, (_, i) =>
this.db.set(`${pfx}${i}`, `value${i}`)
);
await Promise.all(operations);
}
async assertKeyExists(key: string): Promise<Buffer> {
const value = await this.db.get(key);
if (value === undefined) {
throw new Error(`Expected key "${key}" to exist`);
}
return value;
}
async assertKeyNotExists(key: string): Promise<void> {
const value = await this.db.get(key);
if (value !== undefined) {
throw new Error(`Expected key "${key}" to not exist`);
}
}
}
// Usage in tests
import { describe, beforeAll, afterAll, beforeEach, test, expect } from "vitest";
describe("Using FDBTestHelper", () => {
let helper: FDBTestHelper;
beforeAll(() => {
helper = new FDBTestHelper("test:helper:");
});
afterAll(async () => {
await helper.cleanup();
await helper.close();
});
beforeEach(async () => {
await helper.cleanup();
});
test("populate and verify test data", async () => {
await helper.populateTestData(10);
await helper.assertKeyExists("test:helper:0");
await helper.assertKeyExists("test:helper:9");
await helper.assertKeyNotExists("test:helper:10");
});
test("transaction helper", async () => {
const result = await helper.withTransaction(async (tn) => {
tn.set("test:helper:tx", "value");
return "success";
});
expect(result).toBe("success");
});
});
Manage database connections efficiently across your application.
import fdb from "foundationdb";
class FDBConnectionPool {
private static instance: FDBConnectionPool;
private db: ReturnType<typeof fdb.open> | null = null;
private initialized = false;
private constructor() {}
static getInstance(): FDBConnectionPool {
if (!FDBConnectionPool.instance) {
FDBConnectionPool.instance = new FDBConnectionPool();
}
return FDBConnectionPool.instance;
}
async initialize(opts?: { clusterFile?: string; trace?: string }) {
if (this.initialized) return;
try {
fdb.setAPIVersion(620);
if (opts?.trace) {
fdb.configNetwork({
trace_enable: opts.trace,
trace_format: "json",
});
}
this.db = fdb.open(opts?.clusterFile);
this.db.setNativeOptions({
transaction_timeout: 10000,
transaction_retry_limit: 100,
max_watches: 20000,
});
this.initialized = true;
} catch (error) {
console.error("Failed to initialize FDB connection:", error);
throw error;
}
}
getDatabase(): ReturnType<typeof fdb.open> {
if (!this.db) {
throw new Error("Database not initialized. Call initialize() first.");
}
return this.db;
}
async shutdown() {
if (this.db) {
this.db.close();
fdb.stopNetworkSync();
this.db = null;
this.initialized = false;
}
}
isInitialized(): boolean {
return this.initialized;
}
}
// Usage across application
export const fdbPool = FDBConnectionPool.getInstance();
// In application startup
await fdbPool.initialize({ trace: "./fdb-traces" });
// In any module
import { fdbPool } from "./fdb-pool";
async function getData(key: string) {
const db = fdbPool.getDatabase();
return await db.get(key);
}
// Graceful shutdown
process.on("SIGTERM", async () => {
await fdbPool.shutdown();
process.exit(0);
});
Implement robust retry logic for transient failures.
import fdb, { FDBError } from "foundationdb";
async function withRetry<T>(
operation: () => Promise<T>,
maxRetries = 5,
baseDelay = 100
): Promise<T> {
let lastError: Error | undefined;
for (let attempt = 0; attempt <= maxRetries; attempt++) {
try {
return await operation();
} catch (error) {
lastError = error as Error;
// Don't blindly retry when the commit outcome is unknown
if (error instanceof FDBError && error.code === 1021) {
throw error; // 1021 (commit_unknown_result): the write may already have been applied
}
if (attempt < maxRetries) {
const delay = baseDelay * Math.pow(2, attempt);
const jitter = Math.random() * delay * 0.1;
await new Promise((resolve) => setTimeout(resolve, delay + jitter));
}
}
}
throw new Error(`Operation failed after ${maxRetries} retries: ${lastError?.message}`);
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const result = await withRetry(async () => {
return await db.doTransaction(async (tn) => {
const value = await tn.get("critical:data");
tn.set("critical:data", "updated");
return value;
});
});
Protect against cascading failures with a circuit breaker.
import fdb, { FDBError } from "foundationdb";
class CircuitBreaker {
private failures = 0;
private lastFailureTime = 0;
private state: "closed" | "open" | "half-open" = "closed";
constructor(
private threshold = 5,
private resetTimeout = 30000
) {}
async execute<T>(operation: () => Promise<T>): Promise<T> {
if (this.state === "open") {
if (Date.now() - this.lastFailureTime > this.resetTimeout) {
this.state = "half-open";
} else {
throw new Error("Circuit breaker is OPEN");
}
}
try {
const result = await operation();
this.onSuccess();
return result;
} catch (error) {
this.onFailure();
throw error;
}
}
private onSuccess() {
this.failures = 0;
this.state = "closed";
}
private onFailure() {
this.failures++;
this.lastFailureTime = Date.now();
if (this.failures >= this.threshold) {
this.state = "open";
}
}
getState() {
return this.state;
}
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const breaker = new CircuitBreaker(5, 30000);
async function safeQuery(key: string): Promise<Buffer | undefined> {
return await breaker.execute(async () => {
return await db.get(key);
});
}
Process large datasets efficiently with automatic chunking.
import fdb from "foundationdb";
async function processBatchInChunks<T>(
db: ReturnType<typeof fdb.open>,
prefix: string,
processor: (batch: Array<[Buffer, Buffer]>) => Promise<T[]>,
chunkSize = 100
): Promise<T[]> {
const results: T[] = [];
let startKey = prefix;
while (true) {
const chunk = await db.getRangeAll(
startKey,
prefix + "~",
{ limit: chunkSize }
);
if (chunk.length === 0) break;
const chunkResults = await processor(chunk);
results.push(...chunkResults);
if (chunk.length < chunkSize) break;
// Continue from after last key
startKey = chunk[chunk.length - 1][0].toString() + "\x00";
}
return results;
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const processed = await processBatchInChunks(
db,
"users:",
async (batch) => {
return batch.map(([key, value]) => ({
key: key.toString(),
parsed: JSON.parse(value.toString()),
}));
},
50
);
Implement application-level caching with FDB watches.
import fdb from "foundationdb";
class FDBCache<T> {
private cache = new Map<string, { value: T; watch: any }>();
constructor(private db: ReturnType<typeof fdb.open>) {}
async get(key: string, parser: (buf: Buffer) => T): Promise<T | undefined> {
// Check cache first
const cached = this.cache.get(key);
if (cached) {
return cached.value;
}
// Fetch and cache with watch
const watch = await this.db.getAndWatch(key);
if (watch.value === undefined) {
return undefined;
}
const value = parser(watch.value);
this.cache.set(key, { value, watch });
// Invalidate on change
watch.promise.then(() => {
this.cache.delete(key);
});
return value;
}
async set(key: string, value: T, serializer: (val: T) => Buffer) {
await this.db.set(key, serializer(value));
this.cache.delete(key); // Invalidate cache
}
clear() {
this.cache.forEach(({ watch }) => watch.cancel());
this.cache.clear();
}
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const cache = new FDBCache(db);
const user = await cache.get(
"user:123",
(buf) => JSON.parse(buf.toString())
);
await cache.set(
"user:123",
{ name: "Alice", email: "alice@example.com" },
(val) => Buffer.from(JSON.stringify(val))
);
Implement distributed locking for coordination.
import fdb from "foundationdb";
class DistributedLock {
private lockKey: string;
private lockValue: string;
constructor(
private db: ReturnType<typeof fdb.open>,
lockName: string,
private ttl = 30000
) {
this.lockKey = `locks:${lockName}`;
this.lockValue = `${Date.now()}-${Math.random()}`;
}
async acquire(timeout = 10000): Promise<boolean> {
const startTime = Date.now();
while (Date.now() - startTime < timeout) {
try {
const acquired = await this.db.doTransaction(async (tn) => {
const existing = await tn.get(this.lockKey);
if (existing === undefined) {
// Lock is free
tn.set(this.lockKey, this.lockValue);
return true;
}
// Check if lock expired
const lockData = existing.toString();
const lockTime = parseInt(lockData.split("-")[0]);
if (Date.now() - lockTime > this.ttl) {
// Lock expired, take it
tn.set(this.lockKey, this.lockValue);
return true;
}
return false;
});
if (acquired) return true;
} catch (error) {
// Transaction conflict, retry
}
// Wait before retry
await new Promise((resolve) => setTimeout(resolve, 100));
}
return false;
}
async release(): Promise<void> {
await this.db.doTransaction(async (tn) => {
const existing = await tn.get(this.lockKey);
if (existing?.toString() === this.lockValue) {
tn.clear(this.lockKey);
}
});
}
async withLock<T>(fn: () => Promise<T>, timeout = 10000): Promise<T> {
const acquired = await this.acquire(timeout);
if (!acquired) {
throw new Error(`Failed to acquire lock: ${this.lockKey}`);
}
try {
return await fn();
} finally {
await this.release();
}
}
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const lock = new DistributedLock(db, "resource:123", 30000);
await lock.withLock(async () => {
// Critical section - only one process can execute this at a time
const value = await db.get("shared:resource");
await db.set("shared:resource", "updated");
});
Implement event sourcing with ordered event storage.
import fdb, { tuple } from "foundationdb";
interface Event {
type: string;
data: any;
timestamp: number;
version?: Buffer;
}
class EventStore {
private db: ReturnType<typeof fdb.open>;
constructor() {
fdb.setAPIVersion(620);
this.db = fdb.open()
.withKeyEncoding(fdb.encoders.tuple)
.withValueEncoding(fdb.encoders.json);
}
async appendEvent(
streamId: string,
eventType: string,
data: any
): Promise<Buffer> {
const versionstamp = await this.db.doTransaction(async (tn) => {
const key = ["events", streamId, tuple.unboundVersionstamp()];
const event: Event = {
type: eventType,
data,
timestamp: Date.now(),
};
tn.setVersionstampedKey(key, event);
// The versionstamp promise resolves only after the commit, so return the holder
// here and await its .promise outside the transaction callback (returning the
// promise itself from inside would stall the commit)
return tn.getVersionstamp();
});
return versionstamp.promise;
}
async getEvents(
streamId: string,
fromVersion?: Buffer,
limit?: number
): Promise<Event[]> {
const start = fromVersion
? ["events", streamId, fromVersion]
: ["events", streamId];
const events = await this.db.getRangeAll(
start,
["events", streamId, Buffer.from([0xff])],
{ limit }
);
return events.map(([key, value]) => ({
...(value as any),
version: key[2] as Buffer,
}));
}
async replay(
streamId: string,
handler: (event: Event) => void | Promise<void>
): Promise<void> {
const events = await this.getEvents(streamId);
for (const event of events) {
await handler(event);
}
}
async getSnapshot(streamId: string): Promise<any> {
const events = await this.getEvents(streamId);
// Rebuild state from events
let state: any = {};
for (const event of events) {
state = this.applyEvent(state, event);
}
return state;
}
private applyEvent(state: any, event: Event): any {
// Apply event to state based on event type
switch (event.type) {
case "created":
return { ...event.data, created: true };
case "updated":
return { ...state, ...event.data };
case "deleted":
return { ...state, deleted: true };
default:
return state;
}
}
}
// Usage
const store = new EventStore();
// Append events
await store.appendEvent("order:123", "OrderCreated", {
items: ["item1", "item2"],
total: 100,
});
await store.appendEvent("order:123", "ItemAdded", {
item: "item3",
});
await store.appendEvent("order:123", "OrderConfirmed", {
confirmedAt: Date.now(),
});
// Replay events
await store.replay("order:123", (event) => {
console.log(`Event: ${event.type}`, event.data);
});
// Get current snapshot
const currentState = await store.getSnapshot("order:123");
console.log("Current state:", currentState);Understanding and handling common FoundationDB error codes.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function handleFDBErrors() {
try {
await db.doTransaction(async (tn) => {
tn.set("key", "value");
});
} catch (error) {
if (error instanceof FDBError) {
switch (error.code) {
case 1007: // Transaction too old (exceeded the ~5 second read window)
console.error("Transaction ran too long - split the work or let doTransaction retry");
break;
case 1009: // Request for future version
console.error("Read version ahead of storage servers - usually transient, retry");
break;
case 1020: // Not committed (conflict with another transaction)
console.error("Transaction conflict - doTransaction retries this automatically");
break;
case 1021: // Commit result unknown (may or may not have committed)
console.error("Commit status unknown - check whether the data was written before retrying");
break;
case 1025: // Transaction cancelled
console.error("Transaction was cancelled");
break;
case 1031: // Transaction timed out
console.error("Operation exceeded the configured transaction timeout");
break;
case 2101: // Transaction too large
console.error("Transaction exceeds the 10 MB size limit - split into smaller transactions");
break;
default:
console.error(`FDB Error ${error.code}: ${error.message}`);
}
} else {
console.error("Non-FDB error:", error);
}
}
}
Handle transaction conflicts with proper retry logic.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function handleConflicts() {
let attempts = 0;
const maxAttempts = 5;
while (attempts < maxAttempts) {
try {
const result = await db.doTransaction(async (tn) => {
attempts++;
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
// Simulate some processing
await new Promise(resolve => setTimeout(resolve, 10));
tn.set("counter", (count + 1).toString());
return count + 1;
});
console.log(`Success after ${attempts} attempts:`, result);
return result;
} catch (error) {
if (error instanceof FDBError && error.code === 1020) {
console.log(`Attempt ${attempts} failed with conflict, retrying...`);
if (attempts >= maxAttempts) {
throw new Error(`Failed after ${maxAttempts} attempts`);
}
// doTransaction handles retry automatically, but showing manual retry for illustration
} else {
throw error;
}
}
}
}
Handle and prevent timeout errors effectively.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function handleTimeouts() {
try {
await db.doTransaction(async (tn) => {
// Long-running operation
for await (const batch of tn.getRangeBatch("data:", "data:~")) {
// Process batch
await processBatch(batch);
}
}, {
timeout: 30000, // 30 second timeout
});
} catch (error) {
if (error instanceof FDBError && error.code === 1031) {
console.error("Transaction timed out");
// Strategies to fix:
// 1. Increase timeout
// 2. Split into smaller transactions
// 3. Use snapshot reads where possible
// 4. Optimize query performance
}
throw error;
}
}
async function processBatch(batch: any) {
// Batch processing logic
}
Handle network-related errors and connectivity issues.
import fdb, { FDBError } from "foundationdb";
async function handleNetworkErrors() {
fdb.setAPIVersion(620);
try {
const db = fdb.open("/path/to/fdb.cluster");
await db.doTransaction(async (tn) => {
tn.set("key", "value");
});
} catch (error) {
if (error instanceof FDBError) {
if (error.code === 1031) {
console.error("Cannot connect to cluster - check network and cluster file");
} else if (error.code === 1032) {
console.error("Cluster file invalid or corrupted");
} else if (error.code === 2501) {
console.error("No coordinators available - check FDB cluster health");
}
}
throw error;
}
}
Handle directory-specific errors.
import fdb, { directory, DirectoryError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function handleDirectoryErrors() {
try {
// Try to create existing directory
const dir = await directory.create(db, ["myapp", "users"]);
} catch (error) {
if (error instanceof DirectoryError) {
console.error("Directory operation failed:", error.message);
// Check specific error messages
if (error.message.includes("already exists")) {
console.log("Directory exists, opening instead");
const dir = await directory.open(db, ["myapp", "users"]);
} else if (error.message.includes("does not exist")) {
console.log("Directory missing, creating");
const dir = await directory.create(db, ["myapp", "users"]);
} else if (error.message.includes("layer mismatch")) {
console.error("Directory layer type mismatch");
}
}
throw error;
}
}
Implement graceful error recovery patterns.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function safeOperation<T>(
operation: () => Promise<T>,
fallback?: T
): Promise<T | undefined> {
try {
return await operation();
} catch (error) {
if (error instanceof FDBError) {
console.error(`FDB Error ${error.code}:`, error.message);
// Return fallback for specific errors
if (error.code === 1031 || error.code === 1007) {
console.log("Using fallback value due to transient error");
return fallback;
}
}
// Re-throw non-recoverable errors
throw error;
}
}
// Usage
const value = await safeOperation(
async () => await db.get("config:setting"),
Buffer.from("default-value")
);
// With retry wrapper
async function withGracefulRetry<T>(
operation: () => Promise<T>,
maxRetries = 3
): Promise<T> {
let lastError: Error;
for (let i = 0; i < maxRetries; i++) {
try {
return await operation();
} catch (error) {
lastError = error as Error;
if (error instanceof FDBError) {
// Don't blindly retry errors where a retry is unsafe or pointless
// (1021: commit result unknown, 2101: transaction too large)
if ([1021, 2101].includes(error.code)) {
throw error;
}
}
// Exponential backoff
await new Promise(resolve =>
setTimeout(resolve, Math.pow(2, i) * 100)
);
}
}
throw new Error(`Failed after ${maxRetries} retries: ${lastError!.message}`);
}
Diagnose and fix common performance problems.
Issue: Slow Transactions
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Problem: Large transaction with many operations
async function slowTransaction() {
await db.doTransaction(async (tn) => {
// Thousands of operations in single transaction
for (let i = 0; i < 10000; i++) {
tn.set(`key:${i}`, `value:${i}`);
}
});
}
// Solution: Split into smaller transactions
async function fastTransactions() {
const batchSize = 100;
for (let i = 0; i < 10000; i += batchSize) {
await db.doTransaction(async (tn) => {
for (let j = i; j < i + batchSize && j < 10000; j++) {
tn.set(`key:${j}`, `value:${j}`);
}
});
}
}
// Solution: Use appropriate transaction options
await db.doTransaction(async (tn) => {
// Process data
}, {
timeout: 30000,
size_limit: 10000000, // bytes; 10 MB is the FDB maximum
});
Issue: High Conflict Rate
// Problem: Many transactions competing for same keys
async function highConflict() {
await Promise.all(
Array.from({ length: 100 }, () =>
db.doTransaction(async (tn) => {
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
tn.set("counter", (count + 1).toString());
})
)
);
}
// Solution: Use atomic operations
async function lowConflict() {
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await Promise.all(
Array.from({ length: 100 }, () =>
db.add("counter", delta)
)
);
}
// Solution: Shard hot keys
class ShardedCounter {
constructor(private db: ReturnType<typeof fdb.open>, private shards = 10) {}
async increment() {
const shard = Math.floor(Math.random() * this.shards);
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await this.db.add(`counter:${shard}`, delta);
}
async getTotal(): Promise<number> {
let total = 0;
for (let i = 0; i < this.shards; i++) {
const value = await this.db.get(`counter:${i}`);
if (value) {
total += Number(value.readBigInt64LE(0));
}
}
return total;
}
}
Issue: Memory Usage
// Problem: Loading too much data at once
async function highMemory() {
const allData = await db.getRangeAll("data:", "data:~");
// Process huge dataset - may cause OOM
}
// Solution: Use streaming with batches
async function lowMemory() {
for await (const batch of db.getRangeBatch("data:", "data:~")) {
// Process one batch at a time
await processBatch(batch);
}
}
// Solution: Use appropriate streaming mode
import { StreamingMode } from "foundationdb";
async function optimizedStreaming() {
for await (const batch of db.getRangeBatch("data:", "data:~", {
streamingMode: StreamingMode.Small, // Smaller batches
})) {
await processBatch(batch);
}
}
async function processBatch(batch: any) {
// Process batch
}
Diagnose and resolve connection issues.
Issue: Cannot Connect to Cluster
import fdb from "foundationdb";
// Check 1: Verify cluster file
console.log("Checking cluster file...");
try {
const clusterFile = "/etc/foundationdb/fdb.cluster";
const fs = require("fs");
const content = fs.readFileSync(clusterFile, "utf8");
console.log("Cluster file content:", content);
} catch (error) {
console.error("Cannot read cluster file:", error);
}
// Check 2: Test connection
fdb.setAPIVersion(620);
try {
const db = fdb.open();
await db.get("test");
console.log("Connection successful");
} catch (error) {
console.error("Connection failed:", error);
}
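// A hedged follow-up to check 2: db.get() can hang indefinitely when no coordinator
// is reachable, so race it against a timer to turn a silent hang into a clear
// diagnostic (probeCluster and the 5000 ms figure are illustrative choices).
async function probeCluster(db: ReturnType<typeof fdb.open>, ms = 5000): Promise<void> {
  const timeout = new Promise<never>((_, reject) =>
    setTimeout(() => reject(new Error(`No response from cluster after ${ms}ms`)), ms)
  );
  await Promise.race([db.get("test"), timeout]);
  console.log(`Cluster responded within ${ms}ms`);
}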
// Check 3: Verify network configuration
fdb.configNetwork({
trace_enable: "./fdb-traces",
trace_format: "json",
});
// Check traces for connection errors
Issue: Transaction Timeouts
// Diagnostic: Check transaction size
await db.doTransaction(async (tn) => {
// Perform operations
const size = await tn.getApproximateSize();
console.log("Transaction size:", size);
if (size > 5000000) {
console.warn("Transaction size large, may timeout");
}
});
// Solution: Increase timeout or split transaction
db.setNativeOptions({
transaction_timeout: 30000, // 30 seconds
});
// Or per-transaction
await db.doTransaction(async (tn) => {
// Operations
}, {
timeout: 60000, // 60 seconds
});
Issue: Watch Not Triggering
// Problem: Watch created outside transaction
// const watch = db.watch("key"); // ERROR
// Solution: Create watch in transaction
const watch = await db.doTransaction(async (tn) => {
return tn.watch("key");
});
// Problem: Awaiting watch inside transaction
await db.doTransaction(async (tn) => {
const watch = tn.watch("key");
// await watch.promise; // DEADLOCK!
return watch;
}).then(async (watch) => {
await watch.promise; // Correct
});
// Problem: Too many watches
db.setNativeOptions({
max_watches: 20000, // Increase limit
});
Prevent and diagnose data integrity problems.
Issue: Encoding Mismatch
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Problem: Writing with one encoder, reading with another
await db.withValueEncoding(fdb.encoders.json)
.set("key", { value: 123 });
const wrong = await db.withValueEncoding(fdb.encoders.string)
.get("key"); // Wrong encoding!
// Solution: Consistent encoder usage
const jsonDb = db.withValueEncoding(fdb.encoders.json);
await jsonDb.set("key", { value: 123 });
const correct = await jsonDb.get("key"); // { value: 123 }
// Solution: Document encoding choices
class UserStore {
private db: ReturnType<typeof fdb.open>;
constructor(db: ReturnType<typeof fdb.open>) {
this.db = db
.at("users:")
.withKeyEncoding(fdb.encoders.string)
.withValueEncoding(fdb.encoders.json);
}
async save(id: string, user: any) {
await this.db.set(id, user);
}
async load(id: string) {
return await this.db.get(id);
}
}
Issue: Lost Updates
// Problem: Not using transactions properly
async function lostUpdate() {
const value = await db.get("counter");
const count = parseInt(value?.toString() || "0");
// Another process might update here!
await db.set("counter", (count + 1).toString());
}
// Solution: Use transactions
async function safeUpdate() {
await db.doTransaction(async (tn) => {
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
tn.set("counter", (count + 1).toString());
});
}
// Better: Use atomic operations
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await db.add("counter", delta);Issue: Directory Conflicts
import fdb, { directory } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Problem: Racing directory creation
async function racingCreation() {
try {
const dir = await directory.create(db, ["myapp", "tenant"]);
} catch (error) {
// Second process fails
}
}
// Solution: Use createOrOpen
async function safeCreation() {
const dir = await directory.createOrOpen(db, ["myapp", "tenant"]);
// Works for all processes
}
// Solution: Handle errors gracefully
async function robustCreation() {
try {
const dir = await directory.create(db, ["myapp", "tenant"]);
} catch (error) {
if (error.message.includes("already exists")) {
const dir = await directory.open(db, ["myapp", "tenant"]);
return dir;
}
throw error;
}
}
Tools and techniques for debugging issues.
Enable Tracing
import fdb from "foundationdb";
// Enable detailed tracing
fdb.configNetwork({
trace_enable: "./fdb-traces",
trace_format: "json",
trace_log_group: "myapp",
});
fdb.setAPIVersion(620);
const db = fdb.open();
// Check traces at ./fdb-traces/*.json
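// A hedged helper for scanning the newest trace file: assumes trace_format "json"
// writes one JSON event per line with "Severity" and "Type" fields, and that
// warnings and errors use severities of 30 and above. Adjust to your trace layout.
import { readdirSync, readFileSync } from "fs";
import { join } from "path";
function printTraceProblems(traceDir = "./fdb-traces"): void {
  const files = readdirSync(traceDir).filter((f) => f.endsWith(".json")).sort();
  const latest = files[files.length - 1];
  if (!latest) return;
  for (const line of readFileSync(join(traceDir, latest), "utf8").split("\n")) {
    if (!line.trim()) continue;
    const event = JSON.parse(line);
    if (Number(event.Severity) >= 30) console.log(event.Type, event);
  }
}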
// Look for: errors, warnings, slow_task events
Transaction Debugging
import fdb, { TransactionOptionCode } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Enable transaction logging
tn.setOption(TransactionOptionCode.DebugTransactionIdentifier, "my-tx-123");
tn.setOption(TransactionOptionCode.LogTransaction);
// Perform operations
tn.set("key", "value");
}, {
debug_transaction_identifier: "test-transaction",
log_transaction: true,
});
// Check traces for transaction details
Performance Profiling
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function profileOperation() {
const start = Date.now();
await db.doTransaction(async (tn) => {
const opStart = Date.now();
const value = await tn.get("key");
console.log(`Get took ${Date.now() - opStart}ms`);
const setStart = Date.now();
tn.set("key", "value");
console.log(`Set took ${Date.now() - setStart}ms`);
});
console.log(`Total transaction: ${Date.now() - start}ms`);
}
// Monitor transaction sizes
await db.doTransaction(async (tn) => {
for (let i = 0; i < 1000; i++) {
tn.set(`key:${i}`, `value:${i}`);
if (i % 100 === 0) {
console.log(`Size at ${i}:`, await tn.getApproximateSize());
}
}
});
1. Keep Transactions Short
Minimize transaction duration to reduce conflicts and avoid timeouts.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Bad: Long-running computation in transaction
await db.doTransaction(async (tn) => {
const data = await tn.get("data");
// Expensive computation
const result = await expensiveComputation(data);
tn.set("result", result);
});
// Good: Compute outside transaction
const data = await db.get("data");
const result = await expensiveComputation(data);
await db.doTransaction(async (tn) => {
tn.set("result", result);
});
2. Use Atomic Operations for Counters
Avoid read-modify-write patterns for counters.
// Bad: Read-modify-write
await db.doTransaction(async (tn) => {
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
tn.set("counter", (count + 1).toString());
});
// Good: Atomic add
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await db.add("counter", delta);3. Use Snapshot Reads When Possible
Reduce conflicts by using snapshot reads for non-critical data.
await db.doTransaction(async (tn) => {
// Critical read (causes conflicts)
const critical = await tn.get("critical:data");
// Non-critical read (no conflicts)
const metadata = await tn.snapshot().get("metadata");
// Write based on critical data
tn.set("result", processData(critical));
});
4. Batch Related Operations
Group related operations in single transactions.
// Bad: Multiple transactions
await db.set("user:alice:name", "Alice");
await db.set("user:alice:email", "alice@example.com");
await db.set("user:alice:age", "30");
// Good: Single transaction
await db.doTransaction(async (tn) => {
tn.set("user:alice:name", "Alice");
tn.set("user:alice:email", "alice@example.com");
tn.set("user:alice:age", "30");
});
5. Handle Large Datasets with Chunking
Split large operations into manageable chunks.
async function processLargeDataset() {
let startKey = "data:";
const chunkSize = 1000;
while (true) {
const chunk = await db.getRangeAll(
startKey,
"data:~",
{ limit: chunkSize }
);
if (chunk.length === 0) break;
await db.doTransaction(async (tn) => {
for (const [key, value] of chunk) {
// Process item
tn.set(key.toString() + ":processed", "true");
}
});
if (chunk.length < chunkSize) break;
startKey = chunk[chunk.length - 1][0].toString() + "\x00";
}
}
6. Use Hierarchical Key Structure
Organize keys hierarchically for efficient queries.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Good: Hierarchical structure
await db.set("app:users:alice:profile", "...");
await db.set("app:users:alice:settings", "...");
await db.set("app:users:bob:profile", "...");
await db.set("app:orders:12345:items", "...");
// Query all user data
const aliceData = await db.getRangeAllStartsWith("app:users:alice:");
// Query all orders
const orders = await db.getRangeAllStartsWith("app:orders:");7. Use Tuple Encoding for Composite Keys
Leverage tuple encoding for structured keys.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open().withKeyEncoding(fdb.encoders.tuple);
// Store with composite keys
await db.set(["user", "alice", "profile"], "data");
await db.set(["user", "alice", "settings"], "data");
await db.set(["order", 12345, "items"], "data");
// Query by prefix
const aliceData = await db.getRangeAllStartsWith(["user", "alice"]);
const orders = await db.getRangeAllStartsWith(["order"]);8. Use Directories for Multi-Tenancy
Isolate tenant data with directory layer.
import fdb, { directory } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Create tenant directories
const tenant1 = await directory.createOrOpen(db, ["tenants", "acme"]);
const tenant2 = await directory.createOrOpen(db, ["tenants", "techcorp"]);
// Scoped databases
const acmeDb = db.at(tenant1);
const techcorpDb = db.at(tenant2);
// Isolated data
await acmeDb.set("data", "Acme data");
await techcorpDb.set("data", "Techcorp data");
9. Implement Retry Logic with Backoff
Handle transient failures gracefully.
async function withRetry<T>(
operation: () => Promise<T>,
maxRetries = 5,
baseDelay = 100
): Promise<T> {
for (let attempt = 0; attempt <= maxRetries; attempt++) {
try {
return await operation();
} catch (error) {
if (attempt === maxRetries) throw error;
const delay = baseDelay * Math.pow(2, attempt);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
throw new Error("Should not reach here");
}
const result = await withRetry(() => db.get("key"));
10. Monitor Transaction Metrics
Track performance and errors for optimization.
class MetricsCollector {
private metrics = {
transactionCount: 0,
errorCount: 0,
avgDuration: 0,
};
async trackTransaction<T>(
operation: () => Promise<T>
): Promise<T> {
const start = Date.now();
try {
const result = await operation();
this.metrics.transactionCount++;
const duration = Date.now() - start;
this.metrics.avgDuration =
(this.metrics.avgDuration * (this.metrics.transactionCount - 1) + duration) /
this.metrics.transactionCount;
return result;
} catch (error) {
this.metrics.errorCount++;
throw error;
}
}
getMetrics() {
return { ...this.metrics };
}
}
const metrics = new MetricsCollector();
await metrics.trackTransaction(() =>
db.doTransaction(async (tn) => {
tn.set("key", "value");
})
);
console.log("Metrics:", metrics.getMetrics());11. Close Connections Properly
Ensure clean shutdown of database connections.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Use try-finally for cleanup
try {
await db.set("key", "value");
} finally {
db.close();
fdb.stopNetworkSync();
}
// Handle graceful shutdown
process.on("SIGTERM", () => {
console.log("Shutting down...");
db.close();
fdb.stopNetworkSync();
process.exit(0);
});
process.on("SIGINT", () => {
console.log("Interrupted, cleaning up...");
db.close();
fdb.stopNetworkSync();
process.exit(0);
}
12. Use Connection Pooling
Reuse database connections across the application.
class DatabasePool {
private static db: ReturnType<typeof fdb.open> | null = null;
static initialize() {
if (!this.db) {
fdb.setAPIVersion(620);
this.db = fdb.open();
this.db.setNativeOptions({
transaction_timeout: 10000,
max_watches: 20000,
});
}
}
static getDatabase() {
if (!this.db) {
throw new Error("Database not initialized");
}
return this.db;
}
static shutdown() {
if (this.db) {
this.db.close();
fdb.stopNetworkSync();
this.db = null;
}
}
}
// Initialize once at startup
DatabasePool.initialize();
// Use throughout application
const db = DatabasePool.getDatabase();
await db.set("key", "value");
// Shutdown on exit
process.on("exit", () => {
DatabasePool.shutdown();
});