Range query operations for efficiently querying multiple key-value pairs with async iterators, batching, and configurable streaming modes. Range queries are fundamental for scanning data in FoundationDB.
Stream key-value pairs one at a time using async iteration.
/**
* Async iterator over key-value pairs in a range
* Values are streamed from the database as they are read
* @param start - Start key or key selector (inclusive)
* @param end - End key or key selector (exclusive). If omitted, uses start as prefix
* @param opts - Range query options
* @returns AsyncIterableIterator of [key, value] tuples
*/
getRange(
start: KeyIn | KeySelector<KeyIn>,
end?: KeyIn | KeySelector<KeyIn>,
opts?: RangeOptions
): AsyncIterableIterator<[KeyOut, ValOut]>;
interface KeySelector<Key> {
/** The reference key */
key: Key;
/** Include equal keys */
orEqual: boolean;
/** Offset from the reference key */
offset: number;
}
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Iterate over range
for await (const [key, value] of tn.getRange("user:", "user:~")) {
console.log(key.toString(), "=>", value.toString());
// Process each item as it arrives
// Can break early if needed
if (someCondition) break;
}
// Using prefix (automatic range calculation)
for await (const [key, value] of tn.getRange("product:")) {
// Iterates over all keys starting with "product:"
console.log(key.toString(), value.toString());
}
});
Stream key-value pairs in batches for better performance with large ranges.
/**
* Async iterator over batches of key-value pairs
* More efficient than getRange for large datasets
* @param start - Start key or key selector (inclusive)
* @param end - End key or key selector (exclusive). If omitted, uses start as prefix
* @param opts - Range query options
* @returns AsyncIterableIterator of arrays of [key, value] tuples
*/
getRangeBatch(
start: KeyIn | KeySelector<KeyIn>,
end?: KeyIn | KeySelector<KeyIn>,
opts?: RangeOptions
): AsyncIterableIterator<Array<[KeyOut, ValOut]>>;
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Process in batches for better performance
for await (const batch of tn.getRangeBatch("user:", "user:~")) {
console.log("Processing batch of", batch.length, "items");
// Process entire batch at once
for (const [key, value] of batch) {
// Efficient batch processing
}
}
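// Sketch (not in the original): batches make cheap progress reporting
// possible without per-row bookkeeping
let scanned = 0;
for await (const batch of tn.getRangeBatch("user:", "user:~")) {
scanned += batch.length;
console.log(`Scanned ${scanned} rows so far`);
}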
});
Fetch all key-value pairs in a range as a single array.
/**
* Get all key-value pairs in a range as an array
* Prefetches and returns all values rather than streaming
* Convenient for small ranges
* @param start - Start key or key selector (inclusive)
* @param end - End key or key selector (exclusive). If omitted, uses start as prefix
* @param opts - Range query options
* @returns Promise resolving to array of [key, value] tuples
*/
getRangeAll(
start: KeyIn | KeySelector<KeyIn>,
end?: KeyIn | KeySelector<KeyIn>,
opts?: RangeOptions
): Promise<Array<[KeyOut, ValOut]>>;
/**
* Get all key-value pairs with a given prefix
* @param prefix - Key prefix to match
* @param opts - Range query options
* @returns Promise resolving to array of [key, value] tuples
*/
getRangeAllStartsWith(
prefix: KeyIn | KeySelector<KeyIn>,
opts?: RangeOptions
): Promise<Array<[KeyOut, ValOut]>>;
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Simple range query
const users = await db.getRangeAllStartsWith("user:");
for (const [key, value] of users) {
console.log(key.toString(), "=>", value.toString());
}
// With explicit start and end
const range = await db.getRangeAll("user:a", "user:m");
// Using in a transaction
await db.doTransaction(async (tn) => {
const products = await tn.getRangeAll("product:", "product:~");
console.log("Found", products.length, "products");
});
Configure range query behavior.
/**
* Options for range queries
*/
interface RangeOptions {
/** Maximum number of results to return */
limit?: number;
/** Return results in reverse order (from high to low keys) */
reverse?: boolean;
/** Streaming mode controlling fetch eagerness */
streamingMode?: StreamingMode;
/** Target byte size for results */
targetBytes?: number;
}
/**
* Streaming modes control how eagerly FDB prefetches data
*/
enum StreamingMode {
/** Fetch entire range eagerly (best for small ranges) */
WantAll = -2,
/** Default balanced fetching for iterators */
Iterator = -1,
/** Fetch exact number of items specified */
Exact = 0,
/** Small batches (more round trips, less memory) */
Small = 1,
/** Medium batches (balanced) */
Medium = 2,
/** Large batches (fewer round trips, more memory) */
Large = 3,
/** Very large batches (maximum throughput) */
Serial = 4,
}
Usage Example:
import fdb, { StreamingMode } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Limit results
const firstTen = await tn.getRangeAll("user:", "user:~", {
limit: 10,
});
// Reverse order
const lastFive = await tn.getRangeAll("user:", "user:~", {
limit: 5,
reverse: true,
});
// Large streaming mode for bulk reads
for await (const batch of tn.getRangeBatch("data:", "data:~", {
streamingMode: StreamingMode.Large,
})) {
// Process large batches efficiently
}
// Target specific byte size
const chunk = await tn.getRangeAll("blob:", "blob:~", {
targetBytes: 1024 * 1024, // ~1 MB
});
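// Sketch: options compose; a capped, newest-first scan in a single call
// ("err:" is an illustrative prefix)
const recentErrors = await tn.getRangeAll("err:", "err:~", {
limit: 20,
reverse: true,
streamingMode: StreamingMode.WantAll,
});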
});
Use key selectors for precise range boundaries.
/**
* Key selectors enable relative positioning and range boundaries
*/
interface KeySelector<Key> {
/** The reference key */
key: Key;
/** Include equal keys in the result */
orEqual: boolean;
/** Offset from the reference key */
offset: number;
}
Usage Example:
import fdb, { keySelector } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Range with selectors
const range = await tn.getRangeAll(
keySelector.firstGreaterOrEqual("user:100"),
keySelector.lastLessThan("user:200")
);
// Find keys after a specific key
const next = await tn.getRangeAll(
keySelector.firstGreaterThan("user:alice"),
"user:~",
{ limit: 5 }
);
// Complex selector usage
const lastKey = await tn.getKey(keySelector.lastLessThan("user:~"));
if (lastKey) {
// Get last 10 users
const lastUsers = await tn.getRangeAll(
keySelector.add(keySelector.firstGreaterOrEqual(lastKey), -9),
keySelector.firstGreaterThan(lastKey)
);
}
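// Sketch: the helpers above are shorthand for raw selector objects;
// keySelector(key, orEqual, offset) is assumed to be the low-level
// constructor (firstGreaterOrEqual(k) ≈ keySelector(k, false, 1))
const rawStart = keySelector("user:bob", false, 1);
const fromBob = await tn.getRangeAll(rawStart, "user:~", { limit: 3 });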
});
Query all keys with a specific prefix.
/**
* Get all key-value pairs with a given prefix
* Automatically calculates the range covering every key that starts with prefix
* @param prefix - Key prefix to match
* @param opts - Range query options
* @returns Promise resolving to array of [key, value] tuples
*/
getRangeAllStartsWith(
prefix: KeyIn | KeySelector<KeyIn>,
opts?: RangeOptions
): Promise<Array<[KeyOut, ValOut]>>;
/**
* Clear all keys with a given prefix
* @param prefix - Key prefix to match
*/
clearRangeStartsWith(prefix: KeyIn): void;
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Get all with prefix
const sessions = await db.getRangeAllStartsWith("session:");
// In transaction
await db.doTransaction(async (tn) => {
// Query with prefix
const tempData = await tn.getRangeAllStartsWith("temp:");
// Clear all temp data
tn.clearRangeStartsWith("temp:");
});
// With options
const recent = await db.getRangeAllStartsWith("log:", {
limit: 100,
reverse: true, // Get most recent
});
Query ranges in reverse order (high to low keys).
/**
* Range options with reverse flag
*/
interface RangeOptions {
/** Return results in reverse order (high to low keys) */
reverse?: boolean;
// ... other options
}
Usage Example:
import fdb, { keySelector } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Get last 10 entries
const lastTen = await tn.getRangeAll("log:", "log:~", {
limit: 10,
reverse: true,
});
// Iterate in reverse
for await (const [key, value] of tn.getRange("user:", "user:~", {
reverse: true,
})) {
console.log("Reverse:", key.toString());
}
// Reverse with key selectors
const range = await tn.getRangeAll(
keySelector.firstGreaterOrEqual("user:a"),
keySelector.lastLessThan("user:z"),
{ reverse: true }
);
});
Common patterns for range queries.
Usage Example:
import fdb, { keySelector } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Pattern 1: Process all items
for await (const [key, value] of tn.getRange("item:")) {
// Process each item
}
// Pattern 2: Early termination
for await (const [key, value] of tn.getRange("search:")) {
if (matches(value)) {
console.log("Found:", key.toString());
break; // Stop searching
}
}
// Pattern 3: Batch processing
for await (const batch of tn.getRangeBatch("data:", "data:~")) {
await processBatch(batch);
}
// Pattern 4: Pagination
const pageSize = 20;
let pageStart: any = keySelector.firstGreaterOrEqual("user:");
while (true) {
const page = await tn.getRangeAll(pageStart, "user:~", { limit: pageSize });
if (page.length === 0) break;
// Process page
console.log("Page:", page);
// Next page starts after the last key returned
pageStart = keySelector.firstGreaterThan(page[page.length - 1][0]);
}
// Pattern 5: Count items
let count = 0;
for await (const _ of tn.getRange("counter:")) {
count++;
}
console.log("Total items:", count);
});
Choose optimal streaming mode for your use case.
Usage Example:
import fdb, { StreamingMode } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Small range: use WantAll for single fetch
const config = await tn.getRangeAll("config:", "config:~", {
streamingMode: StreamingMode.WantAll,
});
// Large range with unknown size: use Iterator (default)
for await (const [key, value] of tn.getRange("data:")) {
// Balanced fetching
}
// Bulk export: use Serial for maximum throughput
for await (const batch of tn.getRangeBatch("export:", "export:~", {
streamingMode: StreamingMode.Serial,
})) {
// Large batches, fewer round trips
}
// Memory constrained: use Small
for await (const batch of tn.getRangeBatch("large:", "large:~", {
streamingMode: StreamingMode.Small,
})) {
// Smaller batches, more round trips
}
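// Sketch: Exact mode fetches exactly the requested rows and is assumed
// to require an explicit limit ("page:" is an illustrative prefix)
for await (const batch of tn.getRangeBatch("page:", "page:~", {
streamingMode: StreamingMode.Exact,
limit: 25,
})) {
// A single batch of up to 25 rows
}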
});
Remove ranges of keys.
/**
* Remove all keys in a range [start, end)
* @param start - Start of range (inclusive)
* @param end - End of range (exclusive). If omitted, removes all keys with start as prefix
*/
clearRange(start: KeyIn, end?: KeyIn): void;
/**
* Remove all keys with a given prefix
* @param prefix - The key prefix to match
*/
clearRangeStartsWith(prefix: KeyIn): void;
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Clear explicit range
tn.clearRange("temp:100", "temp:200");
// Clear by prefix (same as clearRange with one arg)
tn.clearRangeStartsWith("cache:");
});
// Clear all keys in a scoped subspace (run as its own transaction,
// not nested inside the one above)
const sessionDb = db.at("sessions:");
await sessionDb.doTransaction(async (tn) => {
tn.clearRange("", "\xFF"); // Clear entire subspace
});
Analyze range size for performance planning.
/**
* Estimate the size of a key range in bytes
* @param start - Start of range (inclusive)
* @param end - End of range (exclusive)
* @returns Promise resolving to estimated size in bytes
*/
getEstimatedRangeSizeBytes(start: KeyIn, end: KeyIn): Promise<number>;
/**
* Get split points for dividing a range into chunks
* Useful for parallel processing
* @param start - Start of range (inclusive)
* @param end - End of range (exclusive)
* @param chunkSize - Desired chunk size in bytes
* @returns Promise resolving to array of split point keys
*/
getRangeSplitPoints(
start: KeyIn,
end: KeyIn,
chunkSize: number
): Promise<KeyOut[]>;
Usage Example:
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Estimate range size
const sizeBytes = await db.getEstimatedRangeSizeBytes("user:", "user:~");
console.log("Range size:", (sizeBytes / 1024 / 1024).toFixed(2), "MB");
// Split for parallel processing
const splitPoints = await db.getRangeSplitPoints(
"data:",
"data:~",
100 * 1024 * 1024 // 100 MB chunks
);
// Process each chunk in parallel
const chunks: Array<[Buffer, Buffer]> = [];
for (let i = 0; i < splitPoints.length - 1; i++) {
chunks.push([splitPoints[i], splitPoints[i + 1]]);
}
await Promise.all(
chunks.map(([start, end]) =>
db.doTransaction(async (tn) => {
for await (const [key, value] of tn.getRange(start, end)) {
// Process chunk
}
})
)
);
Best practices for efficient range queries.
Usage Example:
import fdb, { keySelector, StreamingMode } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Tip 1: Use appropriate streaming mode
await db.doTransaction(async (tn) => {
// For small known ranges
const small = await tn.getRangeAll("config:", "config:~", {
streamingMode: StreamingMode.WantAll,
});
// For large unknown ranges
for await (const batch of tn.getRangeBatch("data:", "data:~", {
streamingMode: StreamingMode.Serial, // Bulk reads
})) {
// Process
}
});
// Tip 2: Use limits to avoid transaction timeout
await db.doTransaction(async (tn) => {
// Process in manageable chunks
let startKey = "log:";
const batchSize = 1000;
while (true) {
const batch = await tn.getRangeAll(
keySelector.firstGreaterOrEqual(startKey),
"log:~",
{ limit: batchSize }
);
if (batch.length === 0) break;
// Process batch
for (const [key, value] of batch) {
// ...
}
startKey = keySelector.firstGreaterThan(batch[batch.length - 1][0]);
// Commit periodically for long-running operations
if (shouldCommit()) break;
}
});
// Tip 3: Use snapshot reads when possible
await db.doTransaction(async (tn) => {
// Snapshot reads don't cause conflicts
const data = await tn.snapshot().getRangeAll("readonly:");
// Regular write
tn.set("counter", "value");
});
// Tip 4: Scope to subspace for cleaner code
const userDb = db.at("users:");
const users = await userDb.getRangeAll("", "\xFF"); // All users
Test FoundationDB operations with Jest's async/await support and lifecycle hooks.
import fdb from "foundationdb";
import { describe, beforeAll, afterAll, beforeEach, test, expect } from "@jest/globals";
describe("FoundationDB Tests", () => {
let db: ReturnType<typeof fdb.open>;
beforeAll(() => {
fdb.setAPIVersion(620);
db = fdb.open();
});
afterAll(() => {
db.close();
fdb.stopNetworkSync();
});
beforeEach(async () => {
// Clear test data
await db.clearRangeStartsWith("test:");
});
test("should perform basic operations", async () => {
await db.set("test:key", "value");
const result = await db.get("test:key");
expect(result?.toString()).toBe("value");
});
test("should handle transactions", async () => {
const count = await db.doTransaction(async (tn) => {
tn.set("test:counter", "5");
const val = await tn.get("test:counter");
return parseInt(val?.toString() || "0");
});
expect(count).toBe(5);
});
test("should handle errors gracefully", async () => {
await expect(async () => {
await db.doTransaction(async (tn) => {
throw new Error("Test error");
});
}).rejects.toThrow("Test error");
});
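// A small addition (sketch): range reads assert the same way; keys under
// "test:r:" are cleared by the beforeEach hook above
test("should read back a prefix range", async () => {
await db.set("test:r:1", "a");
await db.set("test:r:2", "b");
const rows = await db.getRangeAllStartsWith("test:r:");
expect(rows).toHaveLength(2);
});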
});
Use Mocha with Promise-based tests and proper cleanup.
import fdb from "foundationdb";
import { describe, before, after, beforeEach, it } from "mocha";
import { expect } from "chai";
describe("FoundationDB Operations", function() {
this.timeout(10000); // 10 second timeout
let db: ReturnType<typeof fdb.open>;
before(() => {
fdb.setAPIVersion(620);
db = fdb.open();
});
after(() => {
db.close();
fdb.stopNetworkSync();
});
beforeEach(async () => {
await db.clearRangeStartsWith("test:");
});
it("should read and write data", async () => {
await db.set("test:mocha", "data");
const value = await db.get("test:mocha");
expect(value?.toString()).to.equal("data");
});
it("should handle concurrent operations", async () => {
const operations = Array.from({ length: 10 }, (_, i) =>
db.set(`test:item:${i}`, `value${i}`)
);
await Promise.all(operations);
const items = await db.getRangeAllStartsWith("test:item:");
expect(items).to.have.lengthOf(10);
});
it("should support atomic operations", async () => {
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await Promise.all([
db.add("test:atomic", delta),
db.add("test:atomic", delta),
db.add("test:atomic", delta)
]);
const result = await db.get("test:atomic");
expect(result?.readBigInt64LE(0)).to.equal(3n);
});
});
Leverage Vitest's fast execution and modern testing features.
import fdb from "foundationdb";
import { describe, beforeAll, afterAll, beforeEach, test, expect } from "vitest";
describe("FoundationDB with Vitest", () => {
let db: ReturnType<typeof fdb.open>;
beforeAll(() => {
fdb.setAPIVersion(620);
db = fdb.open();
});
afterAll(() => {
db.close();
fdb.stopNetworkSync();
});
beforeEach(async () => {
await db.clearRangeStartsWith("test:");
});
test("concurrent writes with snapshot isolation", async () => {
const writes = Array.from({ length: 100 }, (_, i) =>
db.set(`test:concurrent:${i}`, `value${i}`)
);
await Promise.all(writes);
const results = await db.getRangeAllStartsWith("test:concurrent:");
expect(results).toHaveLength(100);
});
test("transaction retry logic", async () => {
let attempts = 0;
const result = await db.doTransaction(async (tn) => {
attempts++;
const value = await tn.get("test:retry");
tn.set("test:retry", (parseInt(value?.toString() || "0") + 1).toString());
return attempts;
});
expect(result).toBeGreaterThanOrEqual(1);
});
test("range queries with limits", async () => {
for (let i = 0; i < 50; i++) {
await db.set(`test:range:${i.toString().padStart(3, "0")}`, `value${i}`);
}
const page1 = await db.getRangeAll("test:range:", "test:range:~", { limit: 10 });
const page2 = await db.getRangeAll("test:range:", "test:range:~", {
limit: 10,
reverse: true
});
expect(page1).toHaveLength(10);
expect(page2).toHaveLength(10);
expect(page1[0][0].toString()).not.toBe(page2[0][0].toString());
});
});
Helper functions for common test scenarios.
import fdb, { FDBError } from "foundationdb";
export class FDBTestHelper {
private db: ReturnType<typeof fdb.open>;
private testPrefix: string;
constructor(testPrefix = "test:") {
fdb.setAPIVersion(620);
this.db = fdb.open();
this.testPrefix = testPrefix;
}
getDB() {
return this.db;
}
async cleanup() {
await this.db.clearRangeStartsWith(this.testPrefix);
}
async close() {
this.db.close();
}
async withTransaction<T>(
fn: (tn: any) => Promise<T>,
opts?: any
): Promise<T> {
return await this.db.doTransaction(fn, opts);
}
async expectError(
fn: () => Promise<any>,
errorCode?: number
): Promise<FDBError> {
try {
await fn();
throw new Error("Expected operation to throw");
} catch (error) {
if (error instanceof FDBError) {
if (errorCode !== undefined && error.code !== errorCode) {
throw new Error(`Expected error code ${errorCode}, got ${error.code}`);
}
return error;
}
throw error;
}
}
async populateTestData(count: number, prefix?: string) {
const pfx = prefix || this.testPrefix;
const operations = Array.from({ length: count }, (_, i) =>
this.db.set(`${pfx}${i}`, `value${i}`)
);
await Promise.all(operations);
}
async assertKeyExists(key: string): Promise<Buffer> {
const value = await this.db.get(key);
if (value === undefined) {
throw new Error(`Expected key "${key}" to exist`);
}
return value;
}
async assertKeyNotExists(key: string): Promise<void> {
const value = await this.db.get(key);
if (value !== undefined) {
throw new Error(`Expected key "${key}" to not exist`);
}
}
}
// Usage in tests
import { describe, beforeAll, afterAll, beforeEach, test, expect } from "vitest";
describe("Using FDBTestHelper", () => {
let helper: FDBTestHelper;
beforeAll(() => {
helper = new FDBTestHelper("test:helper:");
});
afterAll(async () => {
await helper.cleanup();
await helper.close();
});
beforeEach(async () => {
await helper.cleanup();
});
test("populate and verify test data", async () => {
await helper.populateTestData(10);
await helper.assertKeyExists("test:helper:0");
await helper.assertKeyExists("test:helper:9");
await helper.assertKeyNotExists("test:helper:10");
});
test("transaction helper", async () => {
const result = await helper.withTransaction(async (tn) => {
tn.set("test:helper:tx", "value");
return "success";
});
expect(result).toBe("success");
});
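// Sketch: expectError rethrows anything that isn't an FDBError, which
// composes cleanly with Vitest's rejection matchers
test("expectError rethrows non-FDB errors", async () => {
await expect(
helper.expectError(async () => {
throw new Error("plain error");
})
).rejects.toThrow("plain error");
});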
});
Manage database connections efficiently across your application.
import fdb from "foundationdb";
class FDBConnectionPool {
private static instance: FDBConnectionPool;
private db: ReturnType<typeof fdb.open> | null = null;
private initialized = false;
private constructor() {}
static getInstance(): FDBConnectionPool {
if (!FDBConnectionPool.instance) {
FDBConnectionPool.instance = new FDBConnectionPool();
}
return FDBConnectionPool.instance;
}
async initialize(opts?: { clusterFile?: string; trace?: string }) {
if (this.initialized) return;
try {
fdb.setAPIVersion(620);
if (opts?.trace) {
fdb.configNetwork({
trace_enable: opts.trace,
trace_format: "json",
});
}
this.db = fdb.open(opts?.clusterFile);
this.db.setNativeOptions({
transaction_timeout: 10000,
transaction_retry_limit: 100,
max_watches: 20000,
});
this.initialized = true;
} catch (error) {
console.error("Failed to initialize FDB connection:", error);
throw error;
}
}
getDatabase(): ReturnType<typeof fdb.open> {
if (!this.db) {
throw new Error("Database not initialized. Call initialize() first.");
}
return this.db;
}
async shutdown() {
if (this.db) {
this.db.close();
fdb.stopNetworkSync();
this.db = null;
this.initialized = false;
}
}
isInitialized(): boolean {
return this.initialized;
}
}
// Usage across application
export const fdbPool = FDBConnectionPool.getInstance();
// In application startup
await fdbPool.initialize({ trace: "./fdb-traces" });
// In any module
import { fdbPool } from "./fdb-pool";
async function getData(key: string) {
const db = fdbPool.getDatabase();
return await db.get(key);
}
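// Sketch (assumption): a readiness probe built on the pool; "health:probe"
// is an arbitrary key, any cheap read works
export async function isHealthy(): Promise<boolean> {
try {
await fdbPool.getDatabase().get("health:probe");
return true;
} catch {
return false;
}
}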
// Graceful shutdown
process.on("SIGTERM", async () => {
await fdbPool.shutdown();
process.exit(0);
});
Implement robust retry logic for transient failures.
import fdb, { FDBError } from "foundationdb";
async function withRetry<T>(
operation: () => Promise<T>,
maxRetries = 5,
baseDelay = 100
): Promise<T> {
let lastError: Error | undefined;
for (let attempt = 0; attempt <= maxRetries; attempt++) {
try {
return await operation();
} catch (error) {
lastError = error as Error;
// Don't retry non-retryable errors (e.g. 2101: transaction too large)
if (error instanceof FDBError && error.code === 2101) {
throw error; // Not retryable
}
if (attempt < maxRetries) {
const delay = baseDelay * Math.pow(2, attempt);
const jitter = Math.random() * delay * 0.1;
await new Promise((resolve) => setTimeout(resolve, delay + jitter));
}
}
}
throw new Error(`Operation failed after ${maxRetries} retries: ${lastError?.message}`);
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const result = await withRetry(async () => {
return await db.doTransaction(async (tn) => {
const value = await tn.get("critical:data");
tn.set("critical:data", "updated");
return value;
});
});
Protect against cascading failures with circuit breaker.
import fdb, { FDBError } from "foundationdb";
class CircuitBreaker {
private failures = 0;
private lastFailureTime = 0;
private state: "closed" | "open" | "half-open" = "closed";
constructor(
private threshold = 5,
private resetTimeout = 30000
) {}
async execute<T>(operation: () => Promise<T>): Promise<T> {
if (this.state === "open") {
if (Date.now() - this.lastFailureTime > this.resetTimeout) {
this.state = "half-open";
} else {
throw new Error("Circuit breaker is OPEN");
}
}
try {
const result = await operation();
this.onSuccess();
return result;
} catch (error) {
this.onFailure();
throw error;
}
}
private onSuccess() {
this.failures = 0;
this.state = "closed";
}
private onFailure() {
this.failures++;
this.lastFailureTime = Date.now();
if (this.failures >= this.threshold) {
this.state = "open";
}
}
getState() {
return this.state;
}
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const breaker = new CircuitBreaker(5, 30000);
async function safeQuery(key: string): Promise<Buffer | undefined> {
return await breaker.execute(async () => {
return await db.get(key);
});
}
Process large datasets efficiently with automatic chunking.
import fdb from "foundationdb";
async function processBatchInChunks<T>(
db: ReturnType<typeof fdb.open>,
prefix: string,
processor: (batch: Array<[Buffer, Buffer]>) => Promise<T[]>,
chunkSize = 100
): Promise<T[]> {
const results: T[] = [];
let startKey = prefix;
while (true) {
const chunk = await db.getRangeAll(
startKey,
prefix + "~",
{ limit: chunkSize }
);
if (chunk.length === 0) break;
const chunkResults = await processor(chunk);
results.push(...chunkResults);
if (chunk.length < chunkSize) break;
// Continue from after last key
startKey = chunk[chunk.length - 1][0].toString() + "\x00";
}
return results;
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const processed = await processBatchInChunks(
db,
"users:",
async (batch) => {
return batch.map(([key, value]) => ({
key: key.toString(),
parsed: JSON.parse(value.toString()),
}));
},
50
);
Implement application-level caching with FDB watches.
import fdb from "foundationdb";
class FDBCache<T> {
private cache = new Map<string, { value: T; watch: any }>();
constructor(private db: ReturnType<typeof fdb.open>) {}
async get(key: string, parser: (buf: Buffer) => T): Promise<T | undefined> {
// Check cache first
const cached = this.cache.get(key);
if (cached) {
return cached.value;
}
// Fetch and cache with watch
const watch = await this.db.getAndWatch(key);
if (watch.value === undefined) {
return undefined;
}
const value = parser(watch.value);
this.cache.set(key, { value, watch });
// Invalidate on change
watch.promise.then(() => {
this.cache.delete(key);
});
return value;
}
async set(key: string, value: T, serializer: (val: T) => Buffer) {
await this.db.set(key, serializer(value));
this.cache.delete(key); // Invalidate cache
}
clear() {
this.cache.forEach(({ watch }) => watch.cancel());
this.cache.clear();
}
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const cache = new FDBCache(db);
const user = await cache.get(
"user:123",
(buf) => JSON.parse(buf.toString())
);
await cache.set(
"user:123",
{ name: "Alice", email: "alice@example.com" },
(val) => Buffer.from(JSON.stringify(val))
);
Implement distributed locking for coordination.
import fdb from "foundationdb";
class DistributedLock {
private lockKey: string;
private lockValue: string;
constructor(
private db: ReturnType<typeof fdb.open>,
lockName: string,
private ttl = 30000
) {
this.lockKey = `locks:${lockName}`;
this.lockValue = `${Date.now()}-${Math.random()}`;
}
async acquire(timeout = 10000): Promise<boolean> {
const startTime = Date.now();
while (Date.now() - startTime < timeout) {
try {
const acquired = await this.db.doTransaction(async (tn) => {
const existing = await tn.get(this.lockKey);
if (existing === undefined) {
// Lock is free
tn.set(this.lockKey, this.lockValue);
return true;
}
// Check if lock expired
const lockData = existing.toString();
const lockTime = parseInt(lockData.split("-")[0]);
if (Date.now() - lockTime > this.ttl) {
// Lock expired, take it
tn.set(this.lockKey, this.lockValue);
return true;
}
return false;
});
if (acquired) return true;
} catch (error) {
// Transaction conflict, retry
}
// Wait before retry
await new Promise((resolve) => setTimeout(resolve, 100));
}
return false;
}
async release(): Promise<void> {
await this.db.doTransaction(async (tn) => {
const existing = await tn.get(this.lockKey);
if (existing?.toString() === this.lockValue) {
tn.clear(this.lockKey);
}
});
}
async withLock<T>(fn: () => Promise<T>, timeout = 10000): Promise<T> {
const acquired = await this.acquire(timeout);
if (!acquired) {
throw new Error(`Failed to acquire lock: ${this.lockKey}`);
}
try {
return await fn();
} finally {
await this.release();
}
}
}
// Usage
fdb.setAPIVersion(620);
const db = fdb.open();
const lock = new DistributedLock(db, "resource:123", 30000);
await lock.withLock(async () => {
// Critical section - only one process can execute this at a time
const value = await db.get("shared:resource");
await db.set("shared:resource", "updated");
});
Implement event sourcing with ordered event storage.
import fdb, { tuple } from "foundationdb";
interface Event {
type: string;
data: any;
timestamp: number;
version?: Buffer;
}
class EventStore {
private db: ReturnType<typeof fdb.open>;
constructor() {
fdb.setAPIVersion(620);
this.db = fdb.open()
.withKeyEncoding(fdb.encoders.tuple)
.withValueEncoding(fdb.encoders.json);
}
async appendEvent(
streamId: string,
eventType: string,
data: any
): Promise<Buffer> {
return await this.db.doTransaction(async (tn) => {
const key = ["events", streamId, tuple.unboundVersionstamp()];
const event: Event = {
type: eventType,
data,
timestamp: Date.now(),
};
tn.setVersionstampedKey(key, event);
// Return the versionstamp holder without awaiting it here: its
// promise only resolves after commit, so awaiting inside the
// transaction body would deadlock
return tn.getVersionstamp();
}).then((vs) => vs.promise);
}
async getEvents(
streamId: string,
fromVersion?: Buffer,
limit?: number
): Promise<Event[]> {
// With tuple key encoding, omitting the end key treats start as a
// prefix, which avoids hand-building an end bound that sorts correctly
// against versionstamp elements
const rows = await this.db.getRangeAll(["events", streamId], undefined, {
limit,
});
const mapped = rows.map(([key, value]) => ({
...(value as any),
version: key[2] as Buffer,
}));
// Versionstamps sort in commit order; filter client-side when resuming
// (assumes the unpacked versionstamp element is Buffer-comparable)
return fromVersion
? mapped.filter((e) => Buffer.compare(e.version as Buffer, fromVersion) > 0)
: mapped;
}
async replay(
streamId: string,
handler: (event: Event) => void | Promise<void>
): Promise<void> {
const events = await this.getEvents(streamId);
for (const event of events) {
await handler(event);
}
}
async getSnapshot(streamId: string): Promise<any> {
const events = await this.getEvents(streamId);
// Rebuild state from events
let state: any = {};
for (const event of events) {
state = this.applyEvent(state, event);
}
return state;
}
private applyEvent(state: any, event: Event): any {
// Apply event to state based on event type
switch (event.type) {
case "created":
return { ...event.data, created: true };
case "updated":
return { ...state, ...event.data };
case "deleted":
return { ...state, deleted: true };
default:
return state;
}
}
}
// Usage
const store = new EventStore();
// Append events
await store.appendEvent("order:123", "OrderCreated", {
items: ["item1", "item2"],
total: 100,
});
await store.appendEvent("order:123", "ItemAdded", {
item: "item3",
});
await store.appendEvent("order:123", "OrderConfirmed", {
confirmedAt: Date.now(),
});
// Replay events
await store.replay("order:123", (event) => {
console.log(`Event: ${event.type}`, event.data);
});
// Get current snapshot
const currentState = await store.getSnapshot("order:123");
console.log("Current state:", currentState);Understanding and handling common FoundationDB error codes.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function handleFDBErrors() {
try {
await db.doTransaction(async (tn) => {
tn.set("key", "value");
});
} catch (error) {
if (error instanceof FDBError) {
switch (error.code) {
case 1007: // Transaction too old
console.error("Transaction ran past the ~5 second limit - split the work and retry");
break;
case 1009: // Request for future version
console.error("Clock skew detected - check system time");
break;
case 1020: // Not committed (conflict)
console.error("Transaction conflicted with another - safe to retry");
break;
case 1021: // Commit result unknown (transaction may have succeeded)
console.error("Commit status unknown - check if data was written");
break;
case 1025: // Transaction cancelled
console.error("Transaction was cancelled");
break;
case 1031: // Transaction timed out
console.error("Operation exceeded timeout limit");
break;
case 2101: // Transaction too large
console.error("Transaction size exceeds limit - split into smaller transactions");
break;
default:
console.error(`FDB Error ${error.code}: ${error.message}`);
}
} else {
console.error("Non-FDB error:", error);
}
}
}
Handle transaction conflicts with proper retry logic.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function handleConflicts() {
let attempts = 0;
const maxAttempts = 5;
while (attempts < maxAttempts) {
try {
const result = await db.doTransaction(async (tn) => {
attempts++;
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
// Simulate some processing
await new Promise(resolve => setTimeout(resolve, 10));
tn.set("counter", (count + 1).toString());
return count + 1;
});
console.log(`Success after ${attempts} attempts:`, result);
return result;
} catch (error) {
if (error instanceof FDBError && error.code === 1020) {
console.log(`Attempt ${attempts} failed with conflict, retrying...`);
if (attempts >= maxAttempts) {
throw new Error(`Failed after ${maxAttempts} attempts`);
}
// doTransaction handles retry automatically, but showing manual retry for illustration
} else {
throw error;
}
}
}
}
Handle and prevent timeout errors effectively.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function handleTimeouts() {
try {
await db.doTransaction(async (tn) => {
// Long-running operation
for await (const batch of tn.getRangeBatch("data:", "data:~")) {
// Process batch
await processBatch(batch);
}
}, {
timeout: 30000, // 30 second timeout
});
} catch (error) {
if (error instanceof FDBError && error.code === 1031) {
console.error("Transaction timed out");
// Strategies to fix:
// 1. Increase timeout
// 2. Split into smaller transactions
// 3. Use snapshot reads where possible
// 4. Optimize query performance
}
throw error;
}
}
async function processBatch(batch: any) {
// Batch processing logic
}
Handle network-related errors and connectivity issues.
import fdb, { FDBError } from "foundationdb";
async function handleNetworkErrors() {
fdb.setAPIVersion(620);
try {
const db = fdb.open("/path/to/fdb.cluster");
await db.doTransaction(async (tn) => {
tn.set("key", "value");
});
} catch (error) {
if (error instanceof FDBError) {
if (error.code === 1004) {
console.error("Operation timed out - check network and cluster health");
} else if (error.code === 2104) {
console.error("Cluster file (connection string) invalid or corrupted");
} else {
console.error(`FDB Error ${error.code}: ${error.message}`);
}
}
throw error;
}
}
Handle directory-specific errors.
import fdb, { directory, DirectoryError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function handleDirectoryErrors() {
try {
// Try to create existing directory
const dir = await directory.create(db, ["myapp", "users"]);
} catch (error) {
if (error instanceof DirectoryError) {
console.error("Directory operation failed:", error.message);
// Check specific error messages
if (error.message.includes("already exists")) {
console.log("Directory exists, opening instead");
const dir = await directory.open(db, ["myapp", "users"]);
} else if (error.message.includes("does not exist")) {
console.log("Directory missing, creating");
const dir = await directory.create(db, ["myapp", "users"]);
} else if (error.message.includes("layer mismatch")) {
console.error("Directory layer type mismatch");
}
}
throw error;
}
}
Implement graceful error recovery patterns.
import fdb, { FDBError } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function safeOperation<T>(
operation: () => Promise<T>,
fallback?: T
): Promise<T | undefined> {
try {
return await operation();
} catch (error) {
if (error instanceof FDBError) {
console.error(`FDB Error ${error.code}:`, error.message);
// Return fallback for specific errors
if (error.code === 1031 || error.code === 1007) {
console.log("Using fallback value due to transient error");
return fallback;
}
}
// Re-throw non-recoverable errors
throw error;
}
}
// Usage
const value = await safeOperation(
async () => await db.get("config:setting"),
Buffer.from("default-value")
);
// With retry wrapper
async function withGracefulRetry<T>(
operation: () => Promise<T>,
maxRetries = 3
): Promise<T> {
let lastError: Error;
for (let i = 0; i < maxRetries; i++) {
try {
return await operation();
} catch (error) {
lastError = error as Error;
if (error instanceof FDBError) {
// Don't retry fatal errors (e.g. transaction too large, invalid cluster file)
if ([2101, 2104].includes(error.code)) {
throw error;
}
}
// Exponential backoff
await new Promise(resolve =>
setTimeout(resolve, Math.pow(2, i) * 100)
);
}
}
throw new Error(`Failed after ${maxRetries} retries: ${lastError!.message}`);
}
Diagnose and fix common performance problems.
Issue: Slow Transactions
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Problem: Large transaction with many operations
async function slowTransaction() {
await db.doTransaction(async (tn) => {
// Thousands of operations in single transaction
for (let i = 0; i < 10000; i++) {
tn.set(`key:${i}`, `value:${i}`);
}
});
}
// Solution: Split into smaller transactions
async function fastTransactions() {
const batchSize = 100;
for (let i = 0; i < 10000; i += batchSize) {
await db.doTransaction(async (tn) => {
for (let j = i; j < i + batchSize && j < 10000; j++) {
tn.set(`key:${j}`, `value:${j}`);
}
});
}
}
// Solution: Use appropriate transaction options
await db.doTransaction(async (tn) => {
// Process data
}, {
timeout: 30000,
size_limit: 10000000, // bytes; FDB caps transactions at 10 MB
});
Issue: High Conflict Rate
// Problem: Many transactions competing for same keys
async function highConflict() {
await Promise.all(
Array.from({ length: 100 }, () =>
db.doTransaction(async (tn) => {
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
tn.set("counter", (count + 1).toString());
})
)
);
}
// Solution: Use atomic operations
async function lowConflict() {
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await Promise.all(
Array.from({ length: 100 }, () =>
db.add("counter", delta)
)
);
}
// Solution: Shard hot keys
class ShardedCounter {
constructor(private db: ReturnType<typeof fdb.open>, private shards = 10) {}
async increment() {
const shard = Math.floor(Math.random() * this.shards);
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await this.db.add(`counter:${shard}`, delta);
}
async getTotal(): Promise<number> {
let total = 0;
for (let i = 0; i < this.shards; i++) {
const value = await this.db.get(`counter:${i}`);
if (value) {
total += Number(value.readBigInt64LE(0));
}
}
return total;
}
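// Sketch (not in the original): resetting clears every shard in one
// transaction
async reset() {
await this.db.doTransaction(async (tn) => {
for (let i = 0; i < this.shards; i++) tn.clear(`counter:${i}`);
});
}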
}
Issue: Memory Usage
// Problem: Loading too much data at once
async function highMemory() {
const allData = await db.getRangeAll("data:", "data:~");
// Process huge dataset - may cause OOM
}
// Solution: Use streaming with batches
async function lowMemory() {
for await (const batch of db.getRangeBatch("data:", "data:~")) {
// Process one batch at a time
await processBatch(batch);
}
}
// Solution: Use appropriate streaming mode
import { StreamingMode } from "foundationdb";
async function optimizedStreaming() {
for await (const batch of db.getRangeBatch("data:", "data:~", {
streamingMode: StreamingMode.Small, // Smaller batches
})) {
await processBatch(batch);
}
}
async function processBatch(batch: any) {
// Process batch
}
Diagnose and resolve connection issues.
Issue: Cannot Connect to Cluster
import fdb from "foundationdb";
// Check 1: Verify cluster file
console.log("Checking cluster file...");
try {
const clusterFile = "/etc/foundationdb/fdb.cluster";
const fs = require("fs");
const content = fs.readFileSync(clusterFile, "utf8");
console.log("Cluster file content:", content);
} catch (error) {
console.error("Cannot read cluster file:", error);
}
// Check 2: Test connection
fdb.setAPIVersion(620);
try {
const db = fdb.open();
await db.get("test");
console.log("Connection successful");
} catch (error) {
console.error("Connection failed:", error);
}
// Check 3: Verify network configuration
fdb.configNetwork({
trace_enable: "./fdb-traces",
trace_format: "json",
});
// Check traces for connection errors
Issue: Transaction Timeouts
// Diagnostic: Check transaction size
await db.doTransaction(async (tn) => {
// Perform operations
const size = await tn.getApproximateSize();
console.log("Transaction size:", size);
if (size > 5000000) {
console.warn("Transaction size large, may timeout");
}
});
// Solution: Increase timeout or split transaction
db.setNativeOptions({
transaction_timeout: 30000, // 30 seconds
});
// Or per-transaction
await db.doTransaction(async (tn) => {
// Operations
}, {
timeout: 60000, // 60 seconds
});
Issue: Watch Not Triggering
// Problem: Watch created outside transaction
// const watch = db.watch("key"); // ERROR
// Solution: Create watch in transaction
const watch = await db.doTransaction(async (tn) => {
return tn.watch("key");
});
// Problem: Awaiting watch inside transaction
await db.doTransaction(async (tn) => {
const watch = tn.watch("key");
// await watch.promise; // DEADLOCK!
return watch;
}).then(async (watch) => {
await watch.promise; // Correct
});
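// Sketch: cancel watches you no longer need; each active watch counts
// against the max_watches limit below
const tempWatch = await db.doTransaction(async (tn) => tn.watch("key"));
tempWatch.cancel();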
// Problem: Too many watches
db.setNativeOptions({
max_watches: 20000, // Increase limit
});
Prevent and diagnose data integrity problems.
Issue: Encoding Mismatch
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Problem: Writing with one encoder, reading with another
await db.withValueEncoding(fdb.encoders.json)
.set("key", { value: 123 });
const wrong = await db.withValueEncoding(fdb.encoders.string)
.get("key"); // Wrong encoding!
// Solution: Consistent encoder usage
const jsonDb = db.withValueEncoding(fdb.encoders.json);
await jsonDb.set("key", { value: 123 });
const correct = await jsonDb.get("key"); // { value: 123 }
// Solution: Document encoding choices
class UserStore {
private db: ReturnType<typeof fdb.open>;
constructor(db: ReturnType<typeof fdb.open>) {
this.db = db
.at("users:")
.withKeyEncoding(fdb.encoders.string)
.withValueEncoding(fdb.encoders.json);
}
async save(id: string, user: any) {
await this.db.set(id, user);
}
async load(id: string) {
return await this.db.get(id);
}
}
Issue: Lost Updates
// Problem: Not using transactions properly
async function lostUpdate() {
const value = await db.get("counter");
const count = parseInt(value?.toString() || "0");
// Another process might update here!
await db.set("counter", (count + 1).toString());
}
// Solution: Use transactions
async function safeUpdate() {
await db.doTransaction(async (tn) => {
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
tn.set("counter", (count + 1).toString());
});
}
// Better: Use atomic operations
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await db.add("counter", delta);Issue: Directory Conflicts
import fdb, { directory } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Problem: Racing directory creation
async function racingCreation() {
try {
const dir = await directory.create(db, ["myapp", "tenant"]);
} catch (error) {
// Second process fails
}
}
// Solution: Use createOrOpen
async function safeCreation() {
const dir = await directory.createOrOpen(db, ["myapp", "tenant"]);
// Works for all processes
}
// Solution: Handle errors gracefully
async function robustCreation() {
try {
const dir = await directory.create(db, ["myapp", "tenant"]);
} catch (error) {
if (error instanceof Error && error.message.includes("already exists")) {
const dir = await directory.open(db, ["myapp", "tenant"]);
return dir;
}
throw error;
}
}
Tools and techniques for debugging issues.
Enable Tracing
import fdb from "foundationdb";
// Enable detailed tracing
fdb.configNetwork({
trace_enable: "./fdb-traces",
trace_format: "json",
trace_log_group: "myapp",
});
fdb.setAPIVersion(620);
const db = fdb.open();
// Check traces at ./fdb-traces/*.json
// Look for: errors, warnings, slow_task events
Transaction Debugging
import fdb, { TransactionOptionCode } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
await db.doTransaction(async (tn) => {
// Enable transaction logging
tn.setOption(TransactionOptionCode.DebugTransactionIdentifier, "my-tx-123");
tn.setOption(TransactionOptionCode.LogTransaction);
// Perform operations
tn.set("key", "value");
}, {
debug_transaction_identifier: "test-transaction",
log_transaction: true,
});
// Check traces for transaction details
Performance Profiling
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
async function profileOperation() {
const start = Date.now();
await db.doTransaction(async (tn) => {
const opStart = Date.now();
const value = await tn.get("key");
console.log(`Get took ${Date.now() - opStart}ms`);
const setStart = Date.now();
tn.set("key", "value");
console.log(`Set took ${Date.now() - setStart}ms`);
});
console.log(`Total transaction: ${Date.now() - start}ms`);
}
// Monitor transaction sizes
await db.doTransaction(async (tn) => {
for (let i = 0; i < 1000; i++) {
tn.set(`key:${i}`, `value:${i}`);
if (i % 100 === 0) {
console.log(`Size at ${i}:`, await tn.getApproximateSize());
}
}
});
1. Keep Transactions Short
Minimize transaction duration to reduce conflicts and avoid timeouts.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Bad: Long-running computation in transaction
await db.doTransaction(async (tn) => {
const data = await tn.get("data");
// Expensive computation
const result = await expensiveComputation(data);
tn.set("result", result);
});
// Good: Compute outside transaction
const data = await db.get("data");
const result = await expensiveComputation(data);
await db.doTransaction(async (tn) => {
tn.set("result", result);
});
2. Use Atomic Operations for Counters
Avoid read-modify-write patterns for counters.
// Bad: Read-modify-write
await db.doTransaction(async (tn) => {
const value = await tn.get("counter");
const count = parseInt(value?.toString() || "0");
tn.set("counter", (count + 1).toString());
});
// Good: Atomic add
const delta = Buffer.allocUnsafe(8);
delta.writeBigInt64LE(1n, 0);
await db.add("counter", delta);3. Use Snapshot Reads When Possible
Reduce conflicts by using snapshot reads for non-critical data.
await db.doTransaction(async (tn) => {
// Critical read (causes conflicts)
const critical = await tn.get("critical:data");
// Non-critical read (no conflicts)
const metadata = await tn.snapshot().get("metadata");
// Write based on critical data
tn.set("result", processData(critical));
});
4. Batch Related Operations
Group related operations in single transactions.
// Bad: Multiple transactions
await db.set("user:alice:name", "Alice");
await db.set("user:alice:email", "alice@example.com");
await db.set("user:alice:age", "30");
// Good: Single transaction
await db.doTransaction(async (tn) => {
tn.set("user:alice:name", "Alice");
tn.set("user:alice:email", "alice@example.com");
tn.set("user:alice:age", "30");
});
5. Handle Large Datasets with Chunking
Split large operations into manageable chunks.
async function processLargeDataset() {
let startKey = "data:";
const chunkSize = 1000;
while (true) {
const chunk = await db.getRangeAll(
startKey,
"data:~",
{ limit: chunkSize }
);
if (chunk.length === 0) break;
await db.doTransaction(async (tn) => {
for (const [key, value] of chunk) {
// Process item
tn.set(key.toString() + ":processed", "true");
}
});
if (chunk.length < chunkSize) break;
startKey = chunk[chunk.length - 1][0].toString() + "\x00";
}
}
6. Use Hierarchical Key Structure
Organize keys hierarchically for efficient queries.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Good: Hierarchical structure
await db.set("app:users:alice:profile", "...");
await db.set("app:users:alice:settings", "...");
await db.set("app:users:bob:profile", "...");
await db.set("app:orders:12345:items", "...");
// Query all user data
const aliceData = await db.getRangeAllStartsWith("app:users:alice:");
// Query all orders
const orders = await db.getRangeAllStartsWith("app:orders:");7. Use Tuple Encoding for Composite Keys
Leverage tuple encoding for structured keys.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open().withKeyEncoding(fdb.encoders.tuple);
// Store with composite keys
await db.set(["user", "alice", "profile"], "data");
await db.set(["user", "alice", "settings"], "data");
await db.set(["order", 12345, "items"], "data");
// Query by prefix
const aliceData = await db.getRangeAllStartsWith(["user", "alice"]);
const orders = await db.getRangeAllStartsWith(["order"]);8. Use Directories for Multi-Tenancy
Isolate tenant data with directory layer.
import fdb, { directory } from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Create tenant directories
const tenant1 = await directory.createOrOpen(db, ["tenants", "acme"]);
const tenant2 = await directory.createOrOpen(db, ["tenants", "techcorp"]);
// Scoped databases
const acmeDb = db.at(tenant1);
const techcorpDb = db.at(tenant2);
// Isolated data
await acmeDb.set("data", "Acme data");
await techcorpDb.set("data", "Techcorp data");9. Implement Retry Logic with Backoff
Handle transient failures gracefully.
async function withRetry<T>(
operation: () => Promise<T>,
maxRetries = 5,
baseDelay = 100
): Promise<T> {
for (let attempt = 0; attempt <= maxRetries; attempt++) {
try {
return await operation();
} catch (error) {
if (attempt === maxRetries) throw error;
const delay = baseDelay * Math.pow(2, attempt);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
throw new Error("Should not reach here");
}
const result = await withRetry(() => db.get("key"));10. Monitor Transaction Metrics
Track performance and errors for optimization.
class MetricsCollector {
private metrics = {
transactionCount: 0,
errorCount: 0,
avgDuration: 0,
};
async trackTransaction<T>(
operation: () => Promise<T>
): Promise<T> {
const start = Date.now();
try {
const result = await operation();
this.metrics.transactionCount++;
const duration = Date.now() - start;
this.metrics.avgDuration =
(this.metrics.avgDuration * (this.metrics.transactionCount - 1) + duration) /
this.metrics.transactionCount;
return result;
} catch (error) {
this.metrics.errorCount++;
throw error;
}
}
getMetrics() {
return { ...this.metrics };
}
}
const metrics = new MetricsCollector();
await metrics.trackTransaction(() =>
db.doTransaction(async (tn) => {
tn.set("key", "value");
})
);
console.log("Metrics:", metrics.getMetrics());11. Close Connections Properly
Ensure clean shutdown of database connections.
import fdb from "foundationdb";
fdb.setAPIVersion(620);
const db = fdb.open();
// Use try-finally for cleanup
try {
await db.set("key", "value");
} finally {
db.close();
fdb.stopNetworkSync();
}
// Handle graceful shutdown
process.on("SIGTERM", () => {
console.log("Shutting down...");
db.close();
fdb.stopNetworkSync();
process.exit(0);
});
process.on("SIGINT", () => {
console.log("Interrupted, cleaning up...");
db.close();
fdb.stopNetworkSync();
process.exit(0);
});
12. Use Connection Pooling
Reuse database connections across application.
class DatabasePool {
private static db: ReturnType<typeof fdb.open> | null = null;
static initialize() {
if (!this.db) {
fdb.setAPIVersion(620);
this.db = fdb.open();
this.db.setNativeOptions({
transaction_timeout: 10000,
max_watches: 20000,
});
}
}
static getDatabase() {
if (!this.db) {
throw new Error("Database not initialized");
}
return this.db;
}
static shutdown() {
if (this.db) {
this.db.close();
fdb.stopNetworkSync();
this.db = null;
}
}
}
// Initialize once at startup
DatabasePool.initialize();
// Use throughout application
const db = DatabasePool.getDatabase();
await db.set("key", "value");
// Shutdown on exit
process.on("exit", () => {
DatabasePool.shutdown();
});
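// Sketch (assumption): "exit" handlers must be synchronous and fire very
// late; hooking SIGTERM as well gives a more reliable shutdown path, and
// DatabasePool.shutdown() above is synchronous, which keeps this safe
process.on("SIGTERM", () => {
DatabasePool.shutdown();
process.exit(0);
});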