Comprehensive guide to Drizzle ORM's advanced features including transactions, prepared statements, migrations, raw SQL, batch operations, read replicas, caching, logging, and more.
Transactions allow you to execute multiple database operations atomically. All operations succeed together or fail together.
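A classic use case is moving money between two accounts, where neither update may persist without the other. A minimal sketch, assuming the db instance created below and a hypothetical accounts table with id and balance columns (not part of the schema used elsewhere in this guide):
import { eq, sql } from 'drizzle-orm';
import { accounts } from './schema'; // hypothetical table: id, balance

async function transfer(fromId: number, toId: number, amount: number) {
  await db.transaction(async (tx) => {
    // Debit the source account
    await tx
      .update(accounts)
      .set({ balance: sql`${accounts.balance} - ${amount}` })
      .where(eq(accounts.id, fromId));
    // Credit the destination account
    await tx
      .update(accounts)
      .set({ balance: sql`${accounts.balance} + ${amount}` })
      .where(eq(accounts.id, toId));
    // Any thrown error (or failed query) rolls back both updates
  });
}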
import { drizzle } from 'drizzle-orm/postgres-js';
import { users, posts } from './schema';
const db = drizzle(client);
// Basic transaction
await db.transaction(async (tx) => {
// All operations within this callback are part of the transaction
await tx.insert(users).values({ name: 'John', email: 'john@example.com' });
await tx.insert(posts).values({ title: 'Hello World', userId: 1 });
// If any operation fails, all changes are rolled back
});

Type Signature:
transaction<T>(
transaction: (tx: Transaction) => Promise<T>,
config?: TransactionConfig
): Promise<T>

import { TransactionRollbackError } from 'drizzle-orm';
await db.transaction(async (tx) => {
const user = await tx.insert(users).values({
name: 'John',
email: 'john@example.com'
}).returning();
// Check some condition
if (user[0].email.includes('spam')) {
// Explicitly rollback the transaction
tx.rollback();
}
await tx.insert(posts).values({
title: 'Hello World',
userId: user[0].id
});
});

Error Classes:
// Base error class
class DrizzleError extends Error {
constructor(options: { message?: string; cause?: unknown })
}
// Query execution error
class DrizzleQueryError extends Error {
constructor(
public query: string,
public params: any[],
public cause?: Error
)
}
// Transaction rollback error
class TransactionRollbackError extends DrizzleError {
constructor()
}

import { PgTransactionConfig } from 'drizzle-orm/pg-core';
// Read uncommitted - lowest isolation, highest performance
await db.transaction(async (tx) => {
// Your operations
}, {
isolationLevel: 'read uncommitted'
});
// Read committed - default in PostgreSQL
await db.transaction(async (tx) => {
// Your operations
}, {
isolationLevel: 'read committed'
});
// Repeatable read - prevents non-repeatable reads
await db.transaction(async (tx) => {
// Your operations
}, {
isolationLevel: 'repeatable read'
});
// Serializable - highest isolation, prevents all anomalies
await db.transaction(async (tx) => {
// Your operations
}, {
isolationLevel: 'serializable'
});

Transaction Config Interface:
interface PgTransactionConfig {
isolationLevel?:
| 'read uncommitted'
| 'read committed'
| 'repeatable read'
| 'serializable';
accessMode?: 'read only' | 'read write';
deferrable?: boolean;
}

// Read-only transaction (optimized for reads)
await db.transaction(async (tx) => {
const allUsers = await tx.select().from(users);
const allPosts = await tx.select().from(posts);
// Cannot perform writes in this transaction
}, {
accessMode: 'read only'
});
// Read-write transaction (default)
await db.transaction(async (tx) => {
await tx.insert(users).values({ name: 'John' });
}, {
accessMode: 'read write'
});// Deferrable transaction - can be delayed if conflicts
await db.transaction(async (tx) => {
// Your operations
}, {
isolationLevel: 'serializable',
deferrable: true
});
// Not deferrable (default)
await db.transaction(async (tx) => {
// Your operations
}, {
deferrable: false
});// Nested transactions (savepoints)
await db.transaction(async (tx) => {
await tx.insert(users).values({ name: 'John' });
// Nested transaction
await tx.transaction(async (tx2) => {
await tx2.insert(posts).values({ title: 'Post 1', userId: 1 });
await tx2.insert(posts).values({ title: 'Post 2', userId: 1 });
});
// If nested transaction fails, only nested operations are rolled back
await tx.insert(comments).values({ text: 'Comment', postId: 1 });
});try {
await db.transaction(async (tx) => {
await tx.insert(users).values({ name: 'John', email: 'john@example.com' });
// This will fail if email is duplicate
await tx.insert(users).values({ name: 'Jane', email: 'john@example.com' });
});
} catch (error) {
if (error instanceof TransactionRollbackError) {
console.log('Transaction was explicitly rolled back');
} else if (error instanceof DrizzleQueryError) {
console.log('Query failed:', error.query);
console.log('Params:', error.params);
console.log('Cause:', error.cause);
} else {
console.log('Unexpected error:', error);
}
}// Set transaction config dynamically within transaction
await db.transaction(async (tx) => {
// Change transaction settings mid-transaction
await tx.setTransaction({
isolationLevel: 'serializable'
});
// Your operations with new settings
await tx.select().from(users);
});

Prepared statements improve performance for repeated queries by pre-compiling SQL and reusing it with different parameters.
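Alongside the query-builder examples below, prepared statements also work with the relational query API when a schema is passed to drizzle(); a minimal sketch (the statement name is an arbitrary assumption):
import { sql } from 'drizzle-orm';

// Assumes drizzle(client, { schema }) so that db.query.users exists
const getUserByIdRqb = db.query.users
  .findFirst({
    where: (users, { eq }) => eq(users.id, sql.placeholder('id'))
  })
  .prepare('get_user_by_id_rqb');

const user = await getUserByIdRqb.execute({ id: 1 });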
import { eq } from 'drizzle-orm';
import { sql } from 'drizzle-orm';
// Prepare a SELECT query
const getUserById = db
.select()
.from(users)
.where(eq(users.id, sql.placeholder('id')))
.prepare('get_user_by_id');
// Execute multiple times with different values
const user1 = await getUserById.execute({ id: 1 });
const user2 = await getUserById.execute({ id: 2 });
const user3 = await getUserById.execute({ id: 3 });

Type Signature:
interface PreparedQuery<T> {
execute(placeholderValues?: Record<string, unknown>): Promise<T>;
}
// On query builders
prepare(name?: string): PreparedQuery<Result>

// Prepare an INSERT statement
const insertUser = db
.insert(users)
.values({
name: sql.placeholder('name'),
email: sql.placeholder('email'),
age: sql.placeholder('age')
})
.returning()
.prepare('insert_user');
// Execute with different values
const user1 = await insertUser.execute({
name: 'John',
email: 'john@example.com',
age: 30
});
const user2 = await insertUser.execute({
name: 'Jane',
email: 'jane@example.com',
age: 25
});// Prepare an UPDATE statement
const updateUserAge = db
.update(users)
.set({ age: sql.placeholder('age') })
.where(eq(users.id, sql.placeholder('id')))
.returning()
.prepare('update_user_age');
// Execute multiple times
await updateUserAge.execute({ id: 1, age: 31 });
await updateUserAge.execute({ id: 2, age: 26 });// Prepare a DELETE statement
const deleteUserById = db
.delete(users)
.where(eq(users.id, sql.placeholder('id')))
.prepare('delete_user');
// Execute with different IDs
await deleteUserById.execute({ id: 1 });
await deleteUserById.execute({ id: 2 });// Prepare a complex query with multiple placeholders
const searchUsers = db
.select()
.from(users)
.where(
and(
gte(users.age, sql.placeholder('minAge')),
lte(users.age, sql.placeholder('maxAge')),
like(users.name, sql.placeholder('namePattern'))
)
)
.orderBy(users.name)
.limit(sql.placeholder('limit'))
.prepare('search_users');
// Execute with different criteria
const results1 = await searchUsers.execute({
minAge: 18,
maxAge: 30,
namePattern: 'J%',
limit: 10
});
const results2 = await searchUsers.execute({
minAge: 25,
maxAge: 40,
namePattern: 'A%',
limit: 20
});// Without prepared statements (parsed every time)
for (let i = 0; i < 1000; i++) {
await db.select().from(users).where(eq(users.id, i));
}
// With prepared statements (parsed once, executed 1000 times)
const getUserById = db
.select()
.from(users)
.where(eq(users.id, sql.placeholder('id')))
.prepare();
for (let i = 0; i < 1000; i++) {
await getUserById.execute({ id: i });
}

Benefits: the SQL is parsed and planned once, so repeated executions only bind new parameter values, reducing per-query parsing overhead and latency.
// Named prepared statement (can be reused across sessions in some drivers)
const namedQuery = db
.select()
.from(users)
.where(eq(users.id, sql.placeholder('id')))
.prepare('my_query_name');
// Unnamed prepared statement (session-scoped)
const unnamedQuery = db
.select()
.from(users)
.where(eq(users.id, sql.placeholder('id')))
.prepare();

Drizzle provides utilities to read and execute database migrations generated by drizzle-kit.
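The folder layout shown below is what drizzle-kit writes based on its config file. A minimal sketch of such a config for recent drizzle-kit versions (paths and credentials are assumptions):
// drizzle.config.ts
import { defineConfig } from 'drizzle-kit';

export default defineConfig({
  dialect: 'postgresql',        // or 'mysql' / 'sqlite'
  schema: './src/schema.ts',    // where your tables are defined
  out: './drizzle',             // folder read later by migrate()/readMigrationFiles()
  dbCredentials: {
    url: process.env.DATABASE_URL!
  }
});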
drizzle/
├── meta/
│ └── _journal.json # Migration metadata
├── 0000_initial.sql # First migration
├── 0001_add_users.sql # Second migration
└── 0002_add_posts.sql # Third migration

Journal Format (_journal.json):
{
"entries": [
{
"idx": 0,
"when": 1234567890000,
"tag": "0000_initial",
"breakpoints": true
},
{
"idx": 1,
"when": 1234567891000,
"tag": "0001_add_users",
"breakpoints": true
}
]
}

import { readMigrationFiles } from 'drizzle-orm/migrator';
const migrations = readMigrationFiles({
migrationsFolder: './drizzle',
migrationsTable: '__drizzle_migrations', // optional
migrationsSchema: 'public' // optional, PostgreSQL only
});

Type Definitions:
interface MigrationConfig {
migrationsFolder: string;
migrationsTable?: string;
migrationsSchema?: string; // PostgreSQL only
}
interface MigrationMeta {
sql: string[]; // SQL statements to execute
folderMillis: number; // Timestamp from folder name
hash: string; // SHA-256 hash of SQL content
bps: boolean; // Whether to use breakpoints
}
function readMigrationFiles(config: MigrationConfig): MigrationMeta[]

import { drizzle } from 'drizzle-orm/postgres-js';
import { migrate } from 'drizzle-orm/postgres-js/migrator';
import postgres from 'postgres';
const sql = postgres('postgresql://user:password@host:5432/db', { max: 1 });
const db = drizzle(sql);
// Run migrations
await migrate(db, {
migrationsFolder: './drizzle',
migrationsTable: '__drizzle_migrations', // optional, default: __drizzle_migrations
migrationsSchema: 'public' // optional, default: public
});
// Close connection after migrations
await sql.end();

import { drizzle } from 'drizzle-orm/node-postgres';
import { migrate } from 'drizzle-orm/node-postgres/migrator';
import { Pool } from 'pg';
const pool = new Pool({
connectionString: 'postgresql://user:password@host:5432/db'
});
const db = drizzle(pool);
// Run migrations
await migrate(db, {
migrationsFolder: './drizzle'
});
// Close pool after migrations
await pool.end();

import { drizzle } from 'drizzle-orm/mysql2';
import { migrate } from 'drizzle-orm/mysql2/migrator';
import mysql from 'mysql2/promise';
const connection = await mysql.createConnection({
host: 'localhost',
user: 'root',
password: 'password',
database: 'mydb'
});
const db = drizzle(connection);
// Run migrations
await migrate(db, {
migrationsFolder: './drizzle',
migrationsTable: '__drizzle_migrations' // optional
});
await connection.end();

import { drizzle } from 'drizzle-orm/better-sqlite3';
import { migrate } from 'drizzle-orm/better-sqlite3/migrator';
import Database from 'better-sqlite3';
const sqlite = new Database('mydb.db');
const db = drizzle(sqlite);
// Run migrations (synchronous)
migrate(db, {
migrationsFolder: './drizzle'
});
sqlite.close();

import { drizzle } from 'drizzle-orm/bun-sqlite';
import { migrate } from 'drizzle-orm/bun-sqlite/migrator';
import { Database } from 'bun:sqlite';
const sqlite = new Database('mydb.db');
const db = drizzle(sqlite);
// Run migrations (synchronous)
migrate(db, {
migrationsFolder: './drizzle'
});
sqlite.close();

import { drizzle } from 'drizzle-orm/libsql';
import { migrate } from 'drizzle-orm/libsql/migrator';
import { createClient } from '@libsql/client';
const client = createClient({
url: 'libsql://your-db.turso.io',
authToken: 'your-auth-token'
});
const db = drizzle(client);
// Run migrations
await migrate(db, {
migrationsFolder: './drizzle'
});

import { drizzle } from 'drizzle-orm/d1';
import { migrate } from 'drizzle-orm/d1/migrator';
export default {
async fetch(request: Request, env: Env) {
const db = drizzle(env.DB);
// Run migrations
await migrate(db, {
migrationsFolder: './drizzle'
});
return new Response('Migrations complete');
}
};

import { drizzle } from 'drizzle-orm/expo-sqlite';
import { migrate } from 'drizzle-orm/expo-sqlite/migrator';
import { openDatabaseSync } from 'expo-sqlite/next';
const expo = openDatabaseSync('mydb.db');
const db = drizzle(expo);
// Run migrations
await migrate(db, {
migrationsFolder: './drizzle'
});

import { migrate } from 'drizzle-orm/postgres-js/migrator';
try {
await migrate(db, {
migrationsFolder: './drizzle'
});
console.log('Migrations completed successfully');
} catch (error) {
console.error('Migration failed:', error);
// Handle error (e.g., rollback, alert, exit)
process.exit(1);
}

import { readMigrationFiles } from 'drizzle-orm/migrator';
import { sql } from 'drizzle-orm';
// Read migrations manually
const migrations = readMigrationFiles({
migrationsFolder: './drizzle'
});
// Execute with custom logic
for (const migration of migrations) {
console.log(`Running migration: ${migration.hash}`);
// Check if already applied
const [existing] = await db
.select()
.from(migrationsTable)
.where(eq(migrationsTable.hash, migration.hash));
if (existing) {
console.log('Already applied, skipping');
continue;
}
// Execute migration
await db.transaction(async (tx) => {
for (const stmt of migration.sql) {
await tx.execute(sql.raw(stmt));
}
// Record migration
await tx.insert(migrationsTable).values({
hash: migration.hash,
created_at: new Date()
});
});
}

Execute raw SQL queries when you need to go beyond the query builder.
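You don't always need db.execute(): sql`` fragments can also be embedded directly inside regular query-builder calls, for example for computed columns or conditions the builder doesn't model. A small sketch (createdAt is assumed to exist on the users table):
import { sql } from 'drizzle-orm';

const recentUsers = await db
  .select({
    id: users.id,
    // Typed computed column built from a raw fragment
    upperName: sql<string>`upper(${users.name})`
  })
  .from(users)
  .where(sql`${users.createdAt} > now() - interval '7 days'`);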
import { sql } from 'drizzle-orm';
// Execute raw SQL that returns data
const result = await db.execute(sql`
SELECT * FROM users
WHERE age > ${25}
ORDER BY name
`);
console.log(result); // Raw database results

Type Signature:
execute<T>(query: SQL): Promise<T>

import { sql } from 'drizzle-orm';
// Define return type
interface UserCount {
count: number;
avgAge: number;
}
const stats = await db.execute<UserCount[]>(sql`
SELECT
COUNT(*) as count,
AVG(age) as avgAge
FROM users
`);
console.log(stats[0].count); // Type-safe access
console.log(stats[0].avgAge);

import { sql } from 'drizzle-orm';
// Parameters are automatically escaped
const minAge = 18;
const maxAge = 65;
const namePattern = '%John%';
const users = await db.execute(sql`
SELECT * FROM users
WHERE age BETWEEN ${minAge} AND ${maxAge}
AND name LIKE ${namePattern}
`);

import { sql } from 'drizzle-orm';
// Use sql.identifier for table/column names
const tableName = 'users';
const columnName = 'email';
const result = await db.execute(sql`
SELECT ${sql.identifier(columnName)}
FROM ${sql.identifier(tableName)}
WHERE age > 18
`);

import { drizzle } from 'drizzle-orm/better-sqlite3';
import { sql } from 'drizzle-orm';
const db = drizzle(sqlite);
// db.run() - Execute statement, return metadata
const runResult = db.run(sql`
INSERT INTO users (name, email)
VALUES ('John', 'john@example.com')
`);
console.log(runResult.changes); // Number of affected rows
console.log(runResult.lastInsertRowid); // Last inserted row ID
// db.all() - Fetch all rows
const allUsers = db.all(sql`SELECT * FROM users`);
console.log(allUsers); // Array of all users
// db.get() - Fetch single row
const user = db.get(sql`SELECT * FROM users WHERE id = ${1}`);
console.log(user); // Single user object or undefined
// db.values() - Fetch raw values (2D array)
const values = db.values(sql`
SELECT name, email FROM users
`);
console.log(values); // [['John', 'john@example.com'], ['Jane', 'jane@example.com']]

Type Signatures:
interface DBResult {
changes: number;
lastInsertRowid: number | bigint;
}
interface BaseSQLiteDatabase {
run(query: SQL): DBResult; // Execute, return metadata
all<T>(query: SQL): T[]; // Fetch all rows
get<T>(query: SQL): T | undefined; // Fetch single row
values<T>(query: SQL): T[][]; // Fetch raw values
}

import { sql } from 'drizzle-orm';
const userPosts = await db.execute(sql`
SELECT
u.id as user_id,
u.name as user_name,
p.id as post_id,
p.title as post_title
FROM users u
LEFT JOIN posts p ON u.id = p.user_id
WHERE u.age > ${18}
ORDER BY u.name, p.created_at DESC
`);

import { sql } from 'drizzle-orm';
// Build SQL dynamically
const conditions: SQL[] = [];
if (minAge) {
conditions.push(sql`age >= ${minAge}`);
}
if (maxAge) {
conditions.push(sql`age <= ${maxAge}`);
}
if (namePattern) {
conditions.push(sql`name LIKE ${namePattern}`);
}
const whereClause = conditions.length > 0
? sql.join([sql`WHERE`, sql.join(conditions, sql` AND `)], sql` `)
: sql``;
const query = sql.join([
sql`SELECT * FROM users`,
whereClause,
sql`ORDER BY name`
], sql` `);
const users = await db.execute(query);

import { sql } from 'drizzle-orm';
// sql.raw() - Create raw SQL string (no escaping)
const tableName = 'users';
const query1 = sql`SELECT * FROM ${sql.raw(tableName)}`;
// sql.empty() - Create empty SQL
const emptySql = sql.empty();
// sql.join() - Join SQL chunks
const parts = [
sql`SELECT *`,
sql`FROM users`,
sql`WHERE age > 18`
];
const joined = sql.join(parts, sql` `);
// sql.placeholder() - Create named placeholder for prepared statements
const prepared = sql`
SELECT * FROM users
WHERE id = ${sql.placeholder('userId')}
`;
// sql.param() - Create query parameter
const param = sql.param(123);

Execute multiple queries in a single round-trip to the database for improved performance.
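Because db.batch() takes an array, the batch can also be built programmatically, e.g. to send many small writes in one round-trip. A sketch (the cast is needed because batch() is typed over a non-empty tuple):
const titles = ['Post 1', 'Post 2', 'Post 3'];

const queries = titles.map((title) =>
  db.insert(posts).values({ title, userId: 1 })
);

// One round-trip for all inserts (on drivers that support batch, listed below)
const results = await db.batch(
  queries as [(typeof queries)[number], ...typeof queries]
);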
import { drizzle } from 'drizzle-orm/postgres-js';
const db = drizzle(client);
// Execute multiple queries in one batch
const [users, posts, comments] = await db.batch([
db.select().from(users),
db.select().from(posts),
db.select().from(comments)
]);
console.log(users); // Results from first query
console.log(posts); // Results from second query
console.log(comments); // Results from third query

Type Signature:
interface Database {
batch<T extends BatchItem[]>(
queries: [...T]
): Promise<BatchResponse<T>>
}
type BatchItem = RunnableQuery<any, any>;
type BatchResponse<T extends BatchItem[]> = {
[K in keyof T]: T[K]['_']['result'];
};// Combine different query types in a single batch
const [selectedUsers, insertedPosts, updatedComments] = await db.batch([
// SELECT
db.select().from(users).where(eq(users.age, 25)),
// INSERT
db.insert(posts).values({
title: 'New Post',
userId: 1
}).returning(),
// UPDATE
db.update(comments)
.set({ approved: true })
.where(eq(comments.postId, 1))
.returning()
]);// Prepare statements
const getUserById = db
.select()
.from(users)
.where(eq(users.id, sql.placeholder('id')))
.prepare();
const getPostsByUserId = db
.select()
.from(posts)
.where(eq(posts.userId, sql.placeholder('userId')))
.prepare();
// Execute in batch
const [user1, user2, posts1, posts2] = await db.batch([
getUserById.execute({ id: 1 }),
getUserById.execute({ id: 2 }),
getPostsByUserId.execute({ userId: 1 }),
getPostsByUserId.execute({ userId: 2 })
]);

// Sequential execution (slow - 3 round trips)
const allUsers = await db.select().from(users);
const allPosts = await db.select().from(posts);
const allComments = await db.select().from(comments);
// Batch execution (fast - 1 round trip)
const [batchedUsers, batchedPosts, batchedComments] = await db.batch([
db.select().from(users),
db.select().from(posts),
db.select().from(comments)
]);

Batch operations are supported by specific drivers:
Supported:
- postgres-js (PostgreSQL)
- neon-serverless (Neon PostgreSQL)
- neon-http (Neon HTTP)
- planetscale-serverless (PlanetScale MySQL)
- d1 (Cloudflare D1)
- libsql (Turso/libSQL)

Not Supported:
- node-postgres (pg)
- mysql2
- better-sqlite3

// Check if batch is available
if ('batch' in db) {
const results = await db.batch([
db.select().from(users),
db.select().from(posts)
]);
} else {
// Fallback to sequential execution
const users = await db.select().from(users);
const posts = await db.select().from(posts);
}try {
const results = await db.batch([
db.select().from(users),
db.insert(posts).values({ title: 'Post' }), // This might fail
db.select().from(comments)
]);
} catch (error) {
// If any query fails, the entire batch fails
console.error('Batch execution failed:', error);
}// Full type inference for batch results
const results = await db.batch([
db.select({ id: users.id, name: users.name }).from(users),
db.select({ id: posts.id, title: posts.title }).from(posts)
]);
// results[0]: { id: number; name: string }[]
// results[1]: { id: number; title: string }[]
const [userRows, postRows] = results;
console.log(userRows[0].name); // Type-safe
console.log(postRows[0].title); // Type-safe

Configure read replicas to distribute SELECT queries across multiple database instances while sending writes to the primary.
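The examples below wire each replica explicitly; in practice the replica list is often assembled from the environment so the same code runs with or without replicas. A sketch (the REPLICA_URLS variable name is an assumption):
import { drizzle } from 'drizzle-orm/postgres-js';
import { withReplicas } from 'drizzle-orm/pg-core';
import postgres from 'postgres';

const primary = drizzle(postgres(process.env.DATABASE_URL!));

// e.g. REPLICA_URLS="postgresql://...@replica1:5432/db,postgresql://...@replica2:5432/db"
const replicaUrls = (process.env.REPLICA_URLS ?? '').split(',').filter(Boolean);
const replicas = replicaUrls.map((url) => drizzle(postgres(url)));

// withReplicas expects a non-empty tuple, hence the cast and the fallback
export const db = replicas.length > 0
  ? withReplicas(primary, replicas as [typeof primary, ...typeof primary[]])
  : primary;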
import { drizzle } from 'drizzle-orm/postgres-js';
import { withReplicas } from 'drizzle-orm/pg-core';
import postgres from 'postgres';
// Primary database (for writes)
const primaryClient = postgres('postgresql://user:pass@primary:5432/db');
const primary = drizzle(primaryClient);
// Replica databases (for reads)
const replica1Client = postgres('postgresql://user:pass@replica1:5432/db');
const replica1 = drizzle(replica1Client);
const replica2Client = postgres('postgresql://user:pass@replica2:5432/db');
const replica2 = drizzle(replica2Client);
// Configure read replicas
const db = withReplicas(primary, [replica1, replica2]);
// SELECT queries automatically use replicas
const allUsers = await db.select().from(users); // Uses replica1 or replica2
// Write queries always use primary
await db.insert(users).values({ name: 'John' }); // Uses primary
await db.update(users).set({ name: 'Jane' }); // Uses primary
await db.delete(users).where(eq(users.id, 1)); // Uses primary

Type Signature:
function withReplicas<Q extends PgDatabase>(
primary: Q,
replicas: [Q, ...Q[]],
getReplica?: (replicas: Q[]) => Q
): PgWithReplicas<Q>
interface PgWithReplicas<Q> extends Q {
$primary: Q;
$replicas: Q[];
}

import { withReplicas } from 'drizzle-orm/pg-core';
// Custom replica selection strategy
const db = withReplicas(
primary,
[replica1, replica2, replica3],
(replicas) => {
// Round-robin selection
const index = Math.floor(Date.now() / 1000) % replicas.length;
return replicas[index]!;
}
);
// Weighted selection
let currentIndex = 0;
const db2 = withReplicas(
primary,
[replica1, replica2, replica3],
(replicas) => {
// Simple round-robin
const replica = replicas[currentIndex % replicas.length]!;
currentIndex++;
return replica;
}
);
// Health-based selection
const replicaHealth = new Map([
[replica1, 100],
[replica2, 80],
[replica3, 90]
]);
const db3 = withReplicas(
primary,
[replica1, replica2, replica3],
(replicas) => {
// Select healthiest replica
return replicas.reduce((best, current) =>
(replicaHealth.get(current) ?? 0) > (replicaHealth.get(best) ?? 0)
? current
: best
);
}
);const db = withReplicas(primary, [replica1, replica2]);
// Force query to use primary
const freshData = await db.$primary.select().from(users);
// Force query to use specific replica
const cachedData = await db.$replicas[0].select().from(users);
// Access all replicas
for (const replica of db.$replicas) {
const health = await replica.execute(sql`SELECT 1`);
console.log('Replica health:', health);
}

import { drizzle } from 'drizzle-orm/mysql2';
import { withReplicas } from 'drizzle-orm/mysql-core';
import mysql from 'mysql2/promise';
// Primary database
const primaryConn = await mysql.createConnection({
host: 'primary.mysql.example.com',
user: 'user',
password: 'password',
database: 'mydb'
});
const primary = drizzle(primaryConn);
// Replica databases
const replica1Conn = await mysql.createConnection({
host: 'replica1.mysql.example.com',
user: 'user',
password: 'password',
database: 'mydb'
});
const replica1 = drizzle(replica1Conn);
const replica2Conn = await mysql.createConnection({
host: 'replica2.mysql.example.com',
user: 'user',
password: 'password',
database: 'mydb'
});
const replica2 = drizzle(replica2Conn);
// Configure replicas
const db = withReplicas(primary, [replica1, replica2]);
// Reads use replicas, writes use primary
const allUsers = await db.select().from(users); // replica
await db.insert(users).values({ name: 'John' }); // primary

import { drizzle } from 'drizzle-orm/better-sqlite3';
import { withReplicas } from 'drizzle-orm/sqlite-core';
import Database from 'better-sqlite3';
// Primary database (for writes)
const primarySqlite = new Database('primary.db');
const primary = drizzle(primarySqlite);
// Replica databases (for reads)
const replica1Sqlite = new Database('replica1.db', { readonly: true });
const replica1 = drizzle(replica1Sqlite);
const replica2Sqlite = new Database('replica2.db', { readonly: true });
const replica2 = drizzle(replica2Sqlite);
// Configure replicas
const db = withReplicas(primary, [replica1, replica2]);
// SELECT queries use replicas
const allUsers = await db.select().from(users);
// Mutations use primary
await db.insert(users).values({ name: 'John' });const db = withReplicas(primary, [replica1, replica2]);
// These use REPLICAS (random selection by default)
await db.select().from(users);
await db.selectDistinct().from(users);
await db.selectDistinctOn([users.email]).from(users);
await db.$count(users);
await db.with(cte).select().from(cte);
// These use PRIMARY
await db.insert(users).values({ name: 'John' });
await db.update(users).set({ name: 'Jane' });
await db.delete(users).where(eq(users.id, 1));
await db.execute(sql`INSERT INTO users VALUES (1, 'John')`);
await db.transaction(async (tx) => { /* ... */ });
await db.refreshMaterializedView(myView); // PostgreSQL onlyconst db = withReplicas(primary, [replica1, replica2]);
// Write to primary
const [newUser] = await db
.insert(users)
.values({ name: 'John' })
.returning();
// Immediately read from primary to avoid replication lag
const freshUser = await db.$primary
.select()
.from(users)
.where(eq(users.id, newUser.id));
// Later reads can use replicas (after replication catches up)
setTimeout(async () => {
const cachedUser = await db
.select()
.from(users)
.where(eq(users.id, newUser.id)); // Uses replica
}, 1000);

Cache query results to reduce database load and improve performance.
import { Cache } from 'drizzle-orm/cache/core';
abstract class Cache {
// Define cache strategy
abstract strategy(): 'explicit' | 'all';
// Get cached value
abstract get(
key: string,
tables: string[],
isTag: boolean,
isAutoInvalidate?: boolean
): Promise<any[] | undefined>;
// Store value in cache
abstract put(
hashedQuery: string,
response: any,
tables: string[],
isTag: boolean,
config?: CacheConfig
): Promise<void>;
// Handle mutations (invalidate cache)
abstract onMutate(
params: MutationOption
): Promise<void>;
}
type MutationOption = {
tags?: string | string[];
tables?: Table | Table[] | string | string[];
};
interface CacheConfig {
ex?: number; // Expire time in seconds
px?: number; // Expire time in milliseconds
exat?: number; // Unix timestamp expiration (seconds)
pxat?: number; // Unix timestamp expiration (ms)
keepTtl?: boolean; // Retain existing TTL
hexOptions?: 'NX' | 'XX' | 'GT' | 'LT'; // Hash field expiration options
}

import { NoopCache } from 'drizzle-orm/cache/core';
// Default cache that does nothing
const cache = new NoopCache();
const db = drizzle(client, {
cache
});
// Queries are not cached
const allUsers = await db.select().from(users);

import { drizzle } from 'drizzle-orm/postgres-js';
import { UpstashCache } from 'drizzle-orm/cache/upstash';
import { Redis } from '@upstash/redis';
// Create Redis client
const redis = new Redis({
url: process.env.UPSTASH_REDIS_URL!,
token: process.env.UPSTASH_REDIS_TOKEN!
});
// Create cache with TTL configuration
const cache = new UpstashCache(redis, {
ex: 60 * 60 // 1 hour TTL
});
// Configure database with cache
const db = drizzle(client, {
cache,
schema // Required for table tracking
});

import { UpstashCache } from 'drizzle-orm/cache/upstash';
// Explicit cache - only cache when explicitly requested
const cache = new UpstashCache(
redis,
{ ex: 3600 }, // 1 hour TTL
false // useGlobally = false (explicit mode)
);
const db = drizzle(client, { cache, schema });
// Not cached
const users1 = await db.select().from(users);
// Cached with tag
const users2 = await db.select()
.from(users)
.$withCache({ tag: 'all-users' });
// Retrieve from cache by tag
const cached = await db.select()
.from(users)
.$withCache({ tag: 'all-users' });// Global cache - cache all SELECT queries automatically
const cache = new UpstashCache(
redis,
{ ex: 3600 }, // 1 hour TTL
true // useGlobally = true (all mode)
);
const db = drizzle(client, { cache, schema });
// Automatically cached
const users = await db.select().from(users);
// Cached with auto-invalidation on mutations
const posts = await db.select().from(posts);
// Mutations automatically invalidate related cache
await db.insert(posts).values({ title: 'New Post' });
// Previous posts query cache is now invalidated// Cache with custom tag
const users = await db.select()
.from(users)
.where(eq(users.role, 'admin'))
.$withCache({
tag: 'admin-users',
config: { ex: 300 } // 5 minute TTL
});
// Retrieve same query from cache
const cachedUsers = await db.select()
.from(users)
.where(eq(users.role, 'admin'))
.$withCache({ tag: 'admin-users' });
// Invalidate by tag
await db.$cache.invalidate({ tags: 'admin-users' });const cache = new UpstashCache(redis, { ex: 3600 }, true);
const db = drizzle(client, { cache, schema });
// SELECT is cached with table tracking
const users = await db.select().from(users);
// INSERT invalidates all queries involving 'users' table
await db.insert(users).values({ name: 'John' });
// Previous SELECT cache is automatically cleared
// UPDATE invalidates cache
await db.update(users).set({ name: 'Jane' });
// Cache cleared again
// DELETE invalidates cache
await db.delete(users).where(eq(users.id, 1));
// Cache cleared again// Cache without auto-invalidation (manual control)
const users = await db.select()
.from(users)
.$withCache({
tag: 'users-list',
autoInvalidate: false // Don't invalidate on mutations
});
// Mutations don't automatically clear cache
await db.insert(users).values({ name: 'John' });
// Cache is NOT cleared
// Manually invalidate when needed
await db.$cache.invalidate({ tags: 'users-list' });// Different TTL configurations
const cache1 = new UpstashCache(redis, {
ex: 60 // 60 seconds
});
const cache2 = new UpstashCache(redis, {
px: 60000 // 60000 milliseconds (60 seconds)
});
const cache3 = new UpstashCache(redis, {
exat: Math.floor(Date.now() / 1000) + 3600 // Expire at specific Unix time
});
// Per-query TTL override
const users = await db.select()
.from(users)
.$withCache({
tag: 'users',
config: { ex: 300 } // Override global TTL to 5 minutes
});

import { Cache } from 'drizzle-orm/cache/core';
class RedisCache extends Cache {
constructor(private redis: RedisClient) {
super();
}
strategy() {
return 'explicit' as const;
}
async get(key: string): Promise<any[] | undefined> {
const cached = await this.redis.get(key);
return cached ? JSON.parse(cached) : undefined;
}
async put(
key: string,
response: any,
tables: string[],
isTag: boolean,
config?: CacheConfig
): Promise<void> {
const ttl = config?.ex ?? 3600;
await this.redis.setex(key, ttl, JSON.stringify(response));
// Track tables for invalidation
for (const table of tables) {
await this.redis.sadd(`table:${table}`, key);
}
}
async onMutate(params: MutationOption): Promise<void> {
const tables = Array.isArray(params.tables)
? params.tables
: [params.tables];
for (const table of tables) {
const keys = await this.redis.smembers(`table:${table}`);
if (keys.length > 0) {
await this.redis.del(...keys);
await this.redis.del(`table:${table}`);
}
}
}
}
// Use custom cache
const cache = new RedisCache(redisClient);
const db = drizzle(client, { cache, schema });

import { hashQuery } from 'drizzle-orm/cache/core';
// Drizzle uses SHA-256 to hash queries for cache keys
const queryString = 'SELECT * FROM users WHERE age > $1';
const params = [18];
const hash = await hashQuery(queryString, params);
console.log(hash); // '3f79bb7b...' (64-character hex string)
// Same query + params = same hash
const hash2 = await hashQuery(queryString, [18]);
console.log(hash === hash2); // true
// Different params = different hash
const hash3 = await hashQuery(queryString, [21]);
console.log(hash === hash3); // false

Configure logging to monitor and debug SQL queries.
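Beyond the interfaces and built-in loggers below, a common production variant logs the SQL but redacts parameter values so credentials and PII never reach the logs; a minimal sketch:
import type { Logger } from 'drizzle-orm';

class RedactingLogger implements Logger {
  logQuery(query: string, params: unknown[]): void {
    const safeParams =
      process.env.NODE_ENV === 'development'
        ? params
        : params.map(() => '[redacted]');
    console.log('[DB]', query, safeParams);
  }
}

const db = drizzle(client, { logger: new RedactingLogger() });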
interface Logger {
logQuery(query: string, params: unknown[]): void;
}
interface LogWriter {
write(message: string): void;
}

import { DefaultLogger } from 'drizzle-orm';
// Default logger logs to console
const logger = new DefaultLogger();
const db = drizzle(client, {
logger
});
// Outputs to console:
// Query: SELECT * FROM users WHERE id = $1 -- params: [42]
const user = await db.select().from(users).where(eq(users.id, 42));

import { DefaultLogger, LogWriter } from 'drizzle-orm';
class FileLogWriter implements LogWriter {
constructor(private filePath: string) {}
write(message: string): void {
fs.appendFileSync(this.filePath, message + '\n');
}
}
// Use custom writer
const logger = new DefaultLogger({
writer: new FileLogWriter('./queries.log')
});
const db = drizzle(client, { logger });

import { NoopLogger } from 'drizzle-orm';
// Disable logging
const logger = new NoopLogger();
const db = drizzle(client, {
logger
});
// No output
const allUsers = await db.select().from(users);

// Enable default logging
const db = drizzle(client, {
logger: true // Uses DefaultLogger
});
// Disable logging
const db2 = drizzle(client, {
logger: false // Uses NoopLogger
});

import { Logger } from 'drizzle-orm';
class CustomLogger implements Logger {
logQuery(query: string, params: unknown[]): void {
// Send to monitoring service
analytics.track('database_query', {
query,
params,
timestamp: new Date()
});
// Log to console in development
if (process.env.NODE_ENV === 'development') {
console.log(`[DB] ${query}`, params);
}
}
}
const db = drizzle(client, {
logger: new CustomLogger()
});class PerformanceLogger implements Logger {
logQuery(query: string, params: unknown[]): void {
const start = Date.now();
// Track query
const queryId = Math.random().toString(36);
console.log(`[${queryId}] START:`, query);
// Note: Actual execution happens after this returns
// For real timing, use OpenTelemetry tracing (see section 10)
}
}

import { Logger } from 'drizzle-orm';
import winston from 'winston';
class WinstonLogger implements Logger {
private logger = winston.createLogger({
level: 'info',
format: winston.format.json(),
transports: [
new winston.transports.File({ filename: 'queries.log' })
]
});
logQuery(query: string, params: unknown[]): void {
this.logger.info('Database query', {
query,
params,
timestamp: new Date().toISOString()
});
}
}
const db = drizzle(client, {
logger: new WinstonLogger()
});class ConditionalLogger implements Logger {
constructor(private enabled: boolean = true) {}
logQuery(query: string, params: unknown[]): void {
if (!this.enabled) return;
// Only log slow queries
if (query.includes('JOIN') || query.includes('DISTINCT')) {
console.warn('[SLOW QUERY]', query, params);
}
}
}
const db = drizzle(client, {
logger: new ConditionalLogger(process.env.LOG_QUERIES === 'true')
});

Automatically convert between database snake_case and JavaScript camelCase.
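In recent Drizzle versions the casing option also lets you omit explicit database names in the column builders and derive them from the TypeScript keys; a sketch (verify against your Drizzle version):
import { pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core';

// No explicit column names: with casing: 'snake_case',
// firstName -> first_name, createdAt -> created_at
const users = pgTable('users', {
  id: serial().primaryKey(),
  firstName: text().notNull(),
  createdAt: timestamp().defaultNow()
});

const db = drizzle(client, { casing: 'snake_case' });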
import { drizzle } from 'drizzle-orm/postgres-js';
// Convert database snake_case to JavaScript camelCase
const db = drizzle(client, {
casing: 'camelCase'
});
// Convert JavaScript camelCase to database snake_case
const db2 = drizzle(client, {
casing: 'snake_case'
});

Type Definition:
type Casing = 'snake_case' | 'camelCase';
interface DrizzleConfig {
casing?: Casing;
// ... other options
}

import { pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core';
// Define schema with snake_case (as in database)
const users = pgTable('users', {
id: serial('id').primaryKey(),
firstName: text('first_name').notNull(),
lastName: text('last_name').notNull(),
createdAt: timestamp('created_at').defaultNow()
});
const db = drizzle(client, {
casing: 'camelCase',
schema: { users }
});
// Query with camelCase in code
const result = await db.select({
id: users.id,
firstName: users.firstName, // Maps to 'first_name' in DB
lastName: users.lastName, // Maps to 'last_name' in DB
createdAt: users.createdAt // Maps to 'created_at' in DB
}).from(users);
// Result has camelCase keys
console.log(result[0].firstName); // Works!
console.log(result[0].createdAt); // Works!// Define schema with camelCase (as in code)
const users = pgTable('users', {
id: serial('id').primaryKey(),
firstName: text('firstName').notNull(),
lastName: text('lastName').notNull(),
createdAt: timestamp('createdAt').defaultNow()
});
const db = drizzle(client, {
casing: 'snake_case',
schema: { users }
});
// Drizzle converts to snake_case for database
// firstName -> first_name
// lastName -> last_name
// createdAt -> created_at

import { toSnakeCase, toCamelCase } from 'drizzle-orm/casing';
// Convert to snake_case
toSnakeCase('firstName'); // 'first_name'
toSnakeCase('createdAt'); // 'created_at'
toSnakeCase('userId'); // 'user_id'
toSnakeCase('isActive'); // 'is_active'
// Convert to camelCase
toCamelCase('first_name'); // 'firstName'
toCamelCase('created_at'); // 'createdAt'
toCamelCase('user_id'); // 'userId'
toCamelCase('is_active'); // 'isActive'

import { CasingCache } from 'drizzle-orm/casing';
// Internal caching mechanism
class CasingCache {
constructor(casing?: 'snake_case' | 'camelCase');
getColumnCasing(column: Column): string;
clearCache(): void;
}
// Drizzle uses this internally to cache conversions
// Manual usage is rarely needed
const casingCache = new CasingCache('camelCase');
const convertedName = casingCache.getColumnCasing(users.firstName);const users = pgTable('users', {
id: serial('id').primaryKey(),
fullName: text('full_name')
});
const posts = pgTable('posts', {
id: serial('id').primaryKey(),
userId: integer('user_id').references(() => users.id),
postTitle: text('post_title')
});
const db = drizzle(client, {
casing: 'camelCase',
schema: { users, posts }
});
// Relational query with automatic casing
const result = await db.query.users.findMany({
with: {
posts: true
}
});
// Result has camelCase keys
console.log(result[0].fullName); // Converted from full_name
console.log(result[0].posts[0].postTitle); // Converted from post_title// ❌ Don't mix conventions in column definitions
const users = pgTable('users', {
id: serial('id'),
first_name: text('first_name'), // snake_case
lastName: text('lastName') // camelCase - inconsistent!
});
// ✅ Keep database names consistent (snake_case)
const users = pgTable('users', {
id: serial('id'),
firstName: text('first_name'), // Code: camelCase, DB: snake_case
lastName: text('last_name')
});
const db = drizzle(client, {
casing: 'camelCase' // Drizzle handles conversion
});
// ✅ Or keep everything as snake_case
const users = pgTable('users', {
id: serial('id'),
first_name: text('first_name'),
last_name: text('last_name')
});
const db = drizzle(client); // No casing conversion needed

Integrate with OpenTelemetry for distributed tracing and performance monitoring.
// Internal tracer object (used by Drizzle)
import { tracer } from 'drizzle-orm/tracing';
// Tracer interface
interface Tracer {
startActiveSpan<F extends (span?: Span) => unknown>(
name: SpanName,
fn: F
): ReturnType<F>
}
type SpanName =
| 'drizzle.operation' // Top-level database operation
| 'drizzle.prepareQuery' // Query preparation
| 'drizzle.buildSQL' // SQL building
| 'drizzle.execute' // Query execution
| 'drizzle.driver.execute' // Driver-level execution
| 'drizzle.mapResponse'; // Response mapping

import { NodeSDK } from '@opentelemetry/sdk-node';
import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
import { JaegerExporter } from '@opentelemetry/exporter-jaeger';
// Initialize OpenTelemetry SDK
const sdk = new NodeSDK({
traceExporter: new JaegerExporter({
endpoint: 'http://localhost:14268/api/traces'
}),
instrumentations: [getNodeAutoInstrumentations()]
});
sdk.start();
// Drizzle automatically creates spans if OpenTelemetry is available
import { drizzle } from 'drizzle-orm/postgres-js';
const db = drizzle(client);
// This creates nested spans:
// - drizzle.operation
// - drizzle.prepareQuery
// - drizzle.execute
// - drizzle.driver.execute
// - drizzle.mapResponse
const allUsers = await db.select().from(users);

drizzle.operation (Top-level operation)
├── drizzle.prepareQuery (Query preparation)
│ └── drizzle.buildSQL (SQL building)
├── drizzle.execute (Execution coordination)
│ └── drizzle.driver.execute (Actual database call)
└── drizzle.mapResponse (Response transformation)

import { trace } from '@opentelemetry/api';
// Get current tracer
const tracer = trace.getTracer('my-app');
// Add custom spans around Drizzle operations
await tracer.startActiveSpan('user-creation-flow', async (span) => {
span.setAttribute('user.email', email);
span.setAttribute('user.role', 'admin');
try {
// Drizzle creates nested spans
const [user] = await db.insert(users).values({
email,
role: 'admin'
}).returning();
span.setAttribute('user.id', user.id);
return user;
} catch (error) {
span.recordException(error);
span.setStatus({ code: SpanStatusCode.ERROR });
throw error;
} finally {
span.end();
}
});import { context, trace } from '@opentelemetry/api';
// Parent span
const parentSpan = trace.getActiveSpan();
// Create child span context
const ctx = trace.setSpan(context.active(), parentSpan!);
// Run Drizzle operations in context
await context.with(ctx, async () => {
// These operations inherit the parent span
const users = await db.select().from(users);
const posts = await db.select().from(posts);
});import { JaegerExporter } from '@opentelemetry/exporter-jaeger';
import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node';
import { SimpleSpanProcessor } from '@opentelemetry/sdk-trace-base';
const provider = new NodeTracerProvider();
const exporter = new JaegerExporter({
serviceName: 'my-app',
endpoint: 'http://localhost:14268/api/traces'
});
provider.addSpanProcessor(new SimpleSpanProcessor(exporter));
provider.register();
// Drizzle operations are now traced to Jaeger
const db = drizzle(client);
const allUsers = await db.select().from(users);

import { ZipkinExporter } from '@opentelemetry/exporter-zipkin';
import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node';
import { SimpleSpanProcessor } from '@opentelemetry/sdk-trace-base';
const provider = new NodeTracerProvider();
const exporter = new ZipkinExporter({
serviceName: 'my-app',
url: 'http://localhost:9411/api/v2/spans'
});
provider.addSpanProcessor(new SimpleSpanProcessor(exporter));
provider.register();import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
const provider = new NodeTracerProvider();
const exporter = new OTLPTraceExporter({
url: 'https://api.honeycomb.io/v1/traces',
headers: {
'x-honeycomb-team': process.env.HONEYCOMB_API_KEY
}
});
provider.addSpanProcessor(new BatchSpanProcessor(exporter));
provider.register();

Useful helper functions for working with tables, columns, and query results.
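A very common use of these helpers is selecting every column except a sensitive one; a sketch, assuming the users table has a password column:
import { getTableColumns } from 'drizzle-orm';

// Assumes a 'password' column on users
const { password, ...safeColumns } = getTableColumns(users);

// All columns except password, still fully typed
const rows = await db.select(safeColumns).from(users);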
import { getTableColumns } from 'drizzle-orm';
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
email: text('email').notNull(),
age: integer('age')
});
// Get all columns as an object
const columns = getTableColumns(users);
console.log(columns);
// {
// id: Column { name: 'id', ... },
// name: Column { name: 'name', ... },
// email: Column { name: 'email', ... },
// age: Column { name: 'age', ... }
// }
// Use in queries
const result = await db.select(columns).from(users);
// Select subset using destructuring
const { id, name } = getTableColumns(users);
const result2 = await db.select({ id, name }).from(users);

Type Signature:
function getTableColumns<T extends Table>(
table: T
): T['_']['columns']

import { mapResultRow } from 'drizzle-orm/utils';
// Internal utility used by Drizzle to map database rows to JS objects
// You typically don't call this directly, but it's used internally
function mapResultRow<TResult>(
columns: SelectedFieldsOrdered<AnyColumn>,
row: unknown[],
joinsNotNullableMap: Record<string, boolean> | undefined
): TResult

import { orderSelectedFields } from 'drizzle-orm/utils';
// Internal utility that orders selected fields
// Used internally by query builders
function orderSelectedFields<TColumn extends AnyColumn>(
fields: Record<string, unknown>,
pathPrefix?: string[]
): SelectedFieldsOrdered<TColumn>

import { haveSameKeys } from 'drizzle-orm/utils';
// Check if two objects have the same keys
const obj1 = { id: 1, name: 'John', email: 'john@example.com' };
const obj2 = { id: 2, name: 'Jane', email: 'jane@example.com' };
const obj3 = { id: 3, name: 'Bob' }; // Missing 'email'
haveSameKeys(obj1, obj2); // true
haveSameKeys(obj1, obj3); // false

Type Signature:
function haveSameKeys(
left: Record<string, unknown>,
right: Record<string, unknown>
): boolean

import { mapUpdateSet } from 'drizzle-orm/utils';
// Internal utility that maps update values to SQL parameters
// Used internally by UPDATE query builders
function mapUpdateSet(
table: Table,
values: Record<string, unknown>
): UpdateSet
type UpdateSet = Record<string, SQL | Param | AnyColumn | null | undefined>;

import { getTableName } from 'drizzle-orm';
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name')
});
const tableName = getTableName(users);
console.log(tableName); // 'users'
// Useful for dynamic queries
const query = sql`SELECT * FROM ${sql.identifier(getTableName(users))}`;

Type Signature:
function getTableName(table: Table): string

import { isTable } from 'drizzle-orm';
const users = pgTable('users', {
id: serial('id')
});
console.log(isTable(users)); // true
console.log(isTable({})); // false
console.log(isTable(null)); // false

Type Signature:
function isTable(obj: unknown): obj is Table

import { is } from 'drizzle-orm';
import { Table, Column, SQL } from 'drizzle-orm';
// Check entity types
const users = pgTable('users', {
id: serial('id'),
name: text('name')
});
is(users, Table); // true
is(users.id, Column); // true
is(sql`SELECT 1`, SQL); // true
// Useful for type narrowing
function processValue(value: unknown) {
if (is(value, Column)) {
console.log('Column name:', value.name);
} else if (is(value, SQL)) {
console.log('SQL expression');
}
}

Type Signature:
function is<T>(
entity: unknown,
kind: { [entityKind]: string }
): entity is T

Drizzle provides specific error classes for different failure scenarios.
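These error classes combine naturally with a retry wrapper: PostgreSQL reports serialization conflicts as SQLSTATE 40001, so serializable transactions are often retried a few times before giving up. A sketch, assuming the driver error carried in cause exposes a code property:
import { DrizzleQueryError } from 'drizzle-orm';

async function withRetry<T>(fn: () => Promise<T>, attempts = 3): Promise<T> {
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn();
    } catch (error) {
      const code = error instanceof DrizzleQueryError
        ? (error.cause as { code?: string } | undefined)?.code
        : undefined;
      // 40001 = serialization_failure; retry, otherwise rethrow
      if (code !== '40001' || i === attempts - 1) throw error;
    }
  }
  throw new Error('unreachable');
}

const result = await withRetry(() =>
  db.transaction(async (tx) => {
    // ...serializable work...
  }, { isolationLevel: 'serializable' })
);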
import { DrizzleError } from 'drizzle-orm';
class DrizzleError extends Error {
constructor(options: {
message?: string;
cause?: unknown
})
}
// Usage
try {
// Some operation
} catch (error) {
if (error instanceof DrizzleError) {
console.error('Drizzle error:', error.message);
console.error('Cause:', error.cause);
}
}

import { DrizzleQueryError } from 'drizzle-orm';
class DrizzleQueryError extends Error {
constructor(
public query: string,
public params: any[],
public cause?: Error
)
}
// Thrown when query execution fails
try {
await db.select().from(users).where(eq(users.invalidColumn, 1));
} catch (error) {
if (error instanceof DrizzleQueryError) {
console.error('Failed query:', error.query);
console.error('Parameters:', error.params);
console.error('Root cause:', error.cause);
}
}

import { TransactionRollbackError } from 'drizzle-orm';
class TransactionRollbackError extends DrizzleError {
constructor()
}
// Thrown when transaction is explicitly rolled back
try {
await db.transaction(async (tx) => {
await tx.insert(users).values({ name: 'John' });
// Explicit rollback
tx.rollback();
});
} catch (error) {
if (error instanceof TransactionRollbackError) {
console.log('Transaction was rolled back');
}
}

import {
DrizzleError,
DrizzleQueryError,
TransactionRollbackError
} from 'drizzle-orm';
async function createUser(data: NewUser) {
try {
return await db.transaction(async (tx) => {
const [user] = await tx
.insert(users)
.values(data)
.returning();
// Additional operations...
return user;
});
} catch (error) {
// Handle specific error types
if (error instanceof TransactionRollbackError) {
console.log('Transaction rolled back intentionally');
return null;
}
if (error instanceof DrizzleQueryError) {
console.error('Query failed:', {
query: error.query,
params: error.params,
cause: error.cause
});
// Check for specific database errors
if ((error.cause as { code?: string } | undefined)?.code === '23505') { // 23505 = unique_violation
throw new Error('Duplicate email address');
}
throw new Error('Database query failed');
}
if (error instanceof DrizzleError) {
console.error('Drizzle error:', error.message);
throw error;
}
// Unknown error
console.error('Unexpected error:', error);
throw error;
}
}

Drizzle provides powerful type inference to ensure type safety between your schema and queries.
import { InferSelectModel } from 'drizzle-orm';
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
email: text('email').notNull(),
age: integer('age'),
createdAt: timestamp('created_at').defaultNow()
});
// Infer SELECT result type
type User = InferSelectModel<typeof users>;
// {
// id: number;
// name: string;
// email: string;
// age: number | null;
// createdAt: Date | null;
// }
// Use in functions
function processUser(user: User) {
console.log(user.id); // number
console.log(user.name); // string
console.log(user.age); // number | null
}
// Result matches type
const allUsers = await db.select().from(users);
allUsers.forEach(processUser); // Type-safe!

import { InferInsertModel } from 'drizzle-orm';
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
email: text('email').notNull(),
age: integer('age'),
createdAt: timestamp('created_at').defaultNow()
});
// Infer INSERT input type
type NewUser = InferInsertModel<typeof users>;
// {
// id?: number; // Optional (auto-generated)
// name: string; // Required
// email: string; // Required
// age?: number | null; // Optional
// createdAt?: Date | null; // Optional (has default)
// }
// Use for type-safe inserts
function createUser(data: NewUser) {
return db.insert(users).values(data).returning();
}
// Type-checked at compile time
createUser({
name: 'John',
email: 'john@example.com'
}); // ✅ Valid
createUser({
name: 'John'
}); // ❌ Error: missing 'email'
createUser({
name: 'John',
email: 'john@example.com',
invalidField: true
}); // ❌ Error: unknown property

const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
email: text('email').notNull()
});
// Property-based inference
type User = typeof users.$inferSelect;
// Same as: InferSelectModel<typeof users>
// Available on all table instances
function getUsers(): Promise<User[]> {
return db.select().from(users);
}const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
email: text('email').notNull()
});
// Property-based inference
type NewUser = typeof users.$inferInsert;
// Same as: InferInsertModel<typeof users>
// Use for insert operations
async function createUser(data: NewUser) {
const [user] = await db
.insert(users)
.values(data)
.returning();
return user;
}type User = InferSelectModel<typeof users>;
// Create partial types for updates
type UserUpdate = Partial<User>;
async function updateUser(id: number, data: UserUpdate) {
return db
.update(users)
.set(data)
.where(eq(users.id, id))
.returning();
}
// All fields optional
updateUser(1, { name: 'John' }); // ✅
updateUser(1, { age: 30 }); // ✅
updateUser(1, { name: 'John', email: 'john@example.com' }); // ✅type User = InferSelectModel<typeof users>;
// Pick only specific fields
type UserPreview = Pick<User, 'id' | 'name' | 'email'>;
async function getUserPreviews(): Promise<UserPreview[]> {
return db
.select({
id: users.id,
name: users.name,
email: users.email
})
.from(users);
}type User = InferSelectModel<typeof users>;
// Omit sensitive fields
type PublicUser = Omit<User, 'email' | 'createdAt'>;
function toPublicUser(user: User): PublicUser {
const { email, createdAt, ...publicData } = user;
return publicData;
}const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull()
});
const posts = pgTable('posts', {
id: serial('id').primaryKey(),
userId: integer('user_id').references(() => users.id),
title: text('title').notNull()
});
const usersRelations = relations(users, ({ many }) => ({
posts: many(posts)
}));
// Infer with relations
type User = InferSelectModel<typeof users>;
type Post = InferSelectModel<typeof posts>;
type UserWithPosts = User & {
posts: Post[];
};
const db = drizzle(client, {
schema: { users, posts, usersRelations }
});
// Fully typed result
const result: UserWithPosts[] = await db.query.users.findMany({
with: {
posts: true
}
});

import { GetColumnData } from 'drizzle-orm';
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: text('name').notNull(),
age: integer('age'),
metadata: jsonb('metadata').$type<{ theme: string; lang: string }>()
});
// Infer individual column types
type UserId = GetColumnData<typeof users.id>; // number
type UserName = GetColumnData<typeof users.name>; // string
type UserAge = GetColumnData<typeof users.age>; // number | null
type UserMeta = GetColumnData<typeof users.metadata>; // { theme: string; lang: string } | null

Build queries dynamically based on runtime conditions.
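Drizzle's builders normally allow each method to be called only once at the type level; calling .$dynamic() lifts that restriction so a query can be reassigned or passed through helper functions. A reusable pagination helper in that style (a sketch of the pattern for PostgreSQL builders):
import type { PgSelect } from 'drizzle-orm/pg-core';

function withPagination<T extends PgSelect>(qb: T, page: number, pageSize = 10) {
  return qb.limit(pageSize).offset((page - 1) * pageSize);
}

// $dynamic() lets the same builder flow through helpers and conditionals
let query = db.select().from(users).$dynamic();
query = withPagination(query, 2, 20);
const rows = await query;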
import { and, eq, gte, lte, like } from 'drizzle-orm';
import { SQL } from 'drizzle-orm';
interface UserFilters {
minAge?: number;
maxAge?: number;
namePattern?: string;
email?: string;
}
function buildUserQuery(filters: UserFilters) {
const conditions: SQL[] = [];
if (filters.minAge !== undefined) {
conditions.push(gte(users.age, filters.minAge));
}
if (filters.maxAge !== undefined) {
conditions.push(lte(users.age, filters.maxAge));
}
if (filters.namePattern) {
conditions.push(like(users.name, `%${filters.namePattern}%`));
}
if (filters.email) {
conditions.push(eq(users.email, filters.email));
}
// Build query
let query = db.select().from(users).$dynamic(); // $dynamic() allows conditional reassignment
if (conditions.length > 0) {
query = query.where(and(...conditions));
}
return query;
}
// Usage
const users1 = await buildUserQuery({ minAge: 18, maxAge: 30 });
const users2 = await buildUserQuery({ namePattern: 'John' });
const users3 = await buildUserQuery({}); // No filters

function getUsersWithColumns<T extends (keyof typeof users._)>(
columns: T[]
) {
const selectedColumns = columns.reduce((acc, col) => {
acc[col] = users[col];
return acc;
}, {} as Record<string, any>);
return db.select(selectedColumns).from(users);
}
// Select specific columns
const users1 = await getUsersWithColumns(['id', 'name']);
// Result: { id: number; name: string }[]
const users2 = await getUsersWithColumns(['id', 'name', 'email']);
// Result: { id: number; name: string; email: string }[]

import { asc, desc } from 'drizzle-orm';
type SortField = 'name' | 'email' | 'age' | 'createdAt';
type SortOrder = 'asc' | 'desc';
function getUsers(
sortField?: SortField,
sortOrder: SortOrder = 'asc'
) {
let query = db.select().from(users).$dynamic(); // $dynamic() allows conditional reassignment
if (sortField) {
const column = users[sortField];
const orderFn = sortOrder === 'asc' ? asc : desc;
query = query.orderBy(orderFn(column));
}
return query;
}
// Usage
const users1 = await getUsers('name', 'asc');
const users2 = await getUsers('age', 'desc');
const users3 = await getUsers(); // No sorting

interface PaginationOptions {
page: number;
pageSize: number;
}
function getPaginatedUsers(options: PaginationOptions) {
const { page, pageSize } = options;
const offset = (page - 1) * pageSize;
return db
.select()
.from(users)
.limit(pageSize)
.offset(offset);
}
// Usage
const page1 = await getPaginatedUsers({ page: 1, pageSize: 10 });
const page2 = await getPaginatedUsers({ page: 2, pageSize: 10 });class UserQueryBuilder {
private query = db.select().from(users);
whereAge(min?: number, max?: number) {
if (min !== undefined) {
this.query = this.query.where(gte(users.age, min));
}
if (max !== undefined) {
this.query = this.query.where(lte(users.age, max));
}
return this;
}
whereName(pattern: string) {
this.query = this.query.where(like(users.name, `%${pattern}%`));
return this;
}
orderBy(field: 'name' | 'age', order: 'asc' | 'desc' = 'asc') {
const orderFn = order === 'asc' ? asc : desc;
this.query = this.query.orderBy(orderFn(users[field]));
return this;
}
paginate(page: number, pageSize: number) {
this.query = this.query
.limit(pageSize)
.offset((page - 1) * pageSize);
return this;
}
execute() {
return this.query;
}
}
// Usage
const results = await new UserQueryBuilder()
.whereAge(18, 30)
.whereName('John')
.orderBy('name', 'asc')
.paginate(1, 10)
.execute();function getUsersWithRelations(includePosts = false, includeComments = false) {
let query = db
.select({
user: users,
post: includePosts ? posts : undefined,
comment: includeComments ? comments : undefined
})
.from(users);
if (includePosts) {
query = query.leftJoin(posts, eq(users.id, posts.userId));
}
if (includeComments) {
query = query.leftJoin(comments, eq(users.id, comments.userId));
}
return query;
}
// Usage
const users1 = await getUsersWithRelations(); // Just users
const users2 = await getUsersWithRelations(true); // Users with posts
const users3 = await getUsersWithRelations(true, true); // Users with posts and comments

interface SearchOptions {
query?: string;
filters?: {
category?: string;
minPrice?: number;
maxPrice?: number;
inStock?: boolean;
};
sort?: {
field: 'name' | 'price' | 'createdAt';
order: 'asc' | 'desc';
};
pagination?: {
page: number;
pageSize: number;
};
}
function searchProducts(options: SearchOptions) {
const conditions: SQL[] = [];
// Text search
if (options.query) {
conditions.push(
or(
like(products.name, `%${options.query}%`),
like(products.description, `%${options.query}%`)
)
);
}
// Filters
if (options.filters?.category) {
conditions.push(eq(products.category, options.filters.category));
}
if (options.filters?.minPrice !== undefined) {
conditions.push(gte(products.price, options.filters.minPrice));
}
if (options.filters?.maxPrice !== undefined) {
conditions.push(lte(products.price, options.filters.maxPrice));
}
if (options.filters?.inStock !== undefined) {
conditions.push(eq(products.inStock, options.filters.inStock));
}
// Build query
let query = db.select().from(products).$dynamic(); // $dynamic() allows conditional reassignment
// Apply WHERE
if (conditions.length > 0) {
query = query.where(and(...conditions));
}
// Apply sorting
if (options.sort) {
const orderFn = options.sort.order === 'asc' ? asc : desc;
query = query.orderBy(orderFn(products[options.sort.field]));
}
// Apply pagination
if (options.pagination) {
const { page, pageSize } = options.pagination;
query = query
.limit(pageSize)
.offset((page - 1) * pageSize);
}
return query;
}

Guidelines for efficient connection management across different drivers.
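For long-running servers, create the pool once at module scope as the examples below do; in dev setups with hot reload it is common to stash it on globalThis so reloads don't leak connections. A sketch of that pattern (module and variable names are assumptions):
// db.ts - shared module; import this instead of creating new pools per request
import { Pool } from 'pg';
import { drizzle } from 'drizzle-orm/node-postgres';

const globalForDb = globalThis as unknown as { pool?: Pool };

const pool = globalForDb.pool ?? new Pool({
  connectionString: process.env.DATABASE_URL,
  max: 10
});

// Reuse the same pool across hot reloads in development
if (process.env.NODE_ENV !== 'production') globalForDb.pool = pool;

export const db = drizzle(pool);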
import { Pool } from 'pg';
import { drizzle } from 'drizzle-orm/node-postgres';
// Create connection pool
const pool = new Pool({
host: 'localhost',
port: 5432,
user: 'postgres',
password: 'password',
database: 'mydb',
max: 20, // Maximum pool size
min: 5, // Minimum pool size
idleTimeoutMillis: 30000, // Close idle clients after 30s
connectionTimeoutMillis: 2000, // Wait 2s for connection
maxUses: 7500, // Close connection after 7500 uses
});
const db = drizzle(pool);
// Use database
const users = await db.select().from(users);
// Graceful shutdown
process.on('SIGTERM', async () => {
await pool.end();
process.exit(0);
});

import postgres from 'postgres';
import { drizzle } from 'drizzle-orm/postgres-js';
// Create connection with pooling
const sql = postgres('postgresql://user:pass@localhost:5432/db', {
max: 10, // Maximum connections
idle_timeout: 20, // Idle timeout in seconds
connect_timeout: 10, // Connection timeout
max_lifetime: 60 * 30, // Maximum connection lifetime (30 min)
prepare: true, // Use prepared statements
});
const db = drizzle(sql);
// Use database
const users = await db.select().from(users);
// Close all connections
await sql.end();

import mysql from 'mysql2/promise';
import { drizzle } from 'drizzle-orm/mysql2';
// Create connection pool
const pool = mysql.createPool({
host: 'localhost',
user: 'root',
password: 'password',
database: 'mydb',
connectionLimit: 10, // Maximum connections
queueLimit: 0, // Unlimited queue
waitForConnections: true, // Wait if no connections available
enableKeepAlive: true, // Keep connections alive
keepAliveInitialDelay: 0,
});
const db = drizzle(pool);
// Use database
const users = await db.select().from(users);
// Close pool
await pool.end();

import Database from 'better-sqlite3';
import { drizzle } from 'drizzle-orm/better-sqlite3';
// SQLite doesn't need pooling (single connection)
const sqlite = new Database('mydb.db', {
readonly: false,
fileMustExist: false,
timeout: 5000, // Busy timeout (5s)
verbose: console.log, // Log SQL queries
});
// Enable WAL mode for better concurrency
sqlite.pragma('journal_mode = WAL');
const db = drizzle(sqlite);
// Use database
const users = db.select().from(users).all();
// Close connection
sqlite.close();

// ❌ Don't create new pool on every request
export default async function handler(req, res) {
const pool = new Pool({ ... }); // Bad - creates new pool each time
const db = drizzle(pool);
const users = await db.select().from(users);
return res.json(users);
}
// ✅ Reuse pool across requests
const pool = new Pool({ ... }); // Create once
const db = drizzle(pool);
export default async function handler(req, res) {
const users = await db.select().from(users);
return res.json(users);
}

import { neon } from '@neondatabase/serverless';
import { drizzle } from 'drizzle-orm/neon-http';
// HTTP-based connection (no pooling needed)
const sql = neon(process.env.DATABASE_URL!);
const db = drizzle(sql);
// Each query is a separate HTTP request
const allUsers = await db.select().from(users);

import { connect } from '@planetscale/database';
import { drizzle } from 'drizzle-orm/planetscale-serverless';
// Serverless connection (pooling handled by platform)
const connection = connect({
host: process.env.DATABASE_HOST,
username: process.env.DATABASE_USERNAME,
password: process.env.DATABASE_PASSWORD
});
const db = drizzle(connection);
// Use database
const allUsers = await db.select().from(users);

import { Pool } from 'pg';
const pool = new Pool({ ... });
// Periodic health check
setInterval(async () => {
try {
const client = await pool.connect();
await client.query('SELECT 1');
client.release();
console.log('Pool health: OK');
} catch (error) {
console.error('Pool health: FAILED', error);
}
}, 30000); // Every 30 seconds
// Monitor pool statistics
pool.on('connect', () => {
console.log('New client connected');
});
pool.on('remove', () => {
console.log('Client removed from pool');
});
pool.on('error', (err) => {
console.error('Pool error:', err);
});

import { Pool } from 'pg';
class DatabaseManager {
private pool: Pool | null = null;
async connect() {
if (!this.pool) {
this.pool = new Pool({
host: 'localhost',
database: 'mydb',
max: 20,
idleTimeoutMillis: 30000
});
// Setup error handlers
this.pool.on('error', (err) => {
console.error('Unexpected pool error', err);
});
}
return drizzle(this.pool);
}
async disconnect() {
if (this.pool) {
await this.pool.end();
this.pool = null;
}
}
async getPoolStats() {
if (!this.pool) return null;
return {
total: this.pool.totalCount,
idle: this.pool.idleCount,
waiting: this.pool.waitingCount
};
}
}
// Usage
const dbManager = new DatabaseManager();
const db = await dbManager.connect();
// Use database
const users = await db.select().from(users);
// Graceful shutdown
process.on('SIGTERM', async () => {
await dbManager.disconnect();
process.exit(0);
});// CPU-bound workloads
const cpuBoundPool = new Pool({
max: numCPUs * 2 + 1 // 2x CPUs + 1
});
// I/O-bound workloads
const ioBoundPool = new Pool({
max: numCPUs * 10 // 10x CPUs
});
// Default recommendation
const defaultPool = new Pool({
max: 10, // Good starting point
min: 2 // Keep minimum connections warm
});
// High-traffic applications
const highTrafficPool = new Pool({
max: 50, // Scale based on load testing
min: 10
});

This comprehensive guide covers all major advanced features in Drizzle ORM. Each section includes complete type definitions, practical examples, and real-world patterns. Use these features to build robust, performant, and maintainable database applications.