Fastest full featured PostgreSQL client for Node.js and Deno with tagged template literals, transactions, streaming, and logical replication support
—
Quality: Pending — it has not yet been assessed whether this package follows best practices.
Impact: Pending — no eval scenarios have been run.
Type-safe parameter binding, custom PostgreSQL types, and automatic type conversion.
Create explicitly typed parameters for precise PostgreSQL type control.
/**
* Create typed parameter with specific PostgreSQL type
* @param value - JavaScript value to type
* @param oid - PostgreSQL type OID
* @returns Typed parameter object
*/
typed<T>(value: T, oid: number): Parameter<T>;
// Alias for typed function
types: typeof typed;
interface Parameter<T> {
readonly type: number; // PostgreSQL OID
readonly value: string | null; // Serialized value
readonly raw: T | null; // Original JavaScript value
}

Usage Examples:
// Explicit type casting
const userAge = sql.typed(25, 23); // 23 = INTEGER OID
const result = await sql`
INSERT INTO users (name, age)
VALUES (${userName}, ${userAge})
`;
// Using types alias
const timestamp = sql.types(new Date(), 1114); // 1114 = TIMESTAMP OID
await sql`UPDATE posts SET updated_at = ${timestamp} WHERE id = ${postId}`;
// Custom type handling
const point = sql.typed({ x: 10, y: 20 }, 600); // 600 = POINT OID
await sql`INSERT INTO locations (coordinates) VALUES (${point})`;

Handle PostgreSQL arrays with automatic type inference and explicit typing.
/**
* Create array parameter with optional type specification
* @param value - JavaScript array to convert
* @param type - Optional PostgreSQL element type OID
* @returns Array parameter object
*/
array<T>(value: T[], type?: number): ArrayParameter<T>;
interface ArrayParameter<T> extends Parameter<T[]> {
readonly array: true;
}

Usage Examples:
// Automatic type inference
const tags = sql.array(["javascript", "postgresql", "node"]);
await sql`
INSERT INTO posts (title, tags)
VALUES (${title}, ${tags})
`;
// Explicit array element type
const scores = sql.array([95, 87, 92], 23); // 23 = INTEGER
await sql`
INSERT INTO test_results (student_id, scores)
VALUES (${studentId}, ${scores})
`;
// Multi-dimensional arrays
const matrix = sql.array([[1, 2], [3, 4]], 23);
await sql`INSERT INTO matrices (data) VALUES (${matrix})`;
// Array queries
const userIds = [1, 2, 3, 4, 5];
const users = await sql`
SELECT * FROM users
WHERE id = ANY(${sql.array(userIds)})
`;

Handle JSON and JSONB data types with automatic serialization.
/**
* Create JSON parameter
* @param value - JavaScript object/array to serialize as JSON
* @returns JSON parameter object
*/
json(value: any): Parameter<any>;

Usage Examples:
// JSON object storage
const metadata = { version: "1.0", features: ["auth", "api"] };
await sql`
INSERT INTO applications (name, metadata)
VALUES (${appName}, ${sql.json(metadata)})
`;
// Complex nested objects
const userProfile = {
preferences: {
theme: "dark",
notifications: {
email: true,
push: false
}
},
settings: {
timezone: "UTC",
language: "en"
}
};
await sql`
UPDATE users
SET profile = ${sql.json(userProfile)}
WHERE id = ${userId}
`;
// JSON arrays
const permissions = ["read", "write", "admin"];
await sql`
INSERT INTO roles (name, permissions)
VALUES (${roleName}, ${sql.json(permissions)})
`;

Register custom PostgreSQL types for automatic conversion.
/**
 * Contract for a custom PostgreSQL type: how to serialize a JavaScript
 * value for the wire, and how to parse the raw PostgreSQL representation
 * back into a JavaScript value.
 */
interface PostgresType<T = any> {
to: number; // PostgreSQL type OID this JS value is serialized to
from: number[]; // PostgreSQL type OIDs whose result values are parsed with `parse`
serialize: (value: T) => unknown; // Convert JS value to PostgreSQL format
parse: (raw: any) => T; // Convert PostgreSQL value to JS format
}
// Built-in BigInt type example
const BigInt: PostgresType<bigint>;

Usage Examples:
// Using a native bigint value (parsed/serialized via the built-in BigInt PostgresType)
const largeMoney = 999999999999999999n;
await sql`
INSERT INTO transactions (amount)
VALUES (${largeMoney})
`;
// Custom type definition (conceptual - would be configured in options)
const customPointType = {
to: 600, // POINT OID
from: [600],
serialize: (point) => `(${point.x},${point.y})`,
parse: (raw) => {
const [x, y] = raw.slice(1, -1).split(',');
return { x: parseFloat(x), y: parseFloat(y) };
}
};
// Using custom types through options
const sql = postgres({
types: {
point: customPointType
}
});
// Now point type is available
const location = sql.types.point({ x: 10.5, y: 20.3 });
await sql`INSERT INTO locations (coordinates) VALUES (${location})`;

Understanding how postgres.js automatically converts between JavaScript and PostgreSQL types.
// Built-in type mappings (automatic conversion)
interface TypeMappings {
// JavaScript → PostgreSQL
string: 25; // TEXT
number: [21, 23, 26, 700, 701]; // SMALLINT, INTEGER, OID, REAL, DOUBLE
boolean: 16; // BOOLEAN
Date: [1082, 1114, 1184]; // DATE, TIMESTAMP, TIMESTAMPTZ
Buffer: 17; // BYTEA
Array: 'inferred'; // Array type based on elements
Object: [114, 3802]; // JSON, JSONB (via sql.json())
}

Usage Examples:
// Automatic conversions (no explicit typing needed)
await sql`
INSERT INTO users (
name, -- string → TEXT
age, -- number → INTEGER
active, -- boolean → BOOLEAN
birth_date, -- Date → TIMESTAMP
avatar -- Buffer → BYTEA
) VALUES (
${userName},
${userAge},
${isActive},
${birthDate},
${avatarBuffer}
)
`;
// Arrays are auto-typed based on content
const tags = ["javascript", "database"]; // → TEXT[]
const scores = [95, 87, 92]; // → INTEGER[]
const flags = [true, false, true]; // → BOOLEAN[]
await sql`
INSERT INTO posts (tags, scores, flags)
VALUES (${sql.array(tags)}, ${sql.array(scores)}, ${sql.array(flags)})
`;
// Dates are automatically formatted
const now = new Date();
const yesterday = new Date(Date.now() - 86400000);
await sql`
SELECT * FROM events
WHERE created_at BETWEEN ${yesterday} AND ${now}
`;

How postgres.js infers and validates types during query execution.
Usage Examples:
// Type inference for arrays
const mixedArray = [1, "2", 3]; // Will be treated as TEXT[] (most general type)
const numberArray = [1, 2, 3]; // Will be treated as INTEGER[]
const stringArray = ["a", "b"]; // Will be treated as TEXT[]
// Null handling
const nullableValue = maybeValue ?? null;
await sql`
INSERT INTO users (optional_field)
VALUES (${nullableValue})
`; // null is properly handled
// Undefined handling (transforms to null by default)
const result = await sql`
INSERT INTO users (name, email)
VALUES (${name}, ${email || undefined})
`; // undefined becomes null
// Buffer handling for binary data
const binaryData = Buffer.from("Hello, World!", "utf8");
await sql`
INSERT INTO files (content)
VALUES (${binaryData})
`; // Automatically converts to BYTEA

Configure custom transformations for columns, values, and rows.
/**
 * Options accepted under the `transform` key of postgres():
 * hooks applied to column names, individual values, and whole rows.
 */
interface TransformOptions {
undefined?: any; // Replacement value used when a parameter is `undefined`
column?: ColumnTransform; // Column-name transforms
value?: ValueTransform; // Cell-value transforms
row?: RowTransform; // Whole-row transforms
}
/**
 * Column-name transforms: `from` renames columns in query results,
 * `to` renames column identifiers supplied to dynamic queries.
 */
interface ColumnTransform {
from?: (column: string) => string; // Transform result column names
to?: (column: string) => string; // Transform input column names
}
/**
 * Cell-value transforms: `from` maps values read out of results
 * (optionally using column metadata); `to` is declared but unused.
 */
interface ValueTransform {
from?: (value: any, column?: Column) => any; // Transform result values
to?: (value: any) => any; // Transform input values (unused)
}
interface RowTransform {
from?: (row: Row) => any; // Transform entire result rows
to?: (row: Row) => any; // Transform input rows (unused)
}

Usage Examples:
// Snake case to camel case transformation
const sql = postgres({
transform: {
column: {
from: postgres.toCamel, // Convert snake_case columns to camelCase
to: postgres.fromCamel // Convert camelCase to snake_case for queries
},
undefined: null // Convert undefined to null
}
});
// Now columns are automatically transformed
const users = await sql`SELECT user_id, first_name, last_name FROM users`;
// Result objects will have: userId, firstName, lastName
// Use camelCase in dynamic queries
const columnName = "firstName"; // Will be converted to first_name
await sql`SELECT ${sql(columnName)} FROM users`;
// Custom value transformation
const sql = postgres({
transform: {
value: {
from: (value, column) => {
// Custom date parsing for specific columns
if (column?.name.endsWith('_date') && typeof value === 'string') {
return new Date(value);
}
return value;
}
}
}
});

Built-in utilities for converting between naming conventions.
// Standalone functions
function toPascal(str: string): string;
function fromPascal(str: string): string;
function toCamel(str: string): string;
function fromCamel(str: string): string;
function toKebab(str: string): string;
function fromKebab(str: string): string;
// Namespace objects with transformers
const pascal: {
column: { from: (str: string) => string; to: (str: string) => string };
value: { from: (str: unknown, column: Column) => string };
};
const camel: {
column: { from: (str: string) => string; to: (str: string) => string };
value: { from: (str: unknown, column: Column) => string };
};
const kebab: {
column: { from: (str: string) => string; to: (str: string) => string };
value: { from: (str: unknown, column: Column) => string };
};

Usage Examples:
// Manual case conversion
const snakeCase = "user_profile_data";
const camelCase = postgres.toCamel(snakeCase); // "userProfileData"
const pascalCase = postgres.toPascal(snakeCase); // "UserProfileData"
const kebabCase = postgres.toKebab(snakeCase); // "user-profile-data"
// Using namespace transformers
const sql = postgres({
transform: {
column: postgres.camel.column, // Uses both from and to transformers
value: postgres.camel.value
}
});
// Or individual transformers
const sql = postgres({
transform: {
column: {
from: postgres.toCamel, // snake_case → camelCase for results
to: postgres.fromCamel // camelCase → snake_case for queries
}
}
});

Install with Tessl CLI
npx tessl i tessl/npm-postgres