Warehouse-specific SQL builders generate platform-specific SQL syntax for field quoting, string escaping, metric aggregations, and string concatenation. Each warehouse has a corresponding SQL builder that encapsulates its SQL dialect.
Most SQL builders are directly importable from @lightdash/warehouses, but one is only accessible via the factory function:
✅ Directly Importable:
- BigquerySqlBuilder - Exported via BigqueryWarehouseClient module
- DatabricksSqlBuilder - Exported via DatabricksWarehouseClient module
- PostgresSqlBuilder - Exported via PostgresWarehouseClient module
- RedshiftSqlBuilder - Exported via RedshiftWarehouseClient module
- SnowflakeSqlBuilder - Exported via SnowflakeWarehouseClient module
- TrinoSqlBuilder - Exported via TrinoWarehouseClient module

❌ NOT Directly Importable:
- ClickhouseSqlBuilder - Only accessible via warehouseSqlBuilderFromType(SupportedDbtAdapter.CLICKHOUSE)

// ✅ CORRECT - Use factory function (works for all builders)
import { warehouseSqlBuilderFromType } from '@lightdash/warehouses';
import { SupportedDbtAdapter } from '@lightdash/common';
const clickhouseBuilder = warehouseSqlBuilderFromType(SupportedDbtAdapter.CLICKHOUSE);
const postgresBuilder = warehouseSqlBuilderFromType(SupportedDbtAdapter.POSTGRES);
// ✅ CORRECT - Direct import (except Clickhouse)
import { BigquerySqlBuilder, PostgresSqlBuilder } from '@lightdash/warehouses';
// ❌ WRONG - ClickhouseSqlBuilder is not exported
import { ClickhouseSqlBuilder } from '@lightdash/warehouses'; // Will fail!

Create an appropriate SQL builder instance based on adapter type.
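Since the factory throws for unrecognized adapter types (see the signature below), callers handling arbitrary adapter strings may want a defensive wrapper. A minimal sketch; the helper name is illustrative:

import { warehouseSqlBuilderFromType } from '@lightdash/warehouses';

// Hypothetical helper: returns undefined instead of throwing when the
// adapter string is not a supported type.
function tryGetSqlBuilder(adapterType: string) {
    try {
        return warehouseSqlBuilderFromType(adapterType);
    } catch {
        return undefined;
    }
}

const pg = tryGetSqlBuilder('postgres'); // PostgresSqlBuilder instance
const unknown = tryGetSqlBuilder('not-a-warehouse'); // undefined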
/**
* Creates a SQL builder instance based on adapter type
* @param adapterType - DBT adapter type string or SupportedDbtAdapter enum value
* @param startOfWeek - Optional week start day (WeekDay enum value, null, or undefined)
* @returns SQL builder instance for the specified adapter type
* @throws {Error} If adapter type is not recognized
*/
function warehouseSqlBuilderFromType(
adapterType: string | SupportedDbtAdapter,
startOfWeek?: WeekDay | null
): BigquerySqlBuilder | ClickhouseSqlBuilder | DatabricksSqlBuilder | PostgresSqlBuilder | RedshiftSqlBuilder | SnowflakeSqlBuilder | TrinoSqlBuilder;

Usage Examples:
import { warehouseSqlBuilderFromType } from '@lightdash/warehouses';
import { SupportedDbtAdapter, WeekDay } from '@lightdash/common';
// Create builder by enum
const pgBuilder = warehouseSqlBuilderFromType(SupportedDbtAdapter.POSTGRES);
const bqBuilder = warehouseSqlBuilderFromType(SupportedDbtAdapter.BIGQUERY);
// Create builder with week start configuration
const sfBuilder = warehouseSqlBuilderFromType(
SupportedDbtAdapter.SNOWFLAKE,
WeekDay.MONDAY
);
// Create builder by string
const pgBuilder2 = warehouseSqlBuilderFromType('postgres');
// Use builder
const fieldQuote = pgBuilder.getFieldQuoteChar(); // '"'
const escapedStr = pgBuilder.escapeString("O'Reilly"); // "O''Reilly"

All SQL builders implement a common interface. The base class itself is not exported, but every builder instance returned by warehouseSqlBuilderFromType() implements the methods below.
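Because the base class type is not exported, downstream code that accepts "any SQL builder" can declare a minimal structural type covering only the methods it uses. A sketch under that assumption:

// Local structural type: any builder returned by
// warehouseSqlBuilderFromType() satisfies it.
interface StringEscaper {
    getStringQuoteChar(): string;
    escapeString(value: string): string;
}

// Hypothetical helper: escape a raw value and wrap it in the
// warehouse's string-literal quote character.
function toSqlLiteral(builder: StringEscaper, raw: string): string {
    const quote = builder.getStringQuoteChar();
    return `${quote}${builder.escapeString(raw)}${quote}`;
}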
/**
* Common SQL builder interface
* All warehouse-specific builders implement these methods
* Note: The base class is not exported - use the factory function
* Note: Each concrete builder class also defines a `type` property
* (e.g., `readonly type = WarehouseTypes.BIGQUERY` or `type = WarehouseTypes.POSTGRES`).
* Most implementations use `readonly`, but PostgreSQL and Redshift use a non-readonly `type` property.
* This difference doesn't affect usage - the type property should not be modified in either case.
* This property is not part of the base class interface.
*/
interface WarehouseSqlBuilder {
/**
* Get configured week start day
* @returns WeekDay enum value, null, or undefined
*/
getStartOfWeek(): WeekDay | null | undefined;
/**
* Get DBT adapter type for this warehouse
* @returns SupportedDbtAdapter enum value
*/
getAdapterType(): SupportedDbtAdapter;
/**
* Get character used for quoting field/column names
* Default: double quote (")
* @returns Quote character
*/
getFieldQuoteChar(): string;
/**
* Get character used for quoting string literals
* Default: single quote (')
* @returns Quote character
*/
getStringQuoteChar(): string;
/**
* Get character used for escaping strings
* Default: backslash (\)
* @returns Escape character
*/
getEscapeStringQuoteChar(): string;
/**
* Get floating-point type name for this warehouse
* Default: 'FLOAT'
* @returns Type name
*/
getFloatingType(): string;
/**
* Generate SQL for metric aggregation
* Default: AVG, COUNT, COUNT DISTINCT, SUM, MIN, MAX
* PostgreSQL: AVERAGE casts to DOUBLE PRECISION for accurate results
* Percentile/median must be overridden by subclasses
* @param sql - Column SQL expression
* @param metric - Metric definition (type, sql, percentile)
* @returns SQL string for the metric aggregation
*/
getMetricSql(sql: string, metric: Metric): string;
/**
* Generate SQL for string concatenation
* Default: CONCAT(arg1, arg2, ...)
* @param args - String expressions to concatenate
* @returns SQL concatenation expression
*/
concatString(...args: string[]): string;
/**
* Escape string value for SQL query
* Default: doubles single quotes (O'Reilly -> O''Reilly)
* Also removes SQL comments, escapes backslashes, and removes null bytes
* @param value - Raw string value
* @returns Escaped string safe for SQL query
*/
escapeString(value: string): string;
}

Usage Examples:
import { warehouseClientFromCredentials } from '@lightdash/warehouses';
import { FieldType, MetricType } from '@lightdash/common';
// Use via warehouse client (clients implement WarehouseSqlBuilder interface)
const client = warehouseClientFromCredentials(credentials);
// Get SQL syntax elements
const fieldQuote = client.getFieldQuoteChar();
const quotedField = `${fieldQuote}user_id${fieldQuote}`;
const stringQuote = client.getStringQuoteChar();
const quotedString = `${stringQuote}Hello${stringQuote}`;
// Escape user input
const userInput = "O'Reilly's Book";
const escaped = client.escapeString(userInput);
const safeSql = `SELECT * FROM books WHERE title = '${escaped}'`;
// Generate metric SQL (requires complete Metric object with Field properties)
const sumMetric = client.getMetricSql('revenue', {
fieldType: FieldType.METRIC,
type: MetricType.SUM,
sql: 'revenue',
name: 'total_revenue',
label: 'Total Revenue',
table: 'orders',
tableLabel: 'Orders',
hidden: false,
});
// Result: 'SUM(revenue)'
const avgMetric = client.getMetricSql('price', {
fieldType: FieldType.METRIC,
type: MetricType.AVERAGE,
sql: 'price',
name: 'avg_price',
label: 'Average Price',
table: 'products',
tableLabel: 'Products',
hidden: false,
});
// Result: 'AVG(price)'
// Generate concatenation
const fullName = client.concatString('"first_name"', "' '", '"last_name"');
// PostgreSQL: "first_name" || ' ' || "last_name"
// BigQuery/Snowflake: CONCAT("first_name", ' ', "last_name")

Each warehouse has a dedicated SQL builder with platform-specific implementations.
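For a quick sense of how the dialects diverge, a short sketch comparing two builders obtained from the factory (return values taken from the per-builder notes below):

import { warehouseSqlBuilderFromType } from '@lightdash/warehouses';
import { SupportedDbtAdapter } from '@lightdash/common';

const pg = warehouseSqlBuilderFromType(SupportedDbtAdapter.POSTGRES);
const bq = warehouseSqlBuilderFromType(SupportedDbtAdapter.BIGQUERY);

// Same API surface, dialect-specific answers:
pg.getFieldQuoteChar(); // '"'
bq.getFieldQuoteChar(); // '`'
pg.getFloatingType();   // 'FLOAT' (base default)
bq.getFloatingType();   // 'FLOAT64'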
class BigquerySqlBuilder implements WarehouseSqlBuilder {
readonly type = WarehouseTypes.BIGQUERY;
getAdapterType(): SupportedDbtAdapter; // Returns BIGQUERY
getFieldQuoteChar(): string; // Returns '`' (backtick)
getFloatingType(): string; // Returns 'FLOAT64'
getMetricSql(sql: string, metric: Metric): string; // Uses APPROX_QUANTILES for percentile
escapeString(value: string): string; // Backslash escaping for quotes (', "), escapes backslashes, removes comments and null bytes
}

BigQuery-Specific Features:
- Field quoting: `field_name` (backticks)

// Note: ClickhouseSqlBuilder is NOT exported from index.ts
// Only accessible via warehouseSqlBuilderFromType()
class ClickhouseSqlBuilder implements WarehouseSqlBuilder {
readonly type = WarehouseTypes.CLICKHOUSE;
getAdapterType(): SupportedDbtAdapter; // Returns CLICKHOUSE
getEscapeStringQuoteChar(): string; // Returns "'" (single quote)
getMetricSql(sql: string, metric: Metric): string; // Uses quantile() for percentile, median() for median
escapeString(value: string): string; // Doubles single quotes, escapes backslashes, removes comments and null bytes
}

Clickhouse-Specific Features:
- Field quoting: "field_name" (inherited from base default)
- Percentile: quantile(0.95)(column)
- Query parameters: {paramName: Type} syntax in SQL queries

class DatabricksSqlBuilder implements WarehouseSqlBuilder {
readonly type = WarehouseTypes.DATABRICKS;
getAdapterType(): SupportedDbtAdapter; // Returns DATABRICKS
getFieldQuoteChar(): string; // Returns '`' (backtick)
getMetricSql(sql: string, metric: Metric): string; // Uses PERCENTILE function
escapeString(value: string): string; // Spark-specific escaping
}

Databricks-Specific Features:
- Field quoting: `field_name` (backticks)

class PostgresSqlBuilder implements WarehouseSqlBuilder {
type = WarehouseTypes.POSTGRES;
getAdapterType(): SupportedDbtAdapter; // Returns POSTGRES
getEscapeStringQuoteChar(): string; // Returns "'" (single quote)
getMetricSql(sql: string, metric: Metric): string; // AVERAGE casts to DOUBLE PRECISION, uses PERCENTILE_CONT for percentile
concatString(...args: string[]): string; // Uses || operator
escapeString(value: string): string; // Doubles single quotes, escapes backslashes, removes comments and null bytes
}

PostgreSQL-Specific Features:
"field_name"AVG(column::DOUBLE PRECISION)(a || b || c)getEscapeStringQuoteChar() returns "'" (single quote), meaning PostgreSQL escapes single quotes by doubling themclass RedshiftSqlBuilder extends PostgresSqlBuilder {
type = WarehouseTypes.REDSHIFT;
getAdapterType(): SupportedDbtAdapter; // Returns REDSHIFT
// Inherits all PostgreSQL methods
}

Redshift-Specific Features:
- Inherits all PostgreSQL behavior (field quoting, concatenation, escaping)
class SnowflakeSqlBuilder implements WarehouseSqlBuilder {
readonly type = WarehouseTypes.SNOWFLAKE;
getAdapterType(): SupportedDbtAdapter; // Returns SNOWFLAKE
getMetricSql(sql: string, metric: Metric): string; // Uses PERCENTILE_CONT for percentile
escapeString(value: string): string; // Doubles single quotes, escapes backslashes, removes comments and null bytes
}

Snowflake-Specific Features:
"field_name"class TrinoSqlBuilder implements WarehouseSqlBuilder {
readonly type = WarehouseTypes.TRINO;
getAdapterType(): SupportedDbtAdapter; // Returns TRINO
getEscapeStringQuoteChar(): string; // Returns "'" (single quote)
getFloatingType(): string; // Returns 'DOUBLE'
getMetricSql(sql: string, metric: Metric): string; // Uses APPROX_PERCENTILE for percentile
escapeString(value: string): string; // Doubles single quotes, escapes backslashes, removes comments and null bytes
}

Trino-Specific Features:
"field_name"SQL builders generate warehouse-specific SQL for metric aggregations:
Standard metrics use the same SQL across all warehouses:
// SUM
MetricType.SUM -> 'SUM(column)'
// COUNT
MetricType.COUNT -> 'COUNT(column)'
// COUNT DISTINCT
MetricType.COUNT_DISTINCT -> 'COUNT(DISTINCT column)'
// AVERAGE
MetricType.AVERAGE -> 'AVG(column)'
// PostgreSQL: 'AVG(column::DOUBLE PRECISION)' (ensures accurate floating-point results)
// MIN
MetricType.MIN -> 'MIN(column)'
// MAX
MetricType.MAX -> 'MAX(column)'

Percentile metrics vary by warehouse:
BigQuery (approximate):
// Median
MetricType.MEDIAN -> 'APPROX_QUANTILES(column, 100)[OFFSET(50)]'
// 95th percentile (percentile values are 0-100, not 0-1)
{ type: MetricType.PERCENTILE, percentile: 95 }
-> 'APPROX_QUANTILES(column, 100)[OFFSET(95)]'

Clickhouse:
// Median
MetricType.MEDIAN -> 'median(column)'
// 95th percentile (input as 95, converted to 0.95)
{ type: MetricType.PERCENTILE, percentile: 95 }
-> 'quantile(0.95)(column)'

Databricks:
// Median
MetricType.MEDIAN -> 'PERCENTILE(column, 0.5)'
// 95th percentile (input as 95, converted to 0.95)
{ type: MetricType.PERCENTILE, percentile: 95 }
-> 'PERCENTILE(column, 0.95)'

PostgreSQL/Snowflake (exact):
// Median
MetricType.MEDIAN -> 'PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY column)'
// 95th percentile (input as 95, converted to 0.95)
{ type: MetricType.PERCENTILE, percentile: 95 }
-> 'PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY column)'

Trino (approximate):
// Median
MetricType.MEDIAN -> 'APPROX_PERCENTILE(column, 0.5)'
// 95th percentile (input as 95, converted to 0.95)
{ type: MetricType.PERCENTILE, percentile: 95 }
-> 'APPROX_PERCENTILE(column, 0.95)'

Note: Percentile values are specified as 0-100 (e.g., 95 for the 95th percentile). The SQL builders automatically convert to the 0-1 range (e.g., 0.95) for warehouses that require it.
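A sketch showing the same percentile metric rendered for two dialects (the metric object is hypothetical; only type, sql, and percentile affect the output):

import { warehouseSqlBuilderFromType } from '@lightdash/warehouses';
import { SupportedDbtAdapter, FieldType, MetricType } from '@lightdash/common';

// Hypothetical p95 metric; the remaining Field properties are required
// but do not influence the generated SQL.
const p95 = {
    fieldType: FieldType.METRIC,
    type: MetricType.PERCENTILE,
    percentile: 95,
    sql: 'load_time',
    name: 'p95_load_time',
    label: 'P95 Load Time',
    table: 'events',
    tableLabel: 'Events',
    hidden: false,
};

const pg = warehouseSqlBuilderFromType(SupportedDbtAdapter.POSTGRES);
const trino = warehouseSqlBuilderFromType(SupportedDbtAdapter.TRINO);

pg.getMetricSql('load_time', p95);
// 'PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY load_time)'
trino.getMetricSql('load_time', p95);
// 'APPROX_PERCENTILE(load_time, 0.95)'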
SQL builders generate warehouse-specific concatenation syntax:
PostgreSQL/Redshift (|| operator with parentheses):
concatString('"first_name"', "' '", '"last_name"')
-> '("first_name" || \' \' || "last_name")'

BigQuery/Databricks/Snowflake/Trino (CONCAT function):
concatString('"first_name"', "' '", '"last_name"')
-> 'CONCAT("first_name", \' \', "last_name")'

Different warehouses use different quote characters for field names:
| Warehouse | Quote Char | Example |
|---|---|---|
| BigQuery | ` | `user_id` |
| Databricks | ` | `user_id` |
| PostgreSQL | " | "user_id" |
| Redshift | " | "user_id" |
| Snowflake | " | "user_id" |
| Trino | " | "user_id" |
| Clickhouse | " | "user_id" |
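A small sketch applying the table above: wrap an identifier in whichever quote character the target warehouse uses (the helper is illustrative):

import { warehouseSqlBuilderFromType } from '@lightdash/warehouses';

// Hypothetical helper: quote a field name for the builder's dialect.
const quoteField = (builder: { getFieldQuoteChar(): string }, field: string) => {
    const q = builder.getFieldQuoteChar();
    return `${q}${field}${q}`;
};

quoteField(warehouseSqlBuilderFromType('bigquery'), 'user_id'); // `user_id`
quoteField(warehouseSqlBuilderFromType('postgres'), 'user_id'); // "user_id"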
All warehouses use single quotes for string literals, but escaping varies:
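For instance, the same input escapes differently per dialect (a quick sketch; per-warehouse details follow):

import { warehouseSqlBuilderFromType } from '@lightdash/warehouses';

const pg = warehouseSqlBuilderFromType('postgres');
const bq = warehouseSqlBuilderFromType('bigquery');

// Same input, dialect-specific escaping:
pg.escapeString("O'Reilly"); // O''Reilly  (doubled single quote)
bq.escapeString("O'Reilly"); // O\'Reilly  (backslash-escaped)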
PostgreSQL/Redshift/Snowflake/Trino (double single quotes + backslash escaping):
escapeString("O'Reilly") -> "O''Reilly"
escapeString("path\\file") -> "path\\\\file"
// Doubles single quotes, escapes backslashes for LIKE wildcards
// Also removes SQL comments and null bytes

BigQuery (backslash escaping + cleanup):
escapeString("O'Reilly /* comment */") -> "O\'Reilly "
// Escapes single and double quotes with backslashes
// Also removes SQL comments, escapes backslashes, and removes null bytes

Databricks (Spark-specific escaping):
// Uses Spark SQL escaping rules with backslash handling
// Removes SQL comments and null bytes

/**
* Metric interface used by SQL builders (from @lightdash/common)
*
* IMPORTANT: The full Metric interface extending Field is required when calling getMetricSql().
* While SQL builders only use the `type`, `sql`, and `percentile` properties,
* callers must provide complete Metric objects with all required Field properties.
*
* Properties actively used by SQL builders (marked with *):
* - type* - Determines which aggregation function to use (SUM, AVG, COUNT, etc.)
* - sql* - The column expression to aggregate
* - percentile* - Used for PERCENTILE and MEDIAN metric types
*/
interface Metric extends Field {
/** Always FieldType.METRIC for metrics */
fieldType: FieldType.METRIC;
/** Metric aggregation type - USED BY SQL BUILDERS */
type: MetricType;
/** SQL expression for the metric - USED BY SQL BUILDERS */
sql: string;
/** Optional percentile value (0-100) - USED BY SQL BUILDERS */
percentile?: number;
/** Column IDs to show as underlying values (optional) */
showUnderlyingValues?: string[];
/** Filter rules to apply to the metric (optional) */
filters?: MetricFilterRule[];
/** Custom format options for display (optional) */
formatOptions?: CustomFormat;
/** Reference to dimension this metric is based on (optional) */
dimensionReference?: string;
/** Required attributes for the dimension (optional) */
requiredAttributes?: Record<string, string | string[]>;
/** Default time dimension for the metric (optional) */
defaultTimeDimension?: DefaultTimeDimension;
/** Spotlight configuration (optional) */
spotlight?: {
visibility: 'visible' | 'hidden';
categories?: string[];
};
/** AI hint for metric usage (optional) */
aiHint?: string | string[];
}
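Since getMetricSql() requires a complete Metric even though only type, sql, and percentile drive the generated SQL, a small factory helper can cut the boilerplate. A sketch; the helper and its placeholder values are illustrative:

import { FieldType, MetricType, type Metric } from '@lightdash/common';

// Hypothetical helper: fill the required Field properties with
// placeholders so callers supply only what SQL builders actually read.
const makeMetric = (
    name: string,
    type: MetricType,
    sql: string,
    percentile?: number,
): Metric => ({
    fieldType: FieldType.METRIC,
    type,
    sql,
    percentile,
    name,
    label: name,
    table: 'unknown',
    tableLabel: 'Unknown',
    hidden: false,
});

const p99 = makeMetric('p99_latency', MetricType.PERCENTILE, 'latency_ms', 99);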
/**
* Base Field interface (from @lightdash/common)
* All metrics, dimensions, and table calculations extend this interface
*/
interface Field {
/** Field type discriminator (METRIC or DIMENSION) */
fieldType: FieldType;
/** Type discriminator string */
type: string;
/** Unique field name within the table */
name: string;
/** Human-readable label */
label: string;
/** Table name this field belongs to */
table: string;
/** Human-readable table label */
tableLabel: string;
/** Templated SQL expression */
sql: string;
/** Optional description */
description?: string;
/** Whether this field is hidden */
hidden: boolean;
/** Optional format specification */
format?: Format | string;
/** Optional field groups */
groups?: string[];
/** Optional field URLs */
urls?: FieldUrl[];
/** Optional index for ordering */
index?: number;
/** Optional tags */
tags?: string[];
// ... plus additional optional fields
}
/**
* Field type enum
* Discriminates between metrics and dimensions
*/
enum FieldType {
METRIC = 'metric',
DIMENSION = 'dimension',
}
/**
* Supported metric aggregation types
*/
enum MetricType {
SUM = 'sum',
COUNT = 'count',
COUNT_DISTINCT = 'count_distinct',
AVERAGE = 'average',
MIN = 'min',
MAX = 'max',
MEDIAN = 'median',
PERCENTILE = 'percentile',
}
/**
* DBT adapter type enum
*/
enum SupportedDbtAdapter {
BIGQUERY = 'bigquery',
DATABRICKS = 'databricks',
POSTGRES = 'postgres',
REDSHIFT = 'redshift',
SNOWFLAKE = 'snowflake',
TRINO = 'trino',
CLICKHOUSE = 'clickhouse',
}
/**
* Week start day configuration
*/
enum WeekDay {
MONDAY = 0,
TUESDAY = 1,
WEDNESDAY = 2,
THURSDAY = 3,
FRIDAY = 4,
SATURDAY = 5,
SUNDAY = 6,
}
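WeekDay is a numeric enum, so its reverse mapping can turn a getStartOfWeek() result back into a readable name. A small sketch:

import { WeekDay } from '@lightdash/common';

// null/undefined means no explicit configuration (warehouse default).
const weekDayName = (day: WeekDay | null | undefined): string =>
    day === null || day === undefined ? 'warehouse default' : WeekDay[day];

weekDayName(WeekDay.MONDAY); // 'MONDAY'
weekDayName(null);           // 'warehouse default'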
/**
* Warehouse type enum
*/
enum WarehouseTypes {
BIGQUERY = 'bigquery',
CLICKHOUSE = 'clickhouse',
DATABRICKS = 'databricks',
POSTGRES = 'postgres',
REDSHIFT = 'redshift',
SNOWFLAKE = 'snowflake',
TRINO = 'trino',
}

Use SQL builders to generate warehouse-specific SQL dynamically:
import { warehouseClientFromCredentials } from '@lightdash/warehouses';
import { FieldType, MetricType } from '@lightdash/common';
const client = warehouseClientFromCredentials(credentials);
// Build dynamic query with proper quoting
const fieldQuote = client.getFieldQuoteChar();
const fields = ['user_id', 'email', 'created_at'];
const quotedFields = fields.map(f => `${fieldQuote}${f}${fieldQuote}`).join(', ');
const sql = `SELECT ${quotedFields} FROM ${fieldQuote}users${fieldQuote}`;
// PostgreSQL: SELECT "user_id", "email", "created_at" FROM "users"
// BigQuery: SELECT `user_id`, `email`, `created_at` FROM `users`
// Escape user input safely
const searchTerm = client.escapeString(userInput);
const whereSql = `WHERE ${fieldQuote}name${fieldQuote} = '${searchTerm}'`;
// Generate aggregation SQL (requires complete Metric objects)
const revenueMetric = client.getMetricSql('revenue', {
fieldType: FieldType.METRIC,
type: MetricType.SUM,
sql: 'revenue',
name: 'total_revenue',
label: 'Total Revenue',
table: 'products',
tableLabel: 'Products',
hidden: false,
});
const medianPriceMetric = client.getMetricSql('price', {
fieldType: FieldType.METRIC,
type: MetricType.MEDIAN,
sql: 'price',
name: 'median_price',
label: 'Median Price',
table: 'products',
tableLabel: 'Products',
hidden: false,
});
const aggregationSql = `
SELECT
${fieldQuote}category${fieldQuote},
${revenueMetric} as total_revenue,
${medianPriceMetric} as median_price
FROM ${fieldQuote}products${fieldQuote}
GROUP BY ${fieldQuote}category${fieldQuote}
`;

Configure week start day for date functions:
import { warehouseSqlBuilderFromType } from '@lightdash/warehouses';
import { SupportedDbtAdapter, WeekDay } from '@lightdash/common';
// Monday as week start (ISO standard)
const mondayBuilder = warehouseSqlBuilderFromType(
SupportedDbtAdapter.SNOWFLAKE,
WeekDay.MONDAY
);
// Sunday as week start (US standard)
const sundayBuilder = warehouseSqlBuilderFromType(
SupportedDbtAdapter.SNOWFLAKE,
WeekDay.SUNDAY
);
// Check configuration
console.log(mondayBuilder.getStartOfWeek()); // 0 (Monday)
console.log(sundayBuilder.getStartOfWeek()); // 6 (Sunday)

Detect warehouse type from client:
import { warehouseClientFromCredentials } from '@lightdash/warehouses';
import { SupportedDbtAdapter } from '@lightdash/common';
const client = warehouseClientFromCredentials(credentials);
const adapterType = client.getAdapterType();
switch (adapterType) {
case SupportedDbtAdapter.BIGQUERY:
console.log('Using BigQuery');
break;
case SupportedDbtAdapter.SNOWFLAKE:
console.log('Using Snowflake');
break;
case SupportedDbtAdapter.POSTGRES:
console.log('Using PostgreSQL');
break;
case SupportedDbtAdapter.DATABRICKS:
console.log('Using Databricks');
break;
case SupportedDbtAdapter.REDSHIFT:
console.log('Using Redshift');
break;
case SupportedDbtAdapter.TRINO:
console.log('Using Trino');
break;
case SupportedDbtAdapter.CLICKHOUSE:
console.log('Using Clickhouse');
break;
}
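To catch newly added adapters at compile time, a sketch of an exhaustive variant of the switch using a never check:

function adapterLabel(adapterType: SupportedDbtAdapter): string {
    switch (adapterType) {
        case SupportedDbtAdapter.BIGQUERY: return 'BigQuery';
        case SupportedDbtAdapter.DATABRICKS: return 'Databricks';
        case SupportedDbtAdapter.POSTGRES: return 'PostgreSQL';
        case SupportedDbtAdapter.REDSHIFT: return 'Redshift';
        case SupportedDbtAdapter.SNOWFLAKE: return 'Snowflake';
        case SupportedDbtAdapter.TRINO: return 'Trino';
        case SupportedDbtAdapter.CLICKHOUSE: return 'Clickhouse';
        default: {
            // Fails to type-check if SupportedDbtAdapter gains a member
            // that is not handled above.
            const unhandled: never = adapterType;
            throw new Error(`Unhandled adapter: ${unhandled}`);
        }
    }
}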