The compiler system compiles DBT models into Lightdash explores, generates SQL queries, and handles dimension/metric transformations.
The compiler is responsible for: compiling explores and their tables, resolving ${...} field references in dimension and metric SQL, compiling joins and custom dimensions, and validating parameter references.
import {
ExploreCompiler,
type UncompiledExplore,
type Explore,
type WarehouseSqlBuilder,
WeekDay,
} from '@lightdash/common';
The main class for compiling explores from DBT models.
class ExploreCompiler {
constructor(warehouseClient: WarehouseSqlBuilder);
compileExplore(uncompiledExplore: UncompiledExplore): Explore;
compileTable(
table: Table,
tables: Record<string, Table>,
availableParameters: string[]
): CompiledTable;
compileDimension(
dimension: Dimension,
tables: Record<string, Table>,
availableParameters: string[]
): CompiledDimension;
compileDimensionSql(
dimension: Dimension,
tables: Record<string, Table>
): { sql: string; tablesReferences: Set<string> };
compileMetric(
metric: Metric,
tables: Record<string, Table>,
availableParameters: string[]
): CompiledMetric;
compileMetricSql(
metric: Metric,
tables: Record<string, Table>,
availableParameters: string[]
): { sql: string; tablesReferences: Set<string> };
compileCustomDimension(
customDimension: CustomDimension,
tables: Record<string, Table>,
availableParameters: string[]
): CompiledCustomDimension;
compileCustomDimensionSql(
dimension: CustomSqlDimension,
tables: Record<string, Table>,
availableParameters: string[]
): Pick<CompiledCustomSqlDimension, 'compiledSql' | 'tablesReferences' | 'parameterReferences'>;
compileDimensionReference(
ref: string,
tables: Record<string, Table>,
currentTable: string
): { sql: string; tablesReferences: Set<string> };
compileMetricReference(
ref: string,
tables: Record<string, Table>,
currentTable: string,
availableParameters: string[]
): { sql: string; tablesReferences: Set<string> };
compileJoin(
join: ExploreJoin,
tables: Record<string, Table>,
availableParameters: string[]
): CompiledExploreJoin;
compileExploreJoinSql(
join: Pick<ExploreJoin, 'sqlOn' | 'table'>,
tables: Record<string, Table>
): { sql: string; tablesReferences: Set<string> };
}
Example:
import { ExploreCompiler } from '@lightdash/common';
// Create compiler with warehouse client
const compiler = new ExploreCompiler(warehouseClient);
// Compile an uncompiled explore
const explore = compiler.compileExplore(uncompiledExplore);
// The compiled explore now has:
// - Resolved SQL for all dimensions and metrics
// - Compiled field references
// - Table relationships
console.log(`Compiled explore: ${explore.name}`);
console.log(`Tables: ${Object.keys(explore.tables).join(', ')}`);
interface UncompiledExplore {
name: string;
label: string;
tags: string[];
baseTable: string;
groupLabel?: string;
joinedTables: ExploreJoin[];
tables: Record<string, Table>;
targetDatabase: SupportedDbtAdapter;
sqlWhere?: string;
warehouse?: string;
ymlPath?: string;
sqlPath?: string;
joinAliases?: Record<string, Record<string, string>>;
spotlightConfig?: LightdashProjectConfig['spotlight'];
aiHint?: string | string[];
meta: DbtRawModelNode['meta'];
databricksCompute?: string;
projectParameters?: LightdashProjectConfig['parameters'];
}
The uncompiled explore structure before SQL compilation. All properties shown without ? are required: name, label, tags, baseTable, joinedTables, tables, targetDatabase, and meta. Other properties are optional and used for additional configuration or metadata.
interface CompilationHistoryReport {
totalExploresCount: number;
successfulExploresCount: number;
errorExploresCount: number;
metricsCount: number;
dimensionsCount: number;
exploresWithErrors: ExploreError[];
baseTableNames: string[];
}
function calculateCompilationReport(args: {
explores: (Explore | ExploreError)[];
}): CompilationHistoryReport;
Example:
import { calculateCompilationReport } from '@lightdash/common';
const report = calculateCompilationReport({ explores });
console.log(`Total explores: ${report.totalExploresCount}`);
console.log(`Successful: ${report.successfulExploresCount}`);
console.log(`Failed: ${report.errorExploresCount}`);
console.log(`Total metrics: ${report.metricsCount}`);
console.log(`Total dimensions: ${report.dimensionsCount}`);
console.log(`Base tables: ${report.baseTableNames.join(', ')}`);
if (report.exploresWithErrors.length > 0) {
console.log('Compilation errors:');
report.exploresWithErrors.forEach((error) => {
console.log(` ${error.name}: ${error.errors.length} errors`);
});
}Pattern for matching Lightdash variable references in SQL.
const lightdashVariablePattern: RegExp;
// Matches ${variable} pattern (excludes lightdash. and ld. prefixes)
// Pattern: /\$\{((?!(lightdash|ld)\.)[a-zA-Z0-9_.]+)\}/gUsed internally by the compiler to find and replace variable references like ${table.field} in SQL expressions.
Example:
import { lightdashVariablePattern } from '@lightdash/common';
const sql = 'SELECT ${orders.customer_id}, ${amount} FROM orders';
const matches = sql.match(lightdashVariablePattern);
// Returns: ['${orders.customer_id}', '${amount}']Parses a dimension reference string into a structured reference.
function getParsedReference(
ref: string,
currentTable: string
): Reference;
Parameters:
- ref: the reference string, either 'field' (relative to the current table) or 'table.field' (absolute).
- currentTable: the table used to resolve relative references.
Example:
import { getParsedReference } from '@lightdash/common';
// Relative reference (same table)
const ref1 = getParsedReference('customer_id', 'orders');
// Returns: { refTable: 'orders', refName: 'customer_id' }
// Absolute reference (different table)
const ref2 = getParsedReference('customers.name', 'orders');
// Returns: { refTable: 'customers', refName: 'name' }Extracts all variable references from a SQL expression.
function getAllReferences(raw: string): string[];Finds all ${...} variable references in SQL.
Example:
import { getAllReferences } from '@lightdash/common';
const sql = 'SELECT ${orders.customer_id}, ${customers.name} FROM orders';
const refs = getAllReferences(sql);
// Returns: ['orders.customer_id', 'customers.name']Parses all references in a SQL expression into structured references.
function parseAllReferences(
raw: string,
currentTable: string
): Reference[];Example:
import { parseAllReferences } from '@lightdash/common';
const sql = 'SELECT ${customer_id}, ${customers.name}';
const refs = parseAllReferences(sql, 'orders');
// Returns: [
// { refTable: 'orders', refName: 'customer_id' },
// { refTable: 'customers', refName: 'name' }
// ]Generates SQL for a filter rule from a compiled field.
function renderFilterRuleSqlFromField(
filterRule: FilterRule,
field: CompiledField | CompiledCustomSqlDimension,
fieldQuoteChar: string,
stringQuoteChar: string,
escapeString: (string: string) => string,
startOfWeek: WeekDay | null | undefined,
adapterType: SupportedDbtAdapter,
timezone?: string
): string;
Parameters:
- fieldQuoteChar: the character used to quote field identifiers (` for BigQuery, " for PostgreSQL).
Returns SQL WHERE clause expression for the filter.
Example:
import {
renderFilterRuleSqlFromField,
FilterOperator,
SupportedDbtAdapter,
WeekDay,
} from '@lightdash/common';
const filterRule: FilterRule = {
id: 'f1',
target: { fieldId: 'customers.name' },
operator: FilterOperator.STARTS_WITH,
values: ['John'],
};
// Assuming you have a compiled field from an explore
const compiledField: CompiledField = explore.tables['customers'].dimensions['name'];
const sql = renderFilterRuleSqlFromField(
filterRule,
compiledField,
'"', // PostgreSQL field quote character
"'", // String quote character
(str) => str.replace(/'/g, "''"), // Escape function for PostgreSQL
WeekDay.MONDAY,
SupportedDbtAdapter.POSTGRES,
'UTC'
);
// Returns SQL like: "customers"."name" LIKE 'John%'Renders SQL for string filter operators.
function renderStringFilterSql(
dimensionSql: string,
filter: FilterRule,
stringQuoteChar: string
): string;Example:
import { renderStringFilterSql, FilterOperator } from '@lightdash/common';
const filter: FilterRule = {
id: 'f1',
target: { fieldId: 'name' },
operator: FilterOperator.INCLUDE,
values: ['test'],
};
const sql = renderStringFilterSql('customers.name', filter, "'");
// Returns: "customers.name LIKE '%test%'"Extracts parameter references from SQL.
function getParameterReferences(
sql: string,
regex?: RegExp
): string[];
const parameterRegex: RegExp;Example:
import { getParameterReferences, parameterRegex } from '@lightdash/common';
const sql = 'WHERE date >= ${lightdash.parameters.start_date} AND date <= ${lightdash.parameters.end_date}';
const params = getParameterReferences(sql);
// Returns: ['start_date', 'end_date']
// Using short prefix
const sql2 = 'WHERE amount > ${ld.parameters.threshold}';
const params2 = getParameterReferences(sql2);
// Returns: ['threshold']
// Custom regex
const customSql = 'WHERE date >= {{ start_date }} AND date <= {{ end_date }}';
const customParams = getParameterReferences(customSql, /\{\{([^}]+)\}\}/g);Extracts parameters from both SQL and format strings.
function getParameterReferencesFromSqlAndFormat(
compiledSql: string,
format?: string
): string[];Example:
import { getParameterReferencesFromSqlAndFormat } from '@lightdash/common';
const sql = 'SELECT * WHERE amount > ${lightdash.parameters.threshold}';
const format = 'Amount: ${lightdash.parameters.threshold}';
const params = getParameterReferencesFromSqlAndFormat(sql, format);
// Returns: ['threshold']Validates that all parameter references exist in available parameters.
function validateParameterReferences(
tableName: string,
parameterReferences: string[],
availableParameters: string[]
): void;Throws a CompileError if any parameter references are not available.
Example:
import { validateParameterReferences } from '@lightdash/common';
const paramRefs = ['start_date', 'end_date', 'threshold'];
const available = ['start_date', 'end_date'];
try {
validateParameterReferences('orders', paramRefs, available);
} catch (error) {
console.error('Missing parameter: threshold');
}Gets list of available parameter names from project and explore parameter records.
function getAvailableParameterNames(
projectParameters: Record<string, LightdashProjectParameter> | undefined,
exploreParameters: Record<string, LightdashProjectParameter> | undefined
): string[];Parameters:
Returns an array of parameter names from both project and explore parameters combined.
Example:
import { getAvailableParameterNames } from '@lightdash/common';
const projectParams = {
'start_date': { type: 'date', defaultValue: '2024-01-01' },
'end_date': { type: 'date', defaultValue: '2024-12-31' }
};
const exploreParams = {
'min_revenue': { type: 'number', defaultValue: 1000 }
};
const availableParams = getAvailableParameterNames(
projectParams,
exploreParams
);
// Returns: ['start_date', 'end_date', 'min_revenue']import {
renderFilterRuleSqlFromField,
FilterOperator,
UnitOfTime,
SupportedDbtAdapter,
WeekDay,
type FilterRule,
type CompiledField,
} from '@lightdash/common';
// Helper function to get warehouse-specific parameters
const getWarehouseParams = (adapter: SupportedDbtAdapter) => {
const fieldQuote = adapter === SupportedDbtAdapter.BIGQUERY ? '`' : '"';
const stringQuote = "'";
const escapeString = (str: string) => str.replace(/'/g, "''");
return { fieldQuote, stringQuote, escapeString };
};
// String filter
const nameFilter: FilterRule = {
id: 'f1',
target: { fieldId: 'customers.name' },
operator: FilterOperator.STARTS_WITH,
values: ['A'],
};
const nameField: CompiledField = explore.tables['customers'].dimensions['name'];
const { fieldQuote, stringQuote, escapeString } = getWarehouseParams(SupportedDbtAdapter.POSTGRES);
const nameSql = renderFilterRuleSqlFromField(
nameFilter,
nameField,
fieldQuote,
stringQuote,
escapeString,
WeekDay.MONDAY,
SupportedDbtAdapter.POSTGRES,
'UTC'
);
// Returns SQL with proper quoting: "customers"."name" LIKE 'A%'
// Numeric filter
const amountFilter: FilterRule = {
id: 'f2',
target: { fieldId: 'orders.amount' },
operator: FilterOperator.GREATER_THAN,
values: [100],
};
const amountField: CompiledField = explore.tables['orders'].dimensions['amount'];
const bigqueryParams = getWarehouseParams(SupportedDbtAdapter.BIGQUERY);
const amountSql = renderFilterRuleSqlFromField(
amountFilter,
amountField,
bigqueryParams.fieldQuote,
bigqueryParams.stringQuote,
bigqueryParams.escapeString,
null,
SupportedDbtAdapter.BIGQUERY,
'UTC'
);
// Returns: `orders`.`amount` > 100
// Date filter (relative)
const dateFilter: FilterRule = {
id: 'f3',
target: { fieldId: 'orders.created_at' },
operator: FilterOperator.IN_THE_PAST,
values: [30],
settings: { unitOfTime: UnitOfTime.days },
};
const dateField: CompiledField = explore.tables['orders'].dimensions['created_at'];
const snowflakeParams = getWarehouseParams(SupportedDbtAdapter.SNOWFLAKE);
const dateSql = renderFilterRuleSqlFromField(
dateFilter,
dateField,
snowflakeParams.fieldQuote,
snowflakeParams.stringQuote,
snowflakeParams.escapeString,
WeekDay.MONDAY,
SupportedDbtAdapter.SNOWFLAKE,
'America/New_York'
);
// Returns warehouse-specific SQL for "date in the past 30 days"import {
getParameterReferences,
validateParameterReferences,
getAvailableParameterNames,
} from '@lightdash/common';
const sql = `
SELECT
date,
amount
FROM orders
WHERE
date >= {{ start_date }}
AND date <= {{ end_date }}
AND amount >= {{ min_amount }}
`;
// Extract parameters (the {{ }} placeholders require a custom regex;
// the default pattern only matches ${lightdash.parameters.*} / ${ld.parameters.*})
const paramRefs = getParameterReferences(sql, /\{\{\s*([^}\s]+)\s*\}\}/g);
// Returns: ['start_date', 'end_date', 'min_amount']
// Get available parameters
const availableParams = getAvailableParameterNames(
projectParameters,
exploreParameters
);
// Validate
try {
validateParameterReferences('orders', paramRefs, availableParams);
console.log('All parameters are available');
} catch (error) {
console.error('Missing parameters:', error.message);
}import {
getAllReferences,
parseAllReferences,
getParsedReference,
} from '@lightdash/common';
// Metric SQL with references
// Note: a plain string is used (not a template literal) so ${...} is kept verbatim
const metricSql = 'SUM(${orders.amount} * ${products.price})';
// Get all reference strings
const refStrings = getAllReferences(metricSql);
// Returns: ['orders.amount', 'products.price']
// Parse into structured references
const refs = parseAllReferences(metricSql, 'orders');
// Returns: [
// { refTable: 'orders', refName: 'amount' },
// { refTable: 'products', refName: 'price' }
// ]
// Parse individual reference
const ref = getParsedReference('customer_id', 'orders');
// Returns: { refTable: 'orders', refName: 'customer_id' }
The compiler uses a warehouse SQL builder interface to generate warehouse-specific SQL.
interface WarehouseSqlBuilder {
getSqlForMetricQuery(
explore: Explore,
metricQuery: CompiledMetricQuery,
warehouseClient?: WarehouseClient
): string;
getFieldQuoteChar(): string;
getStringQuoteChar(): string;
getEscapeStringQuoteChar(): string;
getAdapterType(): SupportedDbtAdapter;
getStartOfWeek(): WeekDay | null | undefined;
escapeString(str: string): string;
getMetricSql(sql: string, metric: Metric): string;
}Example:
import { type WarehouseSqlBuilder } from '@lightdash/common';
// Warehouse client implements WarehouseSqlBuilder
const quoteChar = warehouseClient.getFieldQuoteChar();
const stringQuote = warehouseClient.getStringQuoteChar();
// Generate SQL for metric query
const sql = warehouseClient.getSqlForMetricQuery(explore, compiledQuery);
console.log('Generated SQL:', sql);enum SupportedDbtAdapter {
BIGQUERY = 'bigquery',
POSTGRES = 'postgres',
REDSHIFT = 'redshift',
SNOWFLAKE = 'snowflake',
DATABRICKS = 'databricks',
TRINO = 'trino',
// ... and more
}Each adapter has specific SQL generation rules for date functions, string operations, and other operations.
import {
ExploreCompiler,
calculateCompilationReport,
getAllReferences,
validateParameterReferences,
renderFilterRuleSqlFromField,
type UncompiledExplore,
type Explore,
} from '@lightdash/common';
async function compileAndValidateExplore(
uncompiledExplore: UncompiledExplore,
warehouseClient: WarehouseClient
): Promise<Explore> {
// Create compiler
const compiler = new ExploreCompiler(warehouseClient);
// Compile explore
const explore = compiler.compileExplore(uncompiledExplore);
// Validate dimensions
Object.values(explore.tables).forEach(table => {
Object.values(table.dimensions).forEach(dimension => {
const refs = getAllReferences(dimension.compiledSql);
console.log(`Dimension ${dimension.name} references:`, refs);
});
// Validate metrics
Object.values(table.metrics).forEach(metric => {
const refs = getAllReferences(metric.compiledSql);
console.log(`Metric ${metric.name} references:`, refs);
});
});
return explore;
}
// Compile multiple explores and generate report
async function compileProject(
explores: UncompiledExplore[],
warehouseClient: WarehouseClient
) {
const compiler = new ExploreCompiler(warehouseClient);
const compiled = explores.map(exp => {
try {
return compiler.compileExplore(exp);
} catch (error) {
return {
name: exp.name,
errors: [{ type: 'COMPILE_ERROR', message: error.message }],
};
}
});
const report = calculateCompilationReport({ explores: compiled });
console.log('Compilation Report:');
console.log(` Total: ${report.totalExploresCount}`);
console.log(` Success: ${report.successfulExploresCount}`);
console.log(` Failed: ${report.errorExploresCount}`);
return { explores: compiled, report };
}Creates a dimension with a specific time granularity from a base time dimension.
function createDimensionWithGranularity(
dimensionName: string,
baseTimeDimension: CompiledDimension,
explore: Explore,
warehouseSqlBuilder: WarehouseSqlBuilder,
granularity: DateGranularity,
availableParameters: string[]
): CompiledDimension;Parameters:
Returns a compiled dimension with SQL adapted for the specified time granularity.
Example:
import {
createDimensionWithGranularity,
DateGranularity,
} from '@lightdash/common';
// Create a weekly dimension from a timestamp dimension
const weeklyDimension = createDimensionWithGranularity(
'created_at_week',
explore.tables['orders'].dimensions['created_at'],
explore,
warehouseSqlBuilder,
DateGranularity.WEEK,
availableParameters
);Expands field references that include set references (ending with *) and applies exclusions (starting with -).
The filter compiler provides additional functions for generating SQL from filter rules.
Generates SQL for numeric filter operators.
function renderNumberFilterSql(
dimensionSql: string,
filter: FilterRule<FilterOperator, unknown>
): string;Parameters:
Supports operators: equals, notEquals, greaterThan, greaterThanOrEqual, lessThan, lessThanOrEqual, inBetween, notInBetween, isNull, notNull.
Example:
import { renderNumberFilterSql, FilterOperator } from '@lightdash/common';
const filter = {
id: 'f1',
target: { fieldId: 'amount' },
operator: FilterOperator.GREATER_THAN,
values: [100],
};
const sql = renderNumberFilterSql('orders.amount', filter);
// Returns: "(orders.amount) > (100)"Generates SQL for date and timestamp filter operators.
function renderDateFilterSql(
dimensionSql: string,
filter: DateFilterRule,
adapterType: SupportedDbtAdapter,
timezone: string,
dateFormatter?: (date: Date) => string,
startOfWeek?: WeekDay | null | undefined
): string;Parameters:
Supports operators: equals, notEquals, greaterThan, lessThan, inThePast, inTheNext, inTheCurrent, notInTheCurrent, inBetween, isNull, notNull.
Example:
import {
renderDateFilterSql,
FilterOperator,
UnitOfTime,
SupportedDbtAdapter,
WeekDay,
} from '@lightdash/common';
const filter = {
id: 'f1',
target: { fieldId: 'created_at' },
operator: FilterOperator.IN_THE_PAST,
values: [30],
settings: { unitOfTime: UnitOfTime.days },
};
const sql = renderDateFilterSql(
'orders.created_at',
filter,
SupportedDbtAdapter.POSTGRES,
'UTC',
undefined,
WeekDay.MONDAY
);
// Returns SQL for "date in the past 30 days"Generates SQL for boolean filter operators.
function renderBooleanFilterSql(
dimensionSql: string,
filter: FilterRule<FilterOperator, unknown>
): string;Parameters:
Supports operators: equals, notEquals, isNull, notNull.
Example:
import { renderBooleanFilterSql, FilterOperator } from '@lightdash/common';
const filter = {
id: 'f1',
target: { fieldId: 'is_active' },
operator: FilterOperator.EQUALS,
values: [true],
};
const sql = renderBooleanFilterSql('users.is_active', filter);
// Returns: "(users.is_active) = TRUE"Generates SQL for filters on table calculations.
function renderTableCalculationFilterRuleSql(
filterRule: FilterRule<FilterOperator, unknown>,
field: TableCalculation | undefined,
fieldQuoteChar: string,
stringQuoteChar: string,
escapeString: (string: string) => string,
adapterType: SupportedDbtAdapter,
startOfWeek: WeekDay | null | undefined,
timezone?: string
): string;Parameters:
Returns SQL WHERE clause for filtering table calculation results.
General-purpose filter SQL generator for any field type.
function renderFilterRuleSql(
filterRule: FilterRule<FilterOperator, unknown>,
fieldType: DimensionType | MetricType,
fieldSql: string,
stringQuoteChar: string,
escapeString: (string: string) => string,
startOfWeek: WeekDay | null | undefined,
adapterType: SupportedDbtAdapter,
timezone?: string
): string;Parameters:
Routes to the appropriate type-specific filter function based on fieldType.
Example:
import {
renderFilterRuleSql,
DimensionType,
FilterOperator,
SupportedDbtAdapter,
WeekDay,
} from '@lightdash/common';
const filter = {
id: 'f1',
target: { fieldId: 'status' },
operator: FilterOperator.EQUALS,
values: ['active', 'pending'],
};
const sql = renderFilterRuleSql(
filter,
DimensionType.STRING,
'orders.status',
"'",
(str) => str.replace(/'/g, "''"),
WeekDay.MONDAY,
SupportedDbtAdapter.POSTGRES,
'UTC'
);
// Returns: "(orders.status) IN ('active','pending')"Extracts all available parameters from a set of tables.
function getAvailableParametersFromTables(
includedTables: (Table | CompiledTable)[]
): Record<string, LightdashProjectParameter>;Parameters:
Returns a record of parameter names (qualified with table name) to parameter definitions.
Example:
import { getAvailableParametersFromTables } from '@lightdash/common';
const params = getAvailableParametersFromTables(Object.values(explore.tables));
// Returns: { 'orders.date_param': { type: 'date', ... }, ... }Validates that parameter names follow naming conventions.
function validateParameterNames(
parameters: Record<string, LightdashProjectParameter> | undefined
): {
isInvalid: boolean;
invalidParameters: string[];
};Parameters:
Returns validation result with list of invalid parameter names. Valid names contain only letters, numbers, underscores, and hyphens.
Example:
import { validateParameterNames } from '@lightdash/common';
const result = validateParameterNames({
valid_param: { type: 'string' },
'invalid param': { type: 'string' }, // Contains space
});
if (result.isInvalid) {
console.error('Invalid parameters:', result.invalidParameters);
}Functions for managing spotlight visibility and category configuration for explores and metrics.
Gets the spotlight configuration for a resource (explore or metric).
/**
* Get the spotlight configuration for a resource
* @param visibility - The visibility of the resource
* @param categories - The categories of the resource
* @returns The spotlight configuration for the resource
*/
function getSpotlightConfigurationForResource(
visibility?: LightdashProjectConfig['spotlight']['default_visibility'],
categories?: string[]
): Pick<Explore, 'spotlight'> | Pick<Metric, 'spotlight'>;Parameters:
Returns spotlight configuration object with visibility and categories, or empty object if visibility is undefined.
Example:
import { getSpotlightConfigurationForResource } from '@lightdash/common';
// Set resource as public with categories
const config = getSpotlightConfigurationForResource('public', ['sales', 'revenue']);
// Returns: { spotlight: { visibility: 'public', categories: ['sales', 'revenue'] } }
// No visibility specified
const emptyConfig = getSpotlightConfigurationForResource();
// Returns: {}Validates and retrieves categories from a resource, checking against project configuration.
/**
* Get the categories from the resource and validate them against the project config
* @param resourceType - The type of the resource
* @param resourceName - The name of the resource
* @param spotlightConfig - The spotlight config
* @param resourceCategories - The categories from the resource
* @returns The categories from the resource
*/
function getCategoriesFromResource(
resourceType: 'metric' | 'explore',
resourceName: string,
spotlightConfig: LightdashProjectConfig['spotlight'] | undefined,
resourceCategories?: string[]
): string[];Parameters:
Returns validated array of categories. Throws ParseError if any categories are not defined in the project configuration.
Example:
import { getCategoriesFromResource } from '@lightdash/common';
const spotlightConfig = {
categories: {
sales: { label: 'Sales' },
revenue: { label: 'Revenue' },
},
};
// Valid categories
const validCategories = getCategoriesFromResource(
'metric',
'total_revenue',
spotlightConfig,
['sales', 'revenue']
);
// Returns: ['sales', 'revenue']
// Invalid category throws error
try {
getCategoriesFromResource(
'metric',
'total_revenue',
spotlightConfig,
['invalid_category']
);
} catch (error) {
// ParseError: Invalid spotlight categories found in metric 'total_revenue': invalid_category
}Types for defining models in Lightdash YAML format (dbt-free model definitions). These types extend the existing dbt types to enable model definitions without requiring a dbt project.
import {
type LightdashModel,
type LightdashModelDimension,
type LightdashModelAdditionalDimension,
type LightdashModelMetric,
type LightdashModelExplore,
} from '@lightdash/common';Root structure for a Lightdash Model YAML file. Defines a model with dimensions, metrics, and explores without requiring dbt.
type LightdashModel = {
type: 'model' | 'model/v1beta' | 'model/v1';
name: string;
sql_from: string; // SQL query or table reference (e.g., 'schema.table' or 'SELECT ...')
label?: string;
description?: string;
joins?: DbtModelJoin[];
explores?: Record<string, LightdashModelExplore>;
dimensions: LightdashModelDimension[]; // Required array of dimensions
// Inherits other fields from DbtModelLightdashConfig
};Dimension definition in a Lightdash Model. Extends dbt dimension configuration with required fields for standalone model definitions.
type LightdashModelDimension = DbtColumnLightdashDimension & {
name: string; // Required (optional in dbt because it comes from column name)
type: DimensionType; // Required dimension type
sql: string; // Required - custom SQL expression for this dimension
metrics?: Record<string, DbtColumnLightdashMetric>;
additional_dimensions?: Record<string, LightdashModelAdditionalDimension>;
};Additional dimension derived from a base dimension in a Lightdash Model.
type LightdashModelAdditionalDimension = DbtColumnLightdashAdditionalDimension & {
type: DimensionType; // Required in YAML
sql: string; // Required in YAML
};Metric definition in a Lightdash Model. Directly reuses the dbt metric type.
type LightdashModelMetric = DbtColumnLightdashMetric;Explore configuration for a Lightdash Model, defining how the model can be explored with joins and filters.
type LightdashModelExplore = {
label?: string;
description?: string;
group_label?: string;
joins?: DbtModelJoin[]; // Reuses DbtModelJoin
required_filters?: RequiredFilter[];
default_filters?: RequiredFilter[];
/**
* Explore-scoped custom dimensions.
* These dimensions are only available within this specific explore
* and can reference fields from any joined table.
*/
additional_dimensions?: Record<string, DbtExploreLightdashAdditionalDimension>;
};Functions for converting Lightdash YAML models to DBT model format, enabling them to be processed through the standard translation pipeline.
Converts a single Lightdash YAML model to DbtModelNode format.
function convertLightdashModelToDbtModel(
model: LightdashModel
): DbtModelNode;Parameters:
Returns a DbtModelNode that can be processed by the standard translator functions.
Example:
import { convertLightdashModelToDbtModel } from '@lightdash/common';
const lightdashModel = {
name: 'orders',
sql_from: 'SELECT * FROM raw.orders',
dimensions: [
{ name: 'order_id', type: 'string', sql: '${TABLE}.order_id' },
{ name: 'amount', type: 'number', sql: '${TABLE}.amount' },
],
metrics: [
{ name: 'total_revenue', type: 'sum', sql: '${amount}' },
],
};
const dbtModel = convertLightdashModelToDbtModel(lightdashModel);
// Can now be used with convertTable(), convertExplores(), etc.Converts multiple Lightdash models to DbtModelNode format.
function convertLightdashModelsToDbtModels(
models: LightdashModel[]
): DbtModelNode[];Parameters:
Returns array of DbtModelNodes ready for translation.
Example:
import { convertLightdashModelsToDbtModels } from '@lightdash/common';
const dbtModels = convertLightdashModelsToDbtModels([model1, model2, model3]);The translator module converts DBT models into Lightdash explores and tables, handling dimension and metric compilation, time intervals, joins, and type enrichment.
Converts a single DBT model into a Lightdash table with dimensions and metrics.
function convertTable(
adapterType: SupportedDbtAdapter,
model: DbtModelNode,
dbtMetrics: DbtMetric[],
spotlightConfig: LightdashProjectConfig['spotlight'],
startOfWeek?: WeekDay | null,
disableTimestampConversion?: boolean
): Omit<Table, 'lineageGraph'>;Parameters:
Returns a Lightdash table with compiled dimensions and metrics (lineage graph excluded).
Example:
import { convertTable, SupportedDbtAdapter } from '@lightdash/common';
const table = convertTable(
SupportedDbtAdapter.POSTGRES,
dbtModel,
[],
spotlightConfig,
WeekDay.MONDAY
);
console.log(`Table: ${table.name}`);
console.log(`Dimensions: ${Object.keys(table.dimensions).length}`);
console.log(`Metrics: ${Object.keys(table.metrics).length}`);Converts DBT models into fully compiled Lightdash explores with joins.
function convertExplores(
models: DbtModelNode[],
loadSources: boolean,
adapterType: SupportedDbtAdapter,
metrics: DbtMetric[],
warehouseSqlBuilder: WarehouseSqlBuilder,
lightdashProjectConfig: LightdashProjectConfig,
disableTimestampConversion?: boolean
): Promise<(Explore | ExploreError)[]>;Parameters:
Returns a Promise resolving to an array of compiled Explore objects or ExploreError objects for models that failed compilation.
Example:
import { convertExplores } from '@lightdash/common';
const explores = await convertExplores(
dbtModels,
false,
SupportedDbtAdapter.POSTGRES,
[],
warehouseSqlBuilder,
lightdashProjectConfig
);
explores.forEach((explore) => {
if ('errors' in explore) {
console.error(`Failed to compile ${explore.name}:`, explore.errors);
} else {
console.log(`Successfully compiled explore: ${explore.name}`);
}
});Enriches DBT models with actual column types from warehouse catalog.
function attachTypesToModels(
models: DbtModelNode[],
warehouseCatalog: {
[database: string]: {
[schema: string]: {
[table: string]: { [column: string]: DimensionType };
};
};
},
throwOnMissingCatalogEntry?: boolean,
caseSensitiveMatching?: boolean
): DbtModelNode[];Parameters:
Returns DBT models with updated column types from warehouse catalog.
Example:
import { attachTypesToModels } from '@lightdash/common';
const catalog = {
'my_database': {
'public': {
'orders': {
'order_id': DimensionType.STRING,
'amount': DimensionType.NUMBER,
'created_at': DimensionType.TIMESTAMP
}
}
}
};
const enrichedModels = attachTypesToModels(
dbtModels,
catalog,
true,
true
);Extracts database schema structure from DBT models.
function getSchemaStructureFromDbtModels(
dbtModels: DbtModelNode[]
): { database: string; schema: string; table: string }[];Parameters:
Returns array of objects containing database, schema, and table names from each model.
Example:
import { getSchemaStructureFromDbtModels } from '@lightdash/common';
const structure = getSchemaStructureFromDbtModels(dbtModels);
// Returns: [
// { database: 'analytics', schema: 'public', table: 'orders' },
// { database: 'analytics', schema: 'public', table: 'customers' }
// ]

Expands field references that include set references (ending with *) and applies exclusions (starting with -).
/**
* Expands field references that include set references (ending with *)
* and applies exclusions (starting with -)
*
* Supports one level of set nesting: a set can reference another set,
* but the referenced set cannot contain set references.
*
* @param fields - Array of field names, set references (ending with *), or exclusions (starting with -)
* @param currentTable - The table containing the sets being expanded
* @returns Expanded field list with exclusions applied and duplicates removed
*/
function expandFieldsWithSets(
fields: string[],
currentTable: Table
): string[];

Parameters:
- fields — Array of field names, set references (ending with *), or exclusions (starting with -)
- currentTable — The table containing the sets being expanded

Returns expanded field list with exclusions applied and duplicates removed.
Features:
- Set references (ending with *), with one level of nesting supported
- Exclusions (starting with -)

Example:
import { expandFieldsWithSets } from '@lightdash/common';
// Define a table with field sets
const table: Table = {
name: 'orders',
sets: {
'basic': {
fields: ['order_id', 'customer_id', 'amount']
},
'detailed': {
fields: ['basic*', 'created_at', 'status', '-amount'] // Reference basic set, exclude amount
},
'all': {
fields: ['order_id', 'customer_id', 'amount', 'created_at', 'status']
}
},
// ... other table properties
};
// Expand a basic set reference
const basicFields = expandFieldsWithSets(['basic*'], table);
// Returns: ['order_id', 'customer_id', 'amount']
// Expand set with nested reference and exclusion
const detailedFields = expandFieldsWithSets(['detailed*'], table);
// Returns: ['order_id', 'customer_id', 'created_at', 'status']
// (amount excluded by the set definition)
// Mix regular fields with set references and exclusions
const customFields = expandFieldsWithSets(
['all*', '-status', 'updated_at'],
table
);
// Returns: ['order_id', 'customer_id', 'amount', 'created_at', 'updated_at']
// Handle exclusions at top level
const filteredFields = expandFieldsWithSets(
['basic*', 'status', '-customer_id'],
table
);
// Returns: ['order_id', 'amount', 'status']

Complete example showing the full translation pipeline from DBT models to compiled explores:
import {
attachTypesToModels,
convertExplores,
getSchemaStructureFromDbtModels,
SupportedDbtAdapter,
} from '@lightdash/common';
async function translateDbtProject(
dbtModels: DbtModelNode[],
warehouseClient: WarehouseClient,
lightdashProjectConfig: LightdashProjectConfig
) {
// Step 1: Get schema structure
const schemaStructure = getSchemaStructureFromDbtModels(dbtModels);
console.log('Schema structure:', schemaStructure);
// Step 2: Fetch warehouse catalog
const catalog = await warehouseClient.getCatalog(schemaStructure);
// Step 3: Attach types from warehouse
const enrichedModels = attachTypesToModels(
dbtModels,
catalog,
true,
true
);
// Step 4: Convert to explores
const explores = await convertExplores(
enrichedModels,
false,
SupportedDbtAdapter.POSTGRES,
[],
warehouseClient,
lightdashProjectConfig
);
// Step 5: Separate successes from errors
const successful = explores.filter((e) => !('errors' in e));
const failed = explores.filter((e) => 'errors' in e);
console.log(`Successfully compiled: ${successful.length} explores`);
console.log(`Failed: ${failed.length} explores`);
return { successful, failed };
}

Constants for parameter prefixes used in SQL templates.
enum LightdashParameters {
PREFIX = 'lightdash.parameters',
PREFIX_SHORT = 'ld.parameters',
}

Extracts parameter references from SQL strings, finding all ${lightdash.parameters.x} or ${ld.parameters.x} patterns.
/**
* Extract parameter references from SQL or template strings
* @param sql - SQL string to scan for parameter references
* @param regex - Optional regex pattern (defaults to parameterRegex)
* @returns Array of unique parameter names referenced
*
* @example
* getParameterReferences('SELECT * FROM table WHERE status = ${lightdash.parameters.order_status}')
* // Returns: ['order_status']
*/
function getParameterReferences(
sql: string,
regex?: RegExp
): string[];

Combines parameter references from both SQL and format strings.
/**
* Extract parameter references from both compiled SQL and format strings
* @param compiledSql - Compiled SQL expression
* @param format - Optional custom format string
* @returns Array of unique parameter names from both sources
*
* @example
* getParameterReferencesFromSqlAndFormat(
* 'SELECT ${ld.parameters.field}',
* '${lightdash.parameters.format_type}'
* )
* // Returns: ['field', 'format_type']
*/
function getParameterReferencesFromSqlAndFormat(
compiledSql: string,
format?: string
): string[];

Gets all available parameter names by combining project-level and explore-level parameters.
/**
* Get all available parameter names for a project and explore
* @param projectParameters - Project-level parameters
* @param exploreParameters - Explore-level parameters
* @returns Array of all available parameter names
*/
function getAvailableParameterNames(
projectParameters: Record<string, LightdashProjectParameter> | undefined,
exploreParameters: Record<string, LightdashProjectParameter> | undefined
): string[];

Validates that all referenced parameters are available, throwing a CompileError if any are missing.
/**
* Validate that all parameter references exist in available parameters
* @param tableName - Name of table being validated (for error messages)
* @param parameterReferences - Parameter names referenced in SQL
* @param availableParameters - Parameter names that are defined
* @throws CompileError if any referenced parameters are missing
*
* @example
* validateParameterReferences(
* 'orders',
* ['status', 'region'],
* ['status'] // 'region' is missing
* )
* // Throws: CompileError("Failed to compile explore \"orders\". Missing parameters: region")
*/
function validateParameterReferences(
tableName: string,
parameterReferences: string[],
availableParameters: string[]
): void;

Regular expression for matching parameter syntax in SQL strings.
/**
* Regex pattern for SQL parameter replacement
* Matches: ${lightdash.parameters.name} or ${ld.parameters.name}
*/
const parameterRegex: RegExp;