Shared TypeScript library for the Lightdash platform containing common types, utilities, and business logic for analytics workflows
Overall
score
72%
Evaluation — 72%
↑ 1.09× agent success when using this tile
This document contains types for project configuration, warehouse connections, warehouse client interfaces, and query result types.
/** Lifecycle category of a Lightdash project: a standard project or an ephemeral preview. */
enum ProjectType {
DEFAULT = 'DEFAULT',
PREVIEW = 'PREVIEW',
}
/**
 * Full project configuration: a dbt connection plus an optional warehouse connection.
 */
interface Project {
/** Owning organization. */
organizationUuid: string;
projectUuid: string;
name: string;
/** DEFAULT or PREVIEW (see ProjectType). */
type: ProjectType;
/** How the dbt project is sourced (git provider, dbt Cloud IDE, local, none). */
dbtConnection: DbtProjectConfig;
/** Warehouse credentials including sensitive fields; optional until configured. */
warehouseConnection?: CreateWarehouseCredentials;
dbtVersion?: SupportedDbtVersions;
pinnedListUuid?: string;
/** NOTE(review): presumably links a preview project to its source project — confirm. */
upstreamProjectUuid?: string | null;
}
/**
 * Lightweight project shape for listings: connections are reduced to their
 * `type` discriminator only, so no connection details or secrets are exposed.
 */
interface ProjectSummary {
organizationUuid: string;
projectUuid: string;
name: string;
type: ProjectType;
dbtConnection: Pick<DbtProjectConfig, 'type'>;
warehouseConnection?: Pick<WarehouseCredentials, 'type'>;
}
/** Where a project's dbt code is sourced from. */
enum DbtProjectType {
DBT = 'dbt',
GITHUB = 'github',
GITLAB = 'gitlab',
BITBUCKET = 'bitbucket',
AZURE_DEVOPS = 'azure_devops',
DBT_CLOUD_IDE = 'dbt_cloud_ide',
NONE = 'none',
MANIFEST = 'manifest',
}
/**
 * Union of dbt connection configs, one per DbtProjectType.
 * NOTE(review): DbtProjectType also declares MANIFEST, but no manifest config
 * variant is listed here — confirm whether the union is complete or elided
 * in this summary.
 */
type DbtProjectConfig =
| DbtLocalProjectConfig
| DbtGithubProjectConfig
| DbtGitlabProjectConfig
| DbtBitBucketProjectConfig
| DbtAzureDevOpsProjectConfig
| DbtCloudIDEProjectConfig
| DbtNoneProjectConfig;
/** dbt core versions supported by Lightdash. */
enum SupportedDbtVersions {
  V1_4 = 'v1_4',
  V1_5 = 'v1_5',
  V1_6 = 'v1_6',
  V1_7 = 'v1_7',
  V1_8 = 'v1_8',
  V1_9 = 'v1_9',
  V1_10 = 'v1_10',
}

/** Version assumed when a project does not pin a dbt version. */
declare const DefaultSupportedDbtVersion: SupportedDbtVersions;

/** True when the dbt project type is backed by a git provider. */
declare function isGitProjectType(type: DbtProjectType): boolean;

/** True for v1_10 and anything newer. */
declare function isDbtVersion110OrHigher(dbtVersion: SupportedDbtVersions): boolean;

// NOTE(review): name reads "Support", not "Supported" — kept as-is for API compatibility.
declare function getLatestSupportDbtVersion(): SupportedDbtVersions;

/**
 * Warehouse engines Lightdash can connect to. Used as the discriminator
 * (`type` field) on warehouse credential objects.
 */
enum WarehouseTypes {
  BIGQUERY = 'bigquery',
  POSTGRES = 'postgres',
  REDSHIFT = 'redshift',
  SNOWFLAKE = 'snowflake',
  DATABRICKS = 'databricks',
  TRINO = 'trino',
  CLICKHOUSE = 'clickhouse',
}
/** How a BigQuery connection authenticates: SSO, a service-account private key, or Application Default Credentials. */
enum BigqueryAuthenticationType {
SSO = 'sso',
PRIVATE_KEY = 'private_key',
ADC = 'adc',
}
/** How a Databricks connection authenticates: PAT, machine-to-machine OAuth, or user-to-machine OAuth. */
enum DatabricksAuthenticationType {
PERSONAL_ACCESS_TOKEN = 'personal_access_token',
OAUTH_M2M = 'oauth_m2m',
OAUTH_U2M = 'oauth_u2m',
}
/** How a Snowflake connection authenticates. */
enum SnowflakeAuthenticationType {
PASSWORD = 'password',
PRIVATE_KEY = 'private_key',
SSO = 'sso',
EXTERNAL_BROWSER = 'external_browser',
}
/**
 * Union of warehouse credential shapes, discriminated by `type`
 * (WarehouseTypes). This summary lists only the primary variants.
 * FIX: the union previously had no terminating semicolon, leaving the
 * declaration syntactically unfinished before the next type.
 */
type WarehouseCredentials =
  | BigqueryCredentials
  | PostgresCredentials
  | RedshiftCredentials
  | SnowflakeCredentials
  | DatabricksCredentials
  | TrinoCredentials; // ... and 17 more credential types

/**
 * Credentials as supplied on create/update — i.e. the same union but
 * including sensitive fields.
 */
type CreateWarehouseCredentials = WarehouseCredentials & {
  // Includes sensitive fields like passwords
};
/** Optional SSH tunnel settings mixed into warehouse connections. */
interface SshTunnelConfiguration {
/** When true, connect through the SSH tunnel described below. */
useSshTunnel?: boolean;
sshTunnelHost?: string;
sshTunnelPort?: number;
sshTunnelUser?: string;
/** Public half of the tunnel key pair; the private key is not exposed here. */
sshTunnelPublicKey?: string;
}
/**
 * TLS settings mixed into warehouse connections. The *FileName fields carry the
 * original upload names; the paired fields carry the certificate/key contents
 * (null presumably meaning "cleared" — confirm against the API).
 */
interface SslConfiguration {
/** e.g. 'require', 'verify-full' — libpq-style mode string. */
sslmode?: string;
sslcertFileName?: string;
sslcert?: string | null;
sslkeyFileName?: string;
sslkey?: string | null;
sslrootcertFileName?: string;
sslrootcert?: string | null;
}
/** Names of credential fields that must be redacted before returning credentials to clients. */
declare const sensitiveCredentialsFieldNames: string[];

/**
 * Merges a partial credentials update onto stored credentials, so secrets the
 * update omits are preserved from the stored target.
 */
declare function mergeWarehouseCredentials(
  target: WarehouseCredentials,
  update: Partial<CreateWarehouseCredentials>
): CreateWarehouseCredentials;

/**
 * Dialect-specific SQL generation for a warehouse engine.
 */
interface WarehouseSqlBuilder {
  /** Compiles a metric query against an explore into warehouse SQL. */
  getSqlForMetricQuery(
    explore: Explore,
    metricQuery: CompiledMetricQuery,
    warehouseClient?: WarehouseClient
  ): string;
  /** Identifier quote character for this dialect (e.g. double quote or backtick). */
  getFieldQuoteChar(): string;
  /** String-literal quote character. */
  getStringQuoteChar(): string;
  /** Character used to escape the string quote inside literals. */
  getEscapeStringQuoteChar(): string;
  /** Name of the dialect's floating-point type. */
  getFloatingType(): string;
  /** Builds a dialect-correct string concatenation expression. */
  concatString(...args: string[]): string;
  /** Escapes a value for safe embedding in a string literal. */
  escapeString(value: string): string;
  getAdapterType(): SupportedDbtAdapter;
  /** Configured first day of week, if any. */
  getStartOfWeek(): WeekDay | null | undefined;
}
/**
 * A live warehouse connection: dialect-aware SQL building plus query execution.
 */
interface WarehouseClient extends WarehouseSqlBuilder {
/** Credentials this client was constructed with. */
credentials: WarehouseCredentials;
/** Verifies connectivity/authentication; rejects on failure. */
test(): Promise<void>;
/** Fetches catalog metadata scoped to the requested database/schema/table. */
getCatalog(
config: Pick<WarehouseCatalog, 'database' | 'schema' | 'table'>
): Promise<WarehouseCatalog>;
/** Executes `sql`, delivering result batches to `streamCallback` as they arrive. */
streamQuery(
sql: string,
streamCallback: (data: WarehouseResults) => void,
options?: {
tags?: RunQueryTags;
timezone?: string;
// presumably positional bind parameters for the query — confirm driver semantics
values?: any[];
}
): Promise<void>;
/** Executes `sql` and resolves with the full, buffered result set. */
runQuery(
sql: string,
tags?: RunQueryTags,
options?: {
timezone?: string;
values?: any[];
}
): Promise<WarehouseResults>;
}
/** A batch (or full set) of query results from a warehouse client. */
interface WarehouseResults {
/** Column name -> dimension type for each returned column. */
fields: Record<string, { type: DimensionType }>;
/** Rows keyed by column name. */
rows: Record<string, unknown>[];
}
/** Base shape for warehouse-specific query metadata, discriminated by `type`. */
interface IWarehouseQueryMetadata {
type: WarehouseTypes;
}
/** BigQuery-specific query metadata. */
interface BigQueryWarehouseQueryMetadata extends IWarehouseQueryMetadata {
type: WarehouseTypes.BIGQUERY;
/** Location/region of the executed job — presumably needed to look the job up later; confirm. */
jobLocation: string;
}
/** Union of per-warehouse metadata variants (currently BigQuery only). */
type WarehouseQueryMetadata = BigQueryWarehouseQueryMetadata;
interface RunQueryTags {
project_uuid?: string;
organization_uuid?: string;
user_uuid?: string;
query_type?: string;
}type SqlResultsRow = { [columnName: string]: unknown };
type SqlResultsField = {
name: string;
type: string;
};
type SqlQueryResults = {
fields: SqlResultsField[];
rows: SqlResultsRow[];
};These types represent the structure of raw SQL query results from warehouse clients.
type ResultValue = {
raw: unknown;
formatted: string;
};
type ResultRow = Record<string, { value: ResultValue }>;
type RawResultRow = Record<string, unknown>;
type PopColumnMetadata = {
baseFieldId: string;
};
type ResultColumn = {
reference: string;
type: DimensionType;
popMetadata?: PopColumnMetadata;
};
type ResultColumns = Record<string, ResultColumn>;
function isResultValue(value: unknown): value is { value: ResultValue };
function isRawResultRow(value: unknown): value is unknown;
function convertItemTypeToDimensionType(item: Item): DimensionType;These types represent formatted query results returned by Lightdash queries.
ResultValue: Contains both raw and formatted representations of a value.
ResultRow: A row of results with formatted values.
RawResultRow: A row of raw, unformatted results.
PopColumnMetadata: Metadata for period-over-period comparison columns.
ResultColumn: Column metadata including type and optional PoP information.
isResultValue(): Type guard for checking if a value is a ResultValue.
isRawResultRow(): Type guard for checking if a value is a RawResultRow.
convertItemTypeToDimensionType(): Converts any item type to a DimensionType.
User-specific warehouse authentication credentials that allow individual users to connect to warehouses with their own credentials instead of using project-level credentials.
/**
 * User warehouse credentials without sensitive data.
 * The `credentials` member is a union discriminated by `type` (WarehouseTypes).
 */
type UserWarehouseCredentials = {
/** Unique identifier for the credentials */
uuid: string;
/** UUID of the user who owns these credentials */
userUuid: string;
/** Display name for these credentials */
name: string;
/** When the credentials were created */
createdAt: Date;
/** When the credentials were last updated */
updatedAt: Date;
/** Warehouse-specific credentials (without secrets); note BigQuery/Databricks expose only `type` */
credentials:
| Pick<CreateRedshiftCredentials, 'type' | 'user'>
| Pick<CreatePostgresCredentials, 'type' | 'user'>
| Pick<CreateSnowflakeCredentials, 'type' | 'user'>
| Pick<CreateTrinoCredentials, 'type' | 'user'>
| Pick<CreateClickhouseCredentials, 'type' | 'user'>
| Pick<CreateBigqueryCredentials, 'type'>
| Pick<CreateDatabricksCredentials, 'type'>;
};
/**
 * User warehouse credentials including sensitive authentication data.
 * Server-side shape only; the API returns UserWarehouseCredentials instead.
 */
type UserWarehouseCredentialsWithSecrets = Pick<UserWarehouseCredentials, 'uuid'> & {
/** Warehouse-specific credentials including secrets */
credentials:
| Pick<CreateRedshiftCredentials, 'type' | 'user' | 'password'>
| Pick<CreatePostgresCredentials, 'type' | 'user' | 'password'>
| Pick<
CreateSnowflakeCredentials,
'type' | 'user' | 'password' | 'authenticationType' | 'token'
>
| Pick<CreateTrinoCredentials, 'type' | 'user' | 'password'>
| Pick<CreateClickhouseCredentials, 'type' | 'user' | 'password'>
| Pick<
CreateBigqueryCredentials,
'type' | 'keyfileContents' | 'authenticationType'
>
/* Databricks user credentials may additionally override connection fields. */
| (Pick<
CreateDatabricksCredentials,
| 'type'
| 'personalAccessToken'
| 'authenticationType'
| 'refreshToken'
| 'token'
> &
Partial<
Pick<
CreateDatabricksCredentials,
'database' | 'serverHostName' | 'httpPath'
>
>);
};
/**
* Payload for creating or updating user warehouse credentials
*/
type UpsertUserWarehouseCredentials = {
/** Display name for the credentials */
name: string;
/** Warehouse-specific credentials including secrets */
credentials: UserWarehouseCredentialsWithSecrets['credentials'];
};Usage Example:
import {
type UserWarehouseCredentials,
type UpsertUserWarehouseCredentials,
WarehouseTypes
} from '@lightdash/common';
// Create user-specific warehouse credentials
const newCredentials: UpsertUserWarehouseCredentials = {
name: 'My Personal Snowflake Connection',
credentials: {
type: WarehouseTypes.SNOWFLAKE,
user: 'john.doe@company.com',
password: 'my-secure-password',
authenticationType: SnowflakeAuthenticationType.PASSWORD
}
};
// Store credentials (API returns without secrets)
const stored: UserWarehouseCredentials = await api.createUserWarehouseCredentials(
newCredentials
);
Comprehensive types for DBT manifest parsing, model configuration, and Lightdash-specific extensions.
enum SupportedDbtAdapter {
BIGQUERY = 'bigquery',
DATABRICKS = 'databricks',
SNOWFLAKE = 'snowflake',
REDSHIFT = 'redshift',
POSTGRES = 'postgres',
TRINO = 'trino',
CLICKHOUSE = 'clickhouse',
}Note on WarehouseTypes vs SupportedDbtAdapter:
Both enums define the same set of warehouse values (bigquery, postgres, redshift, snowflake, databricks, trino, clickhouse), but serve different purposes:
WarehouseTypes (see above): Used for warehouse credential/connection types. It's the discriminator field in credential type definitions (e.g., CreateBigqueryCredentials.type: WarehouseTypes.BIGQUERY).
SupportedDbtAdapter: Used in the dbt compilation layer. Represents which dbt adapter parses dbt models and handles database-specific SQL compilation. Used in Explore.targetDatabase, DbtManifestMetadata.adapter_type, and adapter-specific normalization functions.
The two enums are parallel type systems that must stay synchronized to ensure correct warehouse types flow from credentials through compilation to query execution.
/** Known dbt manifest schema versions. */
enum DbtManifestVersion {
  V7 = 'v7',
  V8 = 'v8',
  V9 = 'v9',
  V10 = 'v10',
  V11 = 'v11',
  V12 = 'v12',
  V20 = 'v20', // DBT Fusion
}

/** Reads the schema version from a manifest's metadata. */
declare function getDbtManifestVersion(manifest: DbtManifest): DbtManifestVersion;
declare function getLatestSupportedDbtManifestVersion(): DbtManifestVersion;
/** Narrows manifest metadata to one whose adapter_type is supported. */
declare function isSupportedDbtAdapter(x: DbtRawManifestMetadata): x is DbtManifestMetadata;
declare function isSupportedDbtAdapterType(x: string): x is SupportedDbtAdapter;

/** Parsed dbt manifest (manifest.json). */
interface DbtManifest {
  nodes: Record<string, DbtNode>;
  metadata: DbtRawManifestMetadata;
  metrics: Record<string, DbtMetric>;
  docs: Record<string, DbtDoc>;
  sources?: Record<string, ParsedSourceDefinition>;
  macros?: Record<string, ParsedMacro>;
  exposures?: Record<string, ParsedExposure>;
  /** node id -> upstream node ids */
  parent_map?: Record<string, string[]>;
  /** node id -> downstream node ids */
  child_map?: Record<string, string[]>;
}
/** Manifest metadata exactly as found in manifest.json. */
interface DbtRawManifestMetadata {
  dbt_schema_version: string;
  generated_at: string;
  adapter_type: string;
}

/** Manifest metadata after the adapter type has been validated. */
interface DbtManifestMetadata extends DbtRawManifestMetadata {
  adapter_type: SupportedDbtAdapter; // Validated
}

/** Extracts model nodes from a manifest. */
declare function getModelsFromManifest(manifest: DbtManifest): DbtModelNode[];

/** Filters models to the given compiled model ids (all models when omitted — confirm). */
declare function getCompiledModels(
  manifestModels: DbtModelNode[],
  compiledModelIds?: string[]
): DbtModelNode[];

// Union of dbt node variants; this summary elides the non-model kinds.
type DbtNode =
  | DbtModelNode
  | DbtRawModelNode
  | CompiledModelNode
  | ParsedModelNode; // ... and many more node types (tests, seeds, snapshots, etc.)
/** dbt node `config` block; open-ended because dbt allows arbitrary config keys. */
interface DbtNodeConfig {
materialized?: string; // 'view', 'table', 'incremental', 'ephemeral'
/** Snowflake warehouse override for this node. */
snowflake_warehouse?: string;
/** Databricks compute override for this node. */
databricks_compute?: string;
[key: string]: unknown;
}
/** A dbt model node as parsed from the manifest, before database normalisation. */
interface DbtRawModelNode {
unique_id: string;
resource_type: 'model';
/** May be absent/null until normaliseModelDatabase resolves it. */
database?: string | null;
schema: string;
name: string;
alias: string;
columns: Record<string, DbtModelColumn>;
config?: CompiledModelNode['config'] & { meta?: DbtModelMetadata };
/** Lightdash model configuration (dbt `meta` block). */
meta: DbtModelMetadata;
depends_on?: { nodes?: string[]; macros?: string[] };
description?: string;
tags?: string[];
package_name: string;
path: string;
original_file_path: string;
raw_code: string;
compiled_code?: string;
fqn: string[];
relation_name?: string;
/** Pre-render config — presumably used to read join declarations before templating; confirm. */
unrendered_config?: { meta?: { joins?: Array<{ join: string }> } };
}
/** A model node whose database has been resolved. */
interface DbtModelNode extends DbtRawModelNode {
  database: string; // Required after normalization
}

/**
 * Fills in the model's database using adapter-specific defaulting rules,
 * upgrading a raw node to a DbtModelNode.
 */
declare function normaliseModelDatabase(
  model: DbtRawModelNode,
  targetWarehouse: SupportedDbtAdapter
): DbtModelNode;

/** A column on a dbt model, with optional Lightdash metadata. */
interface DbtModelColumn {
  name: string;
  description?: string;
  meta?: DbtColumnMetadata;
  data_type?: DimensionType;
  tags?: string[];
  config?: { meta?: DbtColumnMetadata };
}
/** Column-level Lightdash metadata (dimension overrides plus metrics). */
type DbtColumnMetadata = DbtColumnLightdashConfig;
/** `meta` payload allowed on a dbt column. */
interface DbtColumnLightdashConfig {
dimension?: DbtColumnLightdashDimension;
metrics?: Record<string, DbtColumnLightdashMetric>;
}
/** Dimension configuration for a column. */
interface DbtColumnLightdashDimension {
name?: string;
label?: string;
type?: DimensionType;
description?: string;
sql?: string;
/** Boolean/'default'/'OFF' presumably toggle default intervals; an array lists explicit intervals — confirm. */
time_intervals?: boolean | 'default' | 'OFF' | TimeFrames[];
hidden?: boolean;
format?: Format | string;
group_label?: string;
groups?: string[] | string;
colors?: Record<string, string>;
urls?: FieldUrl[];
/** User attributes required to access this dimension. */
required_attributes?: Record<string, string | string[]>;
ai_hint?: string | string[];
tags?: string | string[];
}
/** A Lightdash metric defined on a dbt column (`meta.metrics`). */
interface DbtColumnLightdashMetric {
  label?: string;
  type: MetricType; // Required
  description?: string;
  /** Defaults to the column's SQL when omitted — TODO confirm. */
  sql?: string;
  hidden?: boolean;
  format?: Format | string;
  group_label?: string;
  groups?: string[];
  urls?: FieldUrl[];
  show_underlying_values?: string[];
  filters?: { [key: string]: any }[];
  /** Percentile value for percentile-type metrics. */
  percentile?: number;
  ai_hint?: string | string[];
  tags?: string | string[];
}

/** Model-level `meta` payload; identical shape to DbtModelLightdashConfig. */
interface DbtModelMetadata extends DbtModelLightdashConfig {}
/** Lightdash configuration carried in a dbt model's `meta` block. */
interface DbtModelLightdashConfig extends ExploreConfig, SharedDbtModelLightdashConfig {
/** Model-level metrics (sql required, unlike column metrics). */
metrics?: Record<string, DbtModelLightdashMetric>;
sets?: Record<string, FieldSetDefinition>;
order_fields_by?: OrderFieldsByStrategy;
group_label?: string;
sql_filter?: string;
sql_where?: string; // Alias for sql_filter
sql_from?: string;
/** User attributes required to access this model. */
required_attributes?: Record<string, string | string[]>;
group_details?: Record<string, DbtModelGroup>;
default_time_dimension?: { field: string; interval: TimeFrames };
spotlight?: {
visibility?: 'visible' | 'hidden';
categories?: string[];
};
/** Additional named explores derived from this model. */
explores?: Record<string, ExploreConfig & SharedDbtModelLightdashConfig>;
ai_hint?: string | string[];
parameters?: LightdashProjectConfig['parameters'];
primary_key?: string | string[];
}
/** Config keys shared between a model and its nested explores. */
interface SharedDbtModelLightdashConfig {
  default_filters?: RequiredFilter[];
  /** @deprecated use default_filters */
  required_filters?: RequiredFilter[];
}

/** A named field group shown together in the UI. */
interface DbtModelGroup {
  label: string;
  description?: string;
}

/** A reusable named set of field ids. */
interface FieldSetDefinition {
  fields: string[];
}

/** Model-level metric; unlike column metrics, `sql` is mandatory. */
interface DbtModelLightdashMetric extends DbtColumnLightdashMetric {
  sql: string; // Required for model-level metrics
}

/** Join types allowed in model `meta.joins`. */
type DbtModelJoinType = 'inner' | 'full' | 'left' | 'right';
/** A join declared in a model's Lightdash meta. */
interface DbtModelJoin {
  join: string; // Name of model to join
  sql_on: string; // JOIN ON clause
  alias?: string;
  label?: string;
  type?: DbtModelJoinType; // Default: 'left'
  hidden?: boolean;
  fields?: string[]; // Limit to specific fields
  always?: boolean; // Always include in queries
  relationship?: JoinRelationship;
  description?: string;
}

/** A dbt metric node from the manifest (legacy, pre-semantic-layer metrics). */
interface DbtMetric {
  fqn: string[];
  unique_id: string;
  package_name: string;
  name: string;
  description: string;
  label: string;
  calculation_method: string;
  expression: string;
  timestamp: string;
  filters: MetricFilter[];
  time_grains: string[];
  dimensions: string[];
  window?: MetricTime;
  model?: string;
  meta?: Record<string, any> & DbtMetricLightdashMetadata;
  /** Shape depends on manifest version: nested string arrays, or V9MetricRef objects. */
  refs?: string[][] | V9MetricRef[];
  tags?: string[];
}
/** Lightdash-specific metadata carried on a dbt metric's `meta`. */
interface DbtMetricLightdashMetadata {
hidden?: boolean;
group_label?: string;
groups?: string[];
show_underlying_values?: string[];
/** NOTE(review): required while every other field is optional — confirm this is intended. */
filters: Record<string, any>[];
}
/** Metric ref object shape used by manifest v9 and later. */
type V9MetricRef = {
name: string;
package?: string | null;
version?: string | number | null;
};
/** Converts a model-level Lightdash metric definition into a compiled Metric. */
declare function convertModelMetric(args: {
  modelName: string;
  metricName: string;
  metric: DbtModelLightdashMetric;
  // ... other args
}): Metric;

/** Converts a column-level Lightdash metric definition into a compiled Metric. */
declare function convertColumnMetric(args: {
  columnName: string;
  metricName: string;
  metric: DbtColumnLightdashMetric;
  // ... other args
}): Metric;

/** A dbt source definition parsed from the manifest. */
interface ParsedSourceDefinition {
  fqn: string[];
  database?: string | null;
  schema: string;
  unique_id: string;
  package_name: string;
  name: string;
  /** Name of the enclosing `sources:` group. */
  source_name: string;
  /** Physical table/view identifier in the warehouse. */
  identifier: string;
  resource_type: 'source';
  description?: string;
  columns?: Record<string, ColumnInfo>;
  meta?: Record<string, unknown>;
  tags?: string[];
  loaded_at_field?: string;
  freshness?: FreshnessThreshold;
}
/** A dbt docs block (markdown documentation). */
interface DbtDoc {
  unique_id: string;
  name: string;
  block_contents: string; // Markdown content
}

/** Source freshness thresholds from a dbt source definition. */
interface FreshnessThreshold {
  warn_after?: { count?: number; period?: 'minute' | 'hour' | 'day' };
  error_after?: { count?: number; period?: 'minute' | 'hour' | 'day' };
}

/** Categories of dbt exposures. */
enum DbtExposureType {
  DASHBOARD = 'dashboard',
  NOTEBOOK = 'notebook',
  ANALYSIS = 'analysis',
  ML = 'ml',
  APPLICATION = 'application',
}
/** A dbt exposure: a downstream artifact that depends on models. */
interface DbtExposure {
  name: string;
  owner: { name: string; email: string };
  type: DbtExposureType;
  dependsOn: string[]; // ref() expressions
  label?: string;
  description?: string;
  url?: string;
  tags?: string[];
}

/** A node entry in the dbt catalog (catalog.json). */
interface DbtCatalogNode {
  metadata: DbtCatalogNodeMetadata;
  columns: Record<string, DbtCatalogNodeColumn>;
}
/** Table/view metadata from the dbt catalog. */
interface DbtCatalogNodeMetadata {
  type: string; // 'table', 'view', etc.
  database: string | null;
  schema: string;
  name: string;
  comment?: string;
  owner?: string;
}

/** Column metadata from the dbt catalog. */
interface DbtCatalogNodeColumn {
  type: string; // Data type
  comment?: string;
  /** Ordinal position of the column. */
  index: number;
  name: string;
}

/** Node name -> its direct dependencies. */
type LineageGraph = Record<string, LineageNodeDependency[]>;

/** One dependency edge in the lineage graph. */
interface LineageNodeDependency {
  type: 'model' | 'seed' | 'source';
  name: string;
}
function buildModelGraph(
allModels: Pick<DbtModelNode, 'unique_id' | 'name' | 'depends_on'>[]
): DepGraph<LineageNodeDependency>;function convertToGroups(
dbtGroups: string | string[] | undefined,
dbtGroupLabel: string | undefined
): string[];
function convertToAiHints(
aiHint: string | string[] | undefined
): string[] | undefined;
function patchPathParts(patchPath: string): { project: string; path: string };
// Type guards
function isV9MetricRef(x: string[] | V9MetricRef): x is V9MetricRef;
function isDbtRpcManifestResults(
results: Record<string, any>
): results is DbtRpcGetManifestResults;These DBT types enable Lightdash to parse DBT projects, extend them with business intelligence capabilities, and maintain type safety throughout the compilation and query execution process.
Types for SSH key management, used for secure connections to Git repositories and other SSH-enabled services.
/** SSH public/private key pair */
type SshKeyPair = {
/** Private key (PEM format) */
privateKey: string;
/** Public key (OpenSSH format) */
publicKey: string;
};
/** API response containing SSH public key */
type ApiSshKeyPairResponse = {
status: 'ok';
/** Only returns the public key for security */
results: Pick<SshKeyPair, 'publicKey'>;
};Usage Example:
import { type SshKeyPair, type ApiSshKeyPairResponse } from '@lightdash/common';
// API response when generating SSH keys for a project
async function generateSshKeys(projectUuid: string): Promise<string> {
const response: ApiSshKeyPairResponse = await api.post(
`/projects/${projectUuid}/ssh-keys/generate`
);
// Returns only the public key; private key is stored securely on server
return response.results.publicKey;
}
Types for BigQuery Single Sign-On authentication and dataset discovery.
/** BigQuery dataset information */
type BigqueryDataset = {
/** Google Cloud project ID */
projectId: string;
/** Dataset location (region), undefined if not specified */
location: string | undefined;
/** Dataset ID */
datasetId: string;
};
/** API response containing available BigQuery datasets */
type ApiBigqueryDatasets = {
status: 'ok';
/** List of accessible datasets */
results: BigqueryDataset[];
};Usage Example:
import { type ApiBigqueryDatasets, type BigqueryDataset } from '@lightdash/common';
// Fetch available BigQuery datasets for a user
async function getBigQueryDatasets(): Promise<BigqueryDataset[]> {
const response: ApiBigqueryDatasets = await api.get('/bigquery/datasets');
// Filter datasets by location
const usDatasets = response.results.filter(
dataset => dataset.location?.startsWith('us-')
);
return usDatasets;
}
Types for Git repository integration configuration, used for connecting Lightdash projects to version control systems.
/** Git integration configuration */
type GitIntegrationConfiguration = {
/** Whether Git integration is enabled */
enabled: boolean;
/** GitHub App installation ID (for GitHub integration) */
installationId?: string;
};
/** Git repository information */
type GitRepo = {
/** Repository name */
name: string;
/** Full repository name (e.g., 'lightdash/lightdash') */
fullName: string;
/** Owner username or organization */
ownerLogin: string;
};
/** Pull request creation response */
type PullRequestCreated = {
/** Title of the created pull request */
prTitle: string;
/** URL to the pull request */
prUrl: string;
};Usage Example:
import {
type GitIntegrationConfiguration,
type GitRepo,
type PullRequestCreated
} from '@lightdash/common';
// Get Git configuration for a project
async function getGitConfig(projectUuid: string): Promise<GitIntegrationConfiguration> {
const response = await api.get(`/projects/${projectUuid}/git-integration`);
return response.results;
}
// List available repositories
async function listRepositories(): Promise<GitRepo[]> {
const response = await api.get('/git/repos');
return response.results;
}
// Create a pull request for DBT model changes
async function createModelPR(
projectUuid: string,
modelChanges: any
): Promise<PullRequestCreated> {
const response = await api.post(
`/projects/${projectUuid}/git/pull-request`,
modelChanges
);
return response.results;
}
Install with Tessl CLI
npx tessl i tessl/npm-lightdash--common
docs
evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10
scenario-11
scenario-12
scenario-13
scenario-14
scenario-15
scenario-16
scenario-17
scenario-18
scenario-19
scenario-20