Credentials Reference

All credential types for warehouse connections. They are defined in and imported from @lightdash/common (not @lightdash/warehouses).

TypeScript/ESM Import Pattern:

// All credential types imported from @lightdash/common
import {
  type CreateBigqueryCredentials,
  type CreateClickhouseCredentials,
  type CreateDatabricksCredentials,
  type CreatePostgresCredentials,
  type CreateRedshiftCredentials,
  type CreateSnowflakeCredentials,
  type CreateTrinoCredentials,
  type CreateWarehouseCredentials,  // Union of all types
  WarehouseTypes,  // Enum for type discriminator
  WeekDay,         // Enum for startOfWeek configuration
} from '@lightdash/common';

CommonJS Import Pattern:

// All credential types imported from @lightdash/common
const {
  WarehouseTypes,
  WeekDay,
} = require('@lightdash/common');

/**
 * @typedef {import('@lightdash/common').CreateBigqueryCredentials} CreateBigqueryCredentials
 * @typedef {import('@lightdash/common').CreatePostgresCredentials} CreatePostgresCredentials
 * @typedef {import('@lightdash/common').CreateWarehouseCredentials} CreateWarehouseCredentials
 */

BigQuery

TypeScript/ESM:

// Import from @lightdash/common (NOT @lightdash/warehouses)
import { type CreateBigqueryCredentials, WarehouseTypes, WeekDay } from '@lightdash/common';

const credentials: CreateBigqueryCredentials = {
  type: 'bigquery', // or WarehouseTypes.BIGQUERY
  project: 'my-project-id',
  dataset: 'my_dataset',
  location: 'US', // or 'EU', 'us-west1', etc.
  keyfileContents: {
    type: 'service_account',
    project_id: 'my-project-id',
    private_key: '-----BEGIN PRIVATE KEY-----\n...',
    client_email: 'service@project.iam.gserviceaccount.com',
  },

  // Optional
  timeoutSeconds: 300,
  priority: 'interactive', // or 'batch'
  maximumBytesBilled: 1000000000,
  retries: 3,
  executionProject: 'execution-project-id',
  startOfWeek: WeekDay.MONDAY, // or null
};

ClickHouse

import { type CreateClickhouseCredentials, WarehouseTypes, WeekDay } from '@lightdash/common';

const credentials: CreateClickhouseCredentials = {
  type: 'clickhouse', // or WarehouseTypes.CLICKHOUSE
  host: 'localhost',
  port: 8123, // HTTP port
  user: 'default',
  password: 'mypassword',
  schema: 'analytics', // Database name in ClickHouse

  // Optional
  secure: true, // Use HTTPS
  timeoutSeconds: 30,
  startOfWeek: WeekDay.MONDAY,
};

Databricks

import { type CreateDatabricksCredentials, WarehouseTypes, WeekDay } from '@lightdash/common';

// With Personal Access Token
const credentials: CreateDatabricksCredentials = {
  type: 'databricks', // or WarehouseTypes.DATABRICKS
  serverHostName: 'my-workspace.cloud.databricks.com',
  httpPath: '/sql/1.0/warehouses/abc123',
  database: 'analytics', // Schema name
  catalog: 'production', // Optional Unity Catalog
  personalAccessToken: 'dapi1234567890abcdef',

  // Optional
  startOfWeek: WeekDay.MONDAY,
};

// With OAuth M2M (Machine-to-Machine)
const m2mCredentials: CreateDatabricksCredentials = {
  type: 'databricks',
  serverHostName: 'my-workspace.cloud.databricks.com',
  httpPath: '/sql/1.0/warehouses/abc123',
  database: 'analytics',
  catalog: 'production',
  oauthClientId: 'client-id',
  oauthClientSecret: 'client-secret',
  authenticationType: 'oauth_m2m',
  startOfWeek: null,
};

// With OAuth U2M (User-to-Machine)
const u2mCredentials: CreateDatabricksCredentials = {
  type: 'databricks',
  serverHostName: 'my-workspace.cloud.databricks.com',
  httpPath: '/sql/1.0/warehouses/abc123',
  database: 'analytics',
  oauthClientId: 'client-id',
  refreshToken: 'refresh-token',
  authenticationType: 'oauth_u2m',
  startOfWeek: null,
};

PostgreSQL

TypeScript/ESM:

// Import from @lightdash/common (NOT @lightdash/warehouses)
import { type CreatePostgresCredentials, WarehouseTypes, WeekDay } from '@lightdash/common';

const credentials: CreatePostgresCredentials = {
  type: 'postgres', // or WarehouseTypes.POSTGRES
  host: 'localhost',
  port: 5432,
  user: 'myuser',
  password: 'mypassword',
  dbname: 'analytics',
  schema: 'public',
  sslmode: 'disable', // or 'require', 'verify-full', etc.

  // Optional
  timeoutSeconds: 300,
  keepalivesIdle: 30,
  searchPath: 'public,other_schema',
  role: 'analyst',
  startOfWeek: WeekDay.MONDAY,

  // SSH Tunnel (optional)
  useSshTunnel: true,
  sshTunnelHost: 'bastion.example.com',
  sshTunnelPort: 22,
  sshTunnelUser: 'sshuser',
  sshTunnelPrivateKey: '-----BEGIN RSA PRIVATE KEY-----\n...',
};

CommonJS:

// Import from @lightdash/common
const { WarehouseTypes, WeekDay } = require('@lightdash/common');

/** @type {import('@lightdash/common').CreatePostgresCredentials} */
const credentials = {
  type: WarehouseTypes.POSTGRES, // or 'postgres'
  host: 'localhost',
  port: 5432,
  user: 'myuser',
  password: 'mypassword',
  dbname: 'analytics',
  schema: 'public',
  sslmode: 'disable',

  // Optional
  timeoutSeconds: 300,
  keepalivesIdle: 30,
  searchPath: 'public,other_schema',
  role: 'analyst',
  startOfWeek: WeekDay.MONDAY,

  // SSH Tunnel (optional)
  useSshTunnel: true,
  sshTunnelHost: 'bastion.example.com',
  sshTunnelPort: 22,
  sshTunnelUser: 'sshuser',
  sshTunnelPrivateKey: '-----BEGIN RSA PRIVATE KEY-----\n...',
};

SSL Modes: disable, no-verify, allow, prefer, require, verify-ca, verify-full

Redshift

import { type CreateRedshiftCredentials, WarehouseTypes, WeekDay } from '@lightdash/common';

const credentials: CreateRedshiftCredentials = {
  type: 'redshift', // or WarehouseTypes.REDSHIFT
  host: 'my-cluster.abc123.us-east-1.redshift.amazonaws.com',
  port: 5439,
  user: 'admin',
  password: 'mypassword',
  dbname: 'analytics',
  schema: 'public',
  sslmode: 'require',

  // Optional (same as PostgreSQL)
  timeoutSeconds: 300,
  keepalivesIdle: 30,
  ra3Node: true, // Redshift RA3 optimizations
  startOfWeek: WeekDay.MONDAY,

  // SSH Tunnel (optional, same as PostgreSQL)
  useSshTunnel: true,
  sshTunnelHost: 'bastion.example.com',
  sshTunnelPort: 22,
  sshTunnelUser: 'sshuser',
  sshTunnelPrivateKey: '-----BEGIN RSA PRIVATE KEY-----\n...',
};

Snowflake

import { type CreateSnowflakeCredentials, WarehouseTypes, WeekDay } from '@lightdash/common';

// With Password
const credentials: CreateSnowflakeCredentials = {
  type: 'snowflake', // or WarehouseTypes.SNOWFLAKE
  account: 'my-account', // May include region: 'my-account.us-east-1'
  user: 'myuser',
  password: 'mypassword',
  database: 'ANALYTICS',
  warehouse: 'COMPUTE_WH',
  schema: 'PUBLIC',

  // Optional
  role: 'ANALYST',
  clientSessionKeepAlive: true,
  queryTag: 'lightdash',
  quotedIdentifiersIgnoreCase: false,
  startOfWeek: WeekDay.MONDAY,
};

// With Key Pair
const keyPairCredentials: CreateSnowflakeCredentials = {
  type: 'snowflake',
  account: 'my-account',
  user: 'myuser',
  database: 'ANALYTICS',
  warehouse: 'COMPUTE_WH',
  schema: 'PUBLIC',
  privateKey: '-----BEGIN PRIVATE KEY-----\n...',
  privateKeyPass: 'passphrase', // Optional
  authenticationType: 'private_key',
  startOfWeek: null,
};

// With OAuth SSO
const oauthCredentials: CreateSnowflakeCredentials = {
  type: 'snowflake',
  account: 'my-account',
  user: 'myuser',
  database: 'ANALYTICS',
  warehouse: 'COMPUTE_WH',
  schema: 'PUBLIC',
  token: 'short-lived-access-token',
  refreshToken: 'refresh-token',
  authenticationType: 'sso',
  startOfWeek: null,
};

// With External Browser
const browserCredentials: CreateSnowflakeCredentials = {
  type: 'snowflake',
  account: 'my-account',
  user: 'myuser',
  database: 'ANALYTICS',
  warehouse: 'COMPUTE_WH',
  schema: 'PUBLIC',
  authenticationType: 'external_browser',
  startOfWeek: null,
};

Auth Types: password, private_key, sso, external_browser

Trino

import { type CreateTrinoCredentials, WarehouseTypes, WeekDay } from '@lightdash/common';

const credentials: CreateTrinoCredentials = {
  type: 'trino', // or WarehouseTypes.TRINO
  host: 'trino.example.com',
  port: 8080,
  user: 'analyst',
  password: 'mypassword',
  dbname: 'hive', // Catalog name
  schema: 'default',
  http_scheme: 'https', // or 'http'

  // Optional
  source: 'lightdash', // Query source identifier
  startOfWeek: WeekDay.MONDAY,
};

Credential Type Union

// CreateWarehouseCredentials, as exported from @lightdash/common, is the
// union of all credential types (the credential input for the factory function):
type CreateWarehouseCredentials =
  | CreateBigqueryCredentials
  | CreateClickhouseCredentials
  | CreateDatabricksCredentials
  | CreatePostgresCredentials
  | CreateRedshiftCredentials
  | CreateSnowflakeCredentials
  | CreateTrinoCredentials;
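
Because every member of the union carries a type discriminator, TypeScript can narrow it with a switch. A minimal sketch (the describeCredentials helper below is illustrative, not part of the package):

import { type CreateWarehouseCredentials, WarehouseTypes } from '@lightdash/common';

// Hypothetical helper: narrow the union on the `type` discriminator
function describeCredentials(credentials: CreateWarehouseCredentials): string {
  switch (credentials.type) {
    case WarehouseTypes.POSTGRES:
    case WarehouseTypes.REDSHIFT:
      // Narrowed: host, port and dbname are available here
      return `${credentials.type} @ ${credentials.host}:${credentials.port}/${credentials.dbname}`;
    case WarehouseTypes.BIGQUERY:
      // Narrowed: project and dataset are available here
      return `bigquery @ ${credentials.project}.${credentials.dataset}`;
    default:
      return credentials.type;
  }
}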

Common Fields

All credential types share the following fields (a short example follows the list):

  • type: Warehouse type discriminator (required)
  • startOfWeek: Week start day for date calculations (optional)
  • requireUserCredentials: Require user-provided credentials (optional, boolean)
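
A brief sketch exercising the shared optional fields on a Postgres credential (values are placeholders):

import { type CreatePostgresCredentials, WarehouseTypes, WeekDay } from '@lightdash/common';

const credentials: CreatePostgresCredentials = {
  type: WarehouseTypes.POSTGRES,
  host: 'localhost',
  port: 5432,
  user: 'myuser',
  password: 'mypassword',
  dbname: 'analytics',
  schema: 'public',
  sslmode: 'require',

  // Shared optional fields
  requireUserCredentials: true, // require user-provided credentials
  startOfWeek: WeekDay.SUNDAY,  // week start for date calculations (or null)
};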

Warehouse Types Enum

// WarehouseTypes, as exported from @lightdash/common (the `type` discriminator values):
enum WarehouseTypes {
  BIGQUERY = 'bigquery',
  CLICKHOUSE = 'clickhouse',
  DATABRICKS = 'databricks',
  POSTGRES = 'postgres',
  REDSHIFT = 'redshift',
  SNOWFLAKE = 'snowflake',
  TRINO = 'trino',
}

Week Day Enum

// WeekDay, as exported from @lightdash/common (used for startOfWeek):
enum WeekDay {
  MONDAY = 0,
  TUESDAY = 1,
  WEDNESDAY = 2,
  THURSDAY = 3,
  FRIDAY = 4,
  SATURDAY = 5,
  SUNDAY = 6,
}