CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-pystan

Python interface to Stan, a package for Bayesian inference

Pending
Overview
Eval results
Files

docs/parameter-operations.md

Parameter Operations

Advanced functionality for parameter space transformations, log probability calculations, and gradient computations. These operations provide low-level access to Stan's automatic differentiation capabilities and parameter space transformations.

Capabilities

Parameter Transformation

Transform parameters between constrained and unconstrained parameter spaces.

def constrain_pars(self, unconstrained_parameters: Sequence[float], include_tparams: bool = True, include_gqs: bool = True) -> Sequence[float]:
    """
    Transform a sequence of unconstrained parameters to their defined support,
    optionally including transformed parameters and generated quantities.

    For the model parameters themselves, this is the inverse of
    unconstrain_pars; transformed parameters and generated quantities have
    no unconstrained counterpart and are simply appended when requested.

    Args:
        unconstrained_parameters: A sequence of unconstrained parameters,
            in the order produced by unconstrain_pars
        include_tparams: Whether to append transformed parameters to the
            result. Default: True
        include_gqs: Whether to append generated quantities to the
            result. Default: True

    Returns:
        Sequence[float]: A sequence of constrained parameters, optionally
        followed by transformed parameters and generated quantities

    Notes:
        - The unconstrained parameters are passed to the write_array method
          of the model_base instance
        - See model_base.hpp in the Stan C++ library for details
        - Parameter order matches constrained_param_names when include
          options are default
    """

def unconstrain_pars(self, constrained_parameters: Sequence[float]) -> Sequence[float]:
    """
    Transform constrained parameters to unconstrained scale.

    Converts parameters from their constrained (natural) scale to the
    unconstrained scale used internally by Stan's samplers. This is the
    inverse of the constraining transformation applied during sampling.

    Args:
        constrained_parameters: Sequence of parameter values on constrained
            scale, ordered as in constrained_param_names

    Returns:
        Sequence[float]: Parameter values on unconstrained scale

    Notes:
        - Input parameter order matches constrained_param_names
        - Output can be used with log_prob and grad_log_prob
        - Transformations handle bounds, simplex, correlation matrices, etc.
        - The output may be shorter than the input: constrained types such
          as a K-simplex map to K-1 unconstrained values
    """

Log Probability Evaluation

Calculate log probability density and its gradient at specific parameter values.

def log_prob(self, unconstrained_parameters: Sequence[float], adjust_transform: bool = True) -> float:
    """
    Calculate log probability density.

    Evaluates the log probability density of the model at given parameter
    values (up to an additive constant). This includes the log likelihood
    and log prior density.

    Args:
        unconstrained_parameters: Parameter values on unconstrained scale
        adjust_transform: Whether to include the log-Jacobian adjustment
                          for the constraining parameter transformations.
                          Default: True

    Returns:
        float: Log probability density value

    Notes:
        - Parameters must be on unconstrained scale (use unconstrain_pars if needed)
        - When adjust_transform=True, includes the log of the Jacobian
          determinant of the constraining transformations
        - Used internally by sampling algorithms
        - Useful for model comparison and custom inference algorithms
    """

def grad_log_prob(self, unconstrained_parameters: Sequence[float]) -> Sequence[float]:
    """
    Calculate gradient of log probability density.

    Computes the gradient (first derivatives) of the log probability density
    with respect to unconstrained parameters using Stan's automatic differentiation.

    Args:
        unconstrained_parameters: Parameter values on unconstrained scale

    Returns:
        Sequence[float]: Gradient of log probability density with respect to
        each unconstrained parameter, in the same order as the input

    Notes:
        - Parameters must be on unconstrained scale
        - Returns gradient with respect to unconstrained parameters
        - Used by gradient-based sampling algorithms like HMC-NUTS
        - Computed using Stan's reverse-mode automatic differentiation
        - The unconstrained parameters are passed to the log_prob_grad function in stan::model
    """

Usage Examples

Parameter Space Transformations

import stan

# Model with three constrained parameter types: a positive scale, a
# bounded correlation, and a 3-dimensional simplex.
program_code = """
parameters {
    real<lower=0> sigma;
    real<lower=-1, upper=1> rho;
    simplex[3] theta;
}
model {
    sigma ~ exponential(1);
    rho ~ normal(0, 0.5);
    theta ~ dirichlet(rep_vector(1, 3));
}
"""
# NOTE: dirichlet requires a vector argument; the bracketed literal
# [1, 1, 1] builds a row_vector in Stan and would not compile, so a
# rep_vector is used instead.

model = stan.build(program_code)

# Example constrained parameter values
constrained_params = [
    2.0,        # sigma (positive)
    0.3,        # rho (between -1 and 1)
    0.4, 0.3, 0.3  # theta (simplex: sums to 1)
]

# Transform to unconstrained scale (the simplex drops one dimension,
# so this sequence is one element shorter than the input)
unconstrained_params = model.unconstrain_pars(constrained_params)
print(f"Unconstrained: {unconstrained_params}")

# Transform back to constrained scale; round-trips the values above
back_to_constrained = model.constrain_pars(unconstrained_params)
print(f"Back to constrained: {back_to_constrained}")

Log Probability Evaluation

import stan
import numpy as np

program_code = """
data {
    int<lower=0> N;
    vector[N] y;
}
parameters {
    real mu;
    real<lower=0> sigma;
}
model {
    mu ~ normal(0, 10);
    sigma ~ exponential(1);
    y ~ normal(mu, sigma);
}
"""

# Draw synthetic observations from a normal distribution.
n_obs = 50
observations = np.random.normal(2.0, 1.5, n_obs)
stan_data = {'N': n_obs, 'y': observations.tolist()}

posterior = stan.build(program_code, data=stan_data)

# Point at which to evaluate the density: mu=2.0, sigma=1.5.
theta_constrained = [2.0, 1.5]
theta_unconstrained = posterior.unconstrain_pars(theta_constrained)

# Log density with the Jacobian adjustment (the default).
lp = posterior.log_prob(theta_unconstrained)
print(f"Log probability: {lp}")

# Log density without the Jacobian adjustment.
lp_unadjusted = posterior.log_prob(theta_unconstrained, adjust_transform=False)
print(f"Log probability (no adjustment): {lp_unadjusted}")

# First derivatives with respect to the unconstrained parameters.
gradient = posterior.grad_log_prob(theta_unconstrained)
print(f"Gradient: {gradient}")

Custom Optimization

import stan
import numpy as np
from scipy.optimize import minimize

# Simple linear regression with weakly-informative priors.
program_code = """
data {
    int<lower=0> N;
    vector[N] x;
    vector[N] y;
}
parameters {
    real alpha;
    real beta;
    real<lower=0> sigma;
}
model {
    alpha ~ normal(0, 10);
    beta ~ normal(0, 10);
    sigma ~ exponential(1);
    y ~ normal(alpha + beta * x, sigma);
}
"""

# Generate regression data
N = 100
x = np.random.normal(0, 1, N)
y = 1.5 + 2.0 * x + np.random.normal(0, 0.8, N)

data = {'N': N, 'x': x.tolist(), 'y': y.tolist()}
model = stan.build(program_code, data=data)

# Objective for MAP estimation: negative log posterior density.
def neg_log_prob(unconstrained_params):
    return -model.log_prob(unconstrained_params)

def neg_grad_log_prob(unconstrained_params):
    # grad_log_prob returns a plain sequence of floats; unary negation is
    # not defined for sequences, so convert to a NumPy array first.
    return -np.asarray(model.grad_log_prob(unconstrained_params))

# Starting point (unconstrained scale)
initial_constrained = [0.0, 0.0, 1.0]  # alpha, beta, sigma
initial_unconstrained = model.unconstrain_pars(initial_constrained)

# Optimize using gradient information
result = minimize(
    neg_log_prob,
    initial_unconstrained,
    jac=neg_grad_log_prob,
    method='BFGS'
)

# Transform result back to constrained scale
optimal_constrained = model.constrain_pars(result.x)
print(f"Optimal parameters: alpha={optimal_constrained[0]:.3f}, "
      f"beta={optimal_constrained[1]:.3f}, sigma={optimal_constrained[2]:.3f}")

Model Comparison

import stan
import numpy as np

# Compare two models using log probability
program_code_1 = """
data {
    int<lower=0> N;
    vector[N] y;
}
parameters {
    real mu;
    real<lower=0> sigma;
}
model {
    mu ~ normal(0, 1);
    sigma ~ exponential(1);
    y ~ normal(mu, sigma);
}
"""

program_code_2 = """
data {
    int<lower=0> N;
    vector[N] y;
}
parameters {
    real<lower=0> lambda;
}
model {
    lambda ~ gamma(2, 1);
    y ~ exponential(lambda);
}
"""

# Data (clearly not exponential)
# NOTE(review): normal(2.0, 1.0) draws can be negative, which lies outside
# the exponential model's support — log_prob for model2 may then be -inf or
# raise. That only strengthens the intended conclusion, but be aware of it.
y = np.random.normal(2.0, 1.0, 100)
data = {'N': len(y), 'y': y.tolist()}

# Build both models
model1 = stan.build(program_code_1, data=data)
model2 = stan.build(program_code_2, data=data)

# Evaluate at reasonable parameter values.
# adjust_transform=False drops the log-Jacobian term of each model's
# constraining transformation: the two models use different
# parameterizations, so including their parameterization-specific Jacobian
# adjustments would make the densities incomparable.
params1_const = [2.0, 1.0]  # mu, sigma
params1_uncon = model1.unconstrain_pars(params1_const)
log_prob1 = model1.log_prob(params1_uncon, adjust_transform=False)

params2_const = [0.5]  # lambda
params2_uncon = model2.unconstrain_pars(params2_const)
log_prob2 = model2.log_prob(params2_uncon, adjust_transform=False)

print(f"Normal model log probability: {log_prob1:.2f}")
print(f"Exponential model log probability: {log_prob2:.2f}")
print(f"Normal model fits better: {log_prob1 > log_prob2}")

Install with Tessl CLI

npx tessl i tessl/pypi-pystan

docs

bayesian-sampling.md

index.md

model-building.md

parameter-operations.md

results-analysis.md

tile.json