CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-pymc3

Probabilistic Programming in Python: Bayesian Modeling and Probabilistic Machine Learning with Theano

68

0.94x
Overview
Eval results
Files

docs/sampling.md

MCMC Sampling Methods

PyMC3 provides state-of-the-art Markov Chain Monte Carlo (MCMC) algorithms for sampling from posterior distributions. The library includes advanced samplers like the No-U-Turn Sampler (NUTS), various Metropolis methods, and specialized algorithms, with automatic step method assignment and adaptive tuning.

Capabilities

Main Sampling Functions

Core functions for MCMC sampling and posterior analysis.

def sample(draws=1000, step=None, init='auto', n_init=200000, 
           initvals=None, trace=None, chain_idx=0, chains=None, cores=None, 
           tune=1000, progressbar=True, model=None, random_seed=None, 
           discard_tuned_samples=True, compute_convergence_checks=True, 
           callback=None, jitter_max_retries=10, start=None, 
           return_inferencedata=None, idata_kwargs=None, mp_ctx=None, 
           pickle_backend='pickle', **kwargs):
    """
    Draw samples from the posterior distribution using MCMC.
    
    The main interface for MCMC sampling in PyMC3. Automatically selects
    appropriate step methods, performs adaptive tuning, and can return
    results in ArviZ InferenceData format for analysis.
    
    Parameters:
    - draws: int, number of post-tuning samples to draw per chain
    - step: step method or list of step methods (auto-assigned if None)
    - init: str, initialization method ('auto', 'adapt_diag',
      'jitter+adapt_diag', 'advi', 'advi_map', 'map', ...); a string,
      only used when NUTS is (auto-)assigned
    - n_init: int, number of iterations of the initializer (e.g. ADVI
      optimization steps), not a retry count (default 200000)
    - initvals: PointType or Sequence, initial parameter values per chain
    - trace: Backend, trace backend for storing samples
    - chain_idx: int, index given to the first chain (useful when
      sampling in several calls)
    - chains: int, number of chains to run (derived from cores if None)
    - cores: int, number of CPU cores for parallel chains (auto if None)
    - tune: int, number of tuning (warm-up) steps per chain; discarded
      from the result unless discard_tuned_samples=False
    - progressbar: bool, display progress bar
    - model: Model, model to sample from (current context if None)
    - random_seed: int or list, random seed(s), one per chain
    - discard_tuned_samples: bool, exclude tuning samples from results
    - compute_convergence_checks: bool, compute R-hat and effective
      sample size diagnostics after sampling
    - callback: function, called after each draw
    - jitter_max_retries: int, maximum retries when a jittered start
      point is invalid (non-finite log-probability)
    - start: dict, starting values for variables (deprecated alias of
      initvals -- prefer initvals)
    - return_inferencedata: bool, return ArviZ InferenceData if True,
      MultiTrace if False
    - idata_kwargs: dict, keyword arguments for InferenceData conversion
    - mp_ctx: multiprocessing context (or its name) for parallel chains
    - pickle_backend: str, serializer used for multiprocessing
      ('pickle' or 'dill')
    
    Returns:
    - InferenceData or MultiTrace: posterior samples and sampler stats
    """

def sample_posterior_predictive(trace, samples=None, model=None, 
                               var_names=None, size=None, 
                               keep_size=False, random_seed=None, 
                               progressbar=True, return_inferencedata=True, 
                               extend_inferencedata=True, 
                               predictions=False, **kwargs):
    """
    Generate posterior predictive samples from model.
    
    Draws new data from the likelihood, conditioning each draw on one
    posterior parameter sample, enabling in-sample model checking and
    out-of-sample prediction.
    
    Parameters:
    - trace: InferenceData or MultiTrace, posterior samples
    - samples: int, number of posterior predictive draws (one per
      posterior sample if None)
    - model: Model, model for prediction (current context if None)
    - var_names: list, variables to sample (all observed RVs if None)
    - size: int, number of simulated datasets per posterior sample
    - keep_size: bool, reshape output to match the posterior's
      (chains, draws, ...) shape instead of a flat sample dimension
    - random_seed: int, random seed for reproducibility
    - progressbar: bool, display progress bar
    - return_inferencedata: bool, return as InferenceData
    - extend_inferencedata: bool, attach the predictive group to the
      existing InferenceData rather than returning a separate object
    - predictions: bool, store results as out-of-sample predictions
      (the `predictions` group) rather than `posterior_predictive`
    
    Returns:
    - dict or InferenceData: posterior predictive samples keyed by
      variable name
    """

def sample_posterior_predictive_w(traces, samples=None, models=None, 
                                 weights=None, **kwargs):
    """
    Generate weighted posterior predictive samples from multiple models.
    
    Draws posterior predictive samples from several models in proportion
    to the supplied weights -- the standard mechanism for Bayesian model
    averaging (e.g. with weights obtained from model comparison).
    
    Parameters:
    - traces: list, posterior traces, one per model
    - samples: int, total number of predictive samples, split across
      models according to `weights`
    - models: list, models corresponding one-to-one with `traces`
    - weights: array-like, relative model weights (uniform if None)
    - kwargs: additional arguments for sample_posterior_predictive
    
    Returns:
    - dict: weighted posterior predictive samples keyed by variable name
    """

def sample_prior_predictive(samples=500, model=None, var_names=None, 
                           random_seed=None):
    """
    Generate samples from the prior predictive distribution.
    
    Draws from the joint prior, ignoring any observed data -- simulates
    datasets before seeing evidence, useful for prior predictive checks
    and for validating that priors encode sensible beliefs.
    
    Parameters:
    - samples: int, number of prior predictive draws (default 500)
    - model: Model, model to sample from (current context if None)
    - var_names: list, variables to sample (all observed and unobserved
      RVs if None)
    - random_seed: int, random seed for reproducibility
    
    Returns:
    - dict: mapping of variable name to an array of prior draws
    """

def iter_sample(draws, step, start=None, trace=None, chain=0, 
               tune=None, model=None, random_seed=None, callback=None):
    """
    Generator for iterative MCMC sampling, one draw at a time.
    
    Yields after every draw, giving fine-grained control over the
    sampling loop for custom processing, online monitoring, or early
    stopping. Note the yielded trace is cumulative: it contains every
    sample drawn so far, not only the newest one.
    
    Parameters:
    - draws: int, total number of samples to draw
    - step: step method for sampling
    - start: dict, starting parameter values
    - trace: BaseTrace, trace object to store samples
    - chain: int, chain identifier
    - tune: int, number of tuning steps (0 if None)
    - model: Model, model to sample from (current context if None)
    - random_seed: int, random seed
    - callback: function, called after each sample
    
    Yields:
    - MultiTrace: the cumulative trace after each new sample
    """

def stop_tuning(step):
    """
    Disable the tuning phase for a step method.
    
    Freezes adapted quantities (step sizes, scalings) so subsequent
    draws come from a fixed transition kernel. For a CompoundStep this
    applies to each component method.
    
    Parameters:
    - step: step method or CompoundStep
    
    Returns:
    - the step method with tuning turned off
    """

Step Method Assignment

Functions for automatic and manual step method assignment.

def assign_step_methods(model, step=None, methods=None, 
                       step_kwargs=None, **kwargs):
    """
    Assign appropriate step methods to model variables.
    
    Variables already covered by user-supplied `step` methods are left
    alone; each remaining free variable is matched against the candidate
    step method classes and the best-suited one is selected (e.g. NUTS
    for continuous variables, Metropolis variants for discrete).
    
    Parameters:
    - model: Model, model to assign step methods for
    - step: step method or list, user-specified step methods to keep
    - methods: list, candidate step method classes (library defaults
      if None)
    - step_kwargs: dict, keyword arguments passed to the step methods
      on construction
    
    Returns:
    - list: assigned step methods covering all free variables
    """

def instantiate_steppers(model, steps, selected, step_kwargs=None):
    """
    Create step method instances for assigned variables.
    
    Turns step method *classes* (together with the variables selected
    for each) into concrete sampler objects ready for the sampling loop.
    
    Parameters:
    - model: Model, model context
    - steps: list, step method classes
    - selected: list, variables assigned to each step method class
    - step_kwargs: dict, keyword arguments forwarded to each step
      method constructor
    
    Returns:
    - list: instantiated step method objects
    """

Hamiltonian Monte Carlo Methods

Gradient-based MCMC methods that use Hamiltonian dynamics for efficient exploration of parameter space.

class NUTS:
    """
    No-U-Turn Sampler (NUTS) - adaptive Hamiltonian Monte Carlo.
    
    NUTS automatically tunes step size and path length, growing each
    trajectory until it would double back on itself ("U-turn"), to
    efficiently sample complex posterior distributions. Default sampler
    for continuous variables in PyMC3.
    """
    
    def __init__(self, vars=None, Emax=1000, target_accept=0.8, 
                 gamma=0.05, k=0.75, t0=10, model=None, 
                 scaling=None, is_cov=False, potential=None, 
                 integrator='leapfrog', **kwargs):
        """
        Initialize NUTS sampler.
        
        Parameters:
        - vars: list, variables to sample (all continuous if None)
        - Emax: float, maximum energy change allowed before a
          trajectory is treated as divergent
        - target_accept: float, target acceptance probability for
          step-size adaptation; raise toward 0.95-0.99 when
          divergences occur
        - gamma: float, dual-averaging step-size adaptation parameter
        - k: float, dual-averaging step-size adaptation parameter
        - t0: float, dual-averaging step-size adaptation parameter
        - model: Model, model context
        - scaling: array, scaling matrix for the mass matrix
        - is_cov: bool, whether scaling is a covariance (precision if
          False)
        - potential: Potential, custom potential / mass-matrix object
        - integrator: str, integration method ('leapfrog')
        - kwargs: further options (e.g. max_treedepth, step_scale) are
          forwarded to the underlying HMC machinery
        """

class HamiltonianMC:
    """
    Hamiltonian Monte Carlo sampler with fixed path length.
    
    Classical HMC: simulates Hamiltonian dynamics for a fixed trajectory
    length with a user-specified step size, then applies a Metropolis
    accept/reject correction. Prefer NUTS unless fixed-length
    trajectories are specifically needed.
    """
    
    def __init__(self, vars=None, path_length=2, step_rand=None, 
                 step_scale=0.25, model=None, blocked=True, 
                 potential=None, integrator='leapfrog', **kwargs):
        """
        Initialize HMC sampler.
        
        Parameters:
        - vars: list, variables to sample (all continuous if None)
        - path_length: float, total length of each Hamiltonian
          trajectory
        - step_rand: function, optional randomizer for the step size
          each iteration (helps break periodic behavior)
        - step_scale: float, step size scaling factor
        - model: Model, model context
        - blocked: bool, update all variables jointly
        - potential: Potential, custom potential / mass-matrix object
        - integrator: str, integration method
        """

Metropolis Methods

Random walk and proposal-based MCMC methods for discrete and constrained variables.

class Metropolis:
    """
    Metropolis-Hastings sampler with customizable proposals.
    
    General-purpose random-walk MCMC. Robust and gradient-free, but
    less efficient than gradient-based samplers in high dimensions;
    mainly useful for discrete variables or models without gradients.
    """
    
    def __init__(self, vars=None, S=None, proposal_dist=None, 
                 lambda_=None, model=None, blocked=True, **kwargs):
        """
        Initialize Metropolis sampler.
        
        Parameters:
        - vars: list, variables to sample (all if None)
        - S: array, scaling matrix (or standard deviations) for the
          proposal distribution
        - proposal_dist: class, proposal distribution used to draw
          jumps (e.g. NormalProposal, CauchyProposal)
        - lambda_: float, global scaling parameter applied to proposals
        - model: Model, model context
        - blocked: bool, update variables jointly rather than one at
          a time
        """

class BinaryMetropolis:
    """
    Metropolis sampler for binary variables.
    
    Proposes bit-flips of elements of a binary variable and applies the
    Metropolis accept/reject rule. Suitable for Bernoulli and other
    two-valued discrete variables.
    """

class BinaryGibbsMetropolis:
    """
    Gibbs-Metropolis sampler for binary variables.
    
    Sweeps over the elements of a binary vector, updating each one
    conditional on the rest -- typically mixes better than plain
    BinaryMetropolis for vectors of binary variables.
    """

class CategoricalGibbsMetropolis:
    """
    Gibbs-Metropolis sampler for categorical variables.
    
    Updates each element of a categorical variable in turn, proposing
    among its categories -- efficient for discrete variables with a
    modest number of categories.
    """

class DEMetropolis:
    """
    Differential Evolution Metropolis sampler (DE-MCMC).
    
    Population-based MCMC: proposals are built from the scaled
    difference of states of other chains plus small noise, letting the
    proposal direction and scale adapt to the posterior. Requires
    several chains running in parallel to form the population.
    """
    
    def __init__(self, vars=None, lamb=None, gamma=1, epsilon=1e-6, 
                 save_history=False, model=None, **kwargs):
        """
        Initialize DE Metropolis sampler.
        
        Parameters:
        - vars: list, variables to sample
        - lamb: float, differential weight lambda (auto-tuned if None;
          the classic choice is 2.38 / sqrt(2 * d))
        - gamma: float, additional scaling of the difference vector
        - epsilon: float, scale of the random noise added to proposals
        - save_history: bool, keep the population history
        - model: Model, model context
        """

class DEMetropolisZ:
    """
    Differential Evolution Metropolis with Z-history (DE-MCMC-Z).
    
    Variant of DE-MCMC that builds proposals from the chain's own past
    samples (the "Z" archive) instead of a population of parallel
    chains, so it can mix well even when run as a single chain.
    """

Metropolis Proposal Distributions

Proposal distributions for Metropolis-Hastings updates.

class NormalProposal:
    """
    Multivariate normal proposal distribution.
    
    Standard random-walk jumps: generates normal increments (scaled by
    `s`) that are added to the current state.
    """
    
    def __init__(self, s):
        """
        Parameters:
        - s: array, scaling matrix or per-dimension standard deviations
        """

class CauchyProposal:
    """
    Multivariate Cauchy proposal distribution.
    
    Heavy-tailed jumps that occasionally propose large moves, helping
    exploration of posteriors with long tails or widely separated
    regions of mass.
    """

class LaplaceProposal:
    """
    Multivariate Laplace proposal distribution.
    
    Double-exponential jumps with a sharper peak and heavier tails than
    a normal proposal, mixing many small refinements with occasional
    larger moves.
    """

class PoissonProposal:
    """
    Poisson proposal distribution for count variables.
    
    Generates non-negative integer jumps from a Poisson distribution,
    making it suitable for discrete count-valued parameters.
    """

class MultivariateNormalProposal:
    """
    Multivariate normal proposal with full covariance.
    
    Draws correlated jumps from a full covariance matrix, so proposals
    can follow the local geometry of correlated posteriors.
    """

class UniformProposal:
    """
    Uniform proposal distribution.
    
    Proposes jumps drawn uniformly from a bounded interval -- a simple
    choice for bounded or otherwise constrained parameter spaces.
    """

Specialized Samplers

Advanced and specialized sampling methods for specific model types.

class Slice:
    """
    Slice sampler for univariate distributions.
    
    Samples by drawing uniformly from the region under the density
    curve (the "slice"), adaptively growing and shrinking the bracket,
    so it needs essentially no hand-tuned parameters.
    """
    
    def __init__(self, vars=None, w=1.0, tune=True, model=None, **kwargs):
        """
        Initialize slice sampler.
        
        Parameters:
        - vars: list, variables to sample; intended for scalar
          (univariate) distributions
        - w: float, initial width of the slice bracket
        - tune: bool, adapt the width during the tuning phase
        - model: Model, model context
        """

class EllipticalSlice:
    """
    Elliptical slice sampler for Gaussian priors.
    
    Exploits a multivariate normal prior by proposing states on an
    ellipse through the current state and a fresh prior draw --
    effective for Gaussian process models and other MVN latent
    variables, with no tunable parameters.
    """
    
    def __init__(self, vars=None, prior_cov=None, model=None, **kwargs):
        """
        Initialize elliptical slice sampler.
        
        Parameters:
        - vars: list, variables whose prior is multivariate normal
        - prior_cov: array, covariance matrix of that Gaussian prior
        - model: Model, model context
        """

class ElemwiseCategorical:
    """
    Element-wise categorical sampler.
    
    Gibbs-style sampling of categorical variables, updating each
    element independently. NOTE(review): deprecated in later PyMC3
    releases in favor of CategoricalGibbsMetropolis -- verify against
    the installed version.
    """

class PGBART:
    """
    Particle Gibbs sampler for BART models.
    
    Dedicated sampler for Bayesian Additive Regression Trees: each
    step re-grows trees using particle Gibbs (conditional SMC) moves.
    Presumably assigned automatically to BART variables -- confirm
    against the step-assignment logic.
    """

Compound Step Methods

Methods for combining multiple step methods and handling complex model structures.

class CompoundStep:
    """
    Compound step method combining multiple samplers.
    
    Wraps several step methods and applies each in sequence on every
    sampling iteration, which lets one model mix variable types (e.g.
    NUTS for continuous variables plus Metropolis for discrete ones).
    """
    
    def __init__(self, methods):
        """
        Initialize compound step method.
        
        Parameters:
        - methods: list, individual step method objects, applied in
          order on each iteration
        """
    
    @property
    def vars(self):
        """All variables handled by the component methods combined."""
    
    @property
    def generates_stats(self):
        """Whether any component method generates sampling statistics."""

Multi-Level Delayed Acceptance

Advanced methods for expensive likelihood evaluations and multi-level modeling.

class MLDA:
    """
    Multi-Level Delayed Acceptance sampler.
    
    Screens proposals through a hierarchy of cheap coarse models before
    evaluating the expensive fine model, reducing the number of costly
    likelihood evaluations required.
    """
    
    def __init__(self, vars=None, coarse_models=None, base_sampler=None, 
                 base_kwargs=None, model=None, **kwargs):
        """
        Initialize MLDA sampler.
        
        Parameters:
        - vars: list, variables to sample
        - coarse_models: list, coarse approximations of the model,
          ordered from coarsest to finest
        - base_sampler: step method class used at the coarsest level
        - base_kwargs: dict, keyword arguments for the base sampler
        - model: Model, the fine (top-level) model context
        """

class DEMetropolisZMLDA:
    """
    DEMetropolisZ variant for use within MLDA.
    
    Combines differential-evolution proposals built from chain history
    with multi-level delayed acceptance -- useful when the coarse levels
    benefit from self-tuning proposals.
    """

class MetropolisMLDA:
    """
    Metropolis variant for use within MLDA.
    
    Plain Metropolis updates adapted to run inside the multi-level
    delayed acceptance hierarchy, trading sophistication for low
    per-step cost at the coarse levels.
    """

class RecursiveDAProposal:
    """
    Recursive delayed acceptance proposal mechanism.
    
    Generates MLDA proposals by recursively running the sampler on the
    next-coarser level, so each level's proposal is itself screened by
    all the levels below it.
    """

Usage Examples

Basic MCMC Sampling

import pymc3 as pm
import numpy as np

# Basic model sampling (`data` is an observed array defined elsewhere)
with pm.Model() as model:
    mu = pm.Normal('mu', mu=0, sigma=10)
    sigma = pm.HalfNormal('sigma', sigma=5)
    y_obs = pm.Normal('y_obs', mu=mu, sigma=sigma, observed=data)
    
    # Sample with default settings (NUTS auto-assigned for continuous vars)
    trace = pm.sample(draws=1000, tune=1000, chains=4)
    
    # Sample with specific parameters
    # (extra kwargs such as target_accept are forwarded to NUTS)
    trace = pm.sample(draws=2000, tune=1500, target_accept=0.9,
                     chains=4, cores=2, random_seed=42)

Custom Step Method Assignment

# Manual step method assignment (`count_data` defined elsewhere)
with pm.Model() as mixed_model:
    # Continuous parameters
    theta = pm.Beta('theta', alpha=1, beta=1)
    
    # Discrete parameters  
    k = pm.Poisson('k', mu=5)
    
    # Binary parameter
    switch = pm.Bernoulli('switch', p=0.5)
    
    # Likelihood
    y_obs = pm.NegativeBinomial('y_obs', mu=theta*k*switch, 
                               alpha=1, observed=count_data)
    
    # Assign one step method per variable type; pm.sample combines
    # them into a CompoundStep applied in sequence each iteration
    step1 = pm.NUTS([theta])  # NUTS for continuous
    step2 = pm.Metropolis([k])  # Metropolis for discrete
    step3 = pm.BinaryMetropolis([switch])  # Binary Metropolis
    
    trace = pm.sample(1000, step=[step1, step2, step3])

Advanced NUTS Configuration

# Customized NUTS sampler
with pm.Model() as nuts_model:
    x = pm.Normal('x', mu=0, sigma=1, shape=10)
    y_obs = pm.Normal('y_obs', mu=x.sum(), sigma=0.1, observed=5.0)
    
    # NUTS with custom settings (max_treedepth is forwarded via **kwargs)
    nuts_step = pm.NUTS(vars=[x], 
                       target_accept=0.95,  # Higher acceptance rate
                       max_treedepth=12)    # Deeper trees
    
    # 'advi+adapt_diag_grad': initialize with ADVI, then adapt a
    # diagonal mass matrix informed by gradients
    trace = pm.sample(1000, step=nuts_step, 
                     init='advi+adapt_diag_grad')

Differential Evolution Metropolis

# Population-based sampling with DE-MCMC (`data` defined elsewhere)
with pm.Model() as de_model:
    # Multi-modal posterior
    mu1 = pm.Normal('mu1', mu=-2, sigma=1)
    mu2 = pm.Normal('mu2', mu=2, sigma=1) 
    
    # Mixture likelihood creating multi-modality
    components = [pm.Normal.dist(mu=mu1, sigma=0.5),
                  pm.Normal.dist(mu=mu2, sigma=0.5)]
    
    y_obs = pm.Mixture('y_obs', w=[0.3, 0.7], 
                      comp_dists=components, observed=data)
    
    # DE-MCMC for multi-modal exploration; lamb sets the differential
    # weight explicitly (2.38 is the classic DE-MCMC value)
    step = pm.DEMetropolis(vars=[mu1, mu2], lamb=2.38)
    
    # Many chains form the DE population; cores=1 keeps the population
    # synchronized in a single process
    trace = pm.sample(1000, step=step, chains=10, cores=1)

Slice Sampling

# Slice sampler for univariate parameters (`group_data` defined elsewhere)
with pm.Model() as slice_model:
    # Parameter with complex posterior shape
    tau = pm.Gamma('tau', alpha=1, beta=1)
    
    # Hierarchical structure
    sigmas = pm.InverseGamma('sigmas', alpha=2, beta=tau, shape=5)
    
    y_obs = pm.Normal('y_obs', mu=0, sigma=sigmas, observed=group_data)
    
    # Slice sampling for tau (univariate, needs no tuning parameters)
    step_tau = pm.Slice([tau])
    
    # NUTS for sigmas (multivariate)  
    step_sigmas = pm.NUTS([sigmas])
    
    # The two step methods are combined into a CompoundStep
    trace = pm.sample(1000, step=[step_tau, step_sigmas])

Posterior Predictive Sampling

# Comprehensive posterior predictive workflow
# (`x_data`, `y_data`, `x_new` are arrays defined elsewhere)
with pm.Model() as pred_model:
    # Register predictors as a mutable data container so they can be
    # swapped later with pm.set_data() -- set_data only works on
    # variables created via pm.Data, not on plain numpy arrays.
    x_shared = pm.Data('x_data', x_data)

    alpha = pm.Normal('alpha', mu=0, sigma=1)
    beta = pm.Normal('beta', mu=0, sigma=1)
    sigma = pm.HalfNormal('sigma', sigma=1)
    
    mu = alpha + beta * x_shared
    y_obs = pm.Normal('y_obs', mu=mu, sigma=sigma, observed=y_data)
    
    # Sample posterior
    trace = pm.sample(1000, tune=1000)
    
    # In-sample posterior predictive checks
    ppc = pm.sample_posterior_predictive(trace, samples=500)
    
    # Out-of-sample prediction: replace the data behind 'x_data'
    # (possible because 'x_data' was created with pm.Data above)
    pm.set_data({'x_data': x_new})
    ppc_new = pm.sample_posterior_predictive(trace, samples=500)
    
    # Custom variables for prediction: add an unobserved copy of the
    # likelihood and sample it explicitly by name. We are still inside
    # the pred_model context, so no nested `with` is needed.
    y_pred = pm.Normal('y_pred', mu=mu, sigma=sigma)
    
    ppc_custom = pm.sample_posterior_predictive(trace, 
                                               var_names=['y_pred'],
                                               samples=1000)

Sequential and Online Sampling

# Online/sequential sampling with iter_sample
with pm.Model() as online_model:
    theta = pm.Beta('theta', alpha=1, beta=1)
    
    # Metropolis step plus an in-memory NDArray backend for the samples
    step = pm.Metropolis([theta])
    trace = pm.backends.NDArray(model=online_model)
    
    # Sequential sampling: each iteration yields the cumulative trace
    for i, current_trace in enumerate(pm.iter_sample(100, step, 
                                                    trace=trace, 
                                                    tune=50)):
        if i % 10 == 0:
            # Process intermediate results
            current_mean = current_trace['theta'].mean()
            print(f"Step {i}: current mean = {current_mean:.3f}")
            
            # Optional: early stopping based on convergence
            if i > 50 and abs(current_mean - 0.5) < 0.01:
                break

Custom Initialization

# Custom initialization strategies (`data` is defined elsewhere).
# Note: `init` takes a *string* naming an initialization method;
# explicit starting points must be passed via `start` instead.
with pm.Model() as init_model:
    mu = pm.Normal('mu', mu=0, sigma=10)
    sigma = pm.HalfNormal('sigma', sigma=5)
    y_obs = pm.Normal('y_obs', mu=mu, sigma=sigma, observed=data)
    
    # MAP initialization: pass the MAP point as starting values
    map_estimate = pm.find_MAP()
    trace_map = pm.sample(1000, start=map_estimate)
    
    # ADVI initialization (here `init` is a method name, so it is
    # correct to use the `init` argument)
    trace_advi = pm.sample(1000, init='advi', n_init=50000)
    
    # Custom starting values; transformed variables use the
    # transformed-space names (e.g. 'sigma_log__')
    start = {'mu': 2.0, 'sigma_log__': np.log(1.5)}
    trace_custom = pm.sample(1000, start=start)
    
    # Per-chain starting values: a list of dicts, one per chain
    n_chains = 4
    starts = [
        {
            'mu': np.random.normal(0, 2),
            'sigma_log__': np.random.normal(0, 1),
        }
        for _ in range(n_chains)
    ]
    
    trace_multi = pm.sample(1000, start=starts, chains=n_chains)

Install with Tessl CLI

npx tessl i tessl/pypi-pymc3

docs

data-handling.md

distributions.md

gaussian-processes.md

glm.md

index.md

math-functions.md

modeling.md

sampling.md

stats-plots.md

step-methods.md

variational.md

tile.json