A flexible, scalable deep probabilistic programming library built on PyTorch for universal probabilistic modeling and inference
—
Comprehensive collection of probability distributions for probabilistic modeling, including continuous, discrete, multivariate, and specialized distributions with automatic differentiation support.
Fundamental continuous probability distributions for modeling real-valued random variables.
class Normal(dist.Distribution):
    """
    Normal (Gaussian) distribution with location and scale parameters.

    Parameters:
    - loc (Tensor or float): Mean of the distribution
    - scale (Tensor or float): Standard deviation (must be positive)

    Examples:
    >>> normal = dist.Normal(0.0, 1.0)  # Standard normal
    >>> normal = dist.Normal(torch.zeros(5), torch.ones(5))  # Batch of 5 independent normals
    """
    # Scalars are accepted per the first example above.
    def __init__(self, loc: torch.Tensor | float, scale: torch.Tensor | float): ...
class Beta(dist.Distribution):
    """
    Beta distribution parameterized by concentration parameters.

    Parameters:
    - concentration1 (Tensor or float): First concentration parameter (alpha > 0)
    - concentration0 (Tensor or float): Second concentration parameter (beta > 0)

    Examples:
    >>> beta = dist.Beta(1.0, 1.0)  # Uniform on [0, 1]
    >>> beta = dist.Beta(2.0, 5.0)  # Skewed toward 0
    """
    def __init__(self, concentration1: torch.Tensor | float, concentration0: torch.Tensor | float): ...
class Gamma(dist.Distribution):
    """
    Gamma distribution parameterized by concentration and rate.

    Parameters:
    - concentration (Tensor or float): Shape parameter (alpha > 0)
    - rate (Tensor or float): Rate parameter (beta > 0)

    Examples:
    >>> gamma = dist.Gamma(2.0, 1.0)  # Concentration=2, Rate=1
    >>> gamma = dist.Gamma(torch.ones(5), torch.ones(5))
    """
    def __init__(self, concentration: torch.Tensor | float, rate: torch.Tensor | float): ...
class Exponential(dist.Distribution):
    """
    Exponential distribution with rate parameter.

    Parameters:
    - rate (Tensor or float): Rate parameter (lambda > 0)
    """
    # Scalar rate accepted, consistent with sibling distributions (e.g. Normal, Gamma).
    def __init__(self, rate: torch.Tensor | float): ...
class Uniform(dist.Distribution):
    """
    Uniform distribution over the half-open interval [low, high).

    Parameters:
    - low (Tensor or float): Lower bound (inclusive)
    - high (Tensor or float): Upper bound (exclusive); must satisfy high > low
    """
    def __init__(self, low: torch.Tensor | float, high: torch.Tensor | float): ...
class Laplace(dist.Distribution):
    """
    Laplace (double-exponential) distribution with location and scale parameters.

    Parameters:
    - loc (Tensor or float): Location parameter (median)
    - scale (Tensor or float): Scale parameter (> 0)
    """
    def __init__(self, loc: torch.Tensor | float, scale: torch.Tensor | float): ...
class Cauchy(dist.Distribution):
    """
    Cauchy distribution with location and scale parameters.

    Parameters:
    - loc (Tensor or float): Location parameter (mode/median; the mean is undefined)
    - scale (Tensor or float): Scale parameter (> 0)
    """
    def __init__(self, loc: torch.Tensor | float, scale: torch.Tensor | float): ...
class StudentT(dist.Distribution):
"""
Student's t-distribution with degrees of freedom.
Parameters:
- df (Tensor): Degrees of freedom (> 0)
- loc (Tensor): Location parameter (default 0)
- scale (Tensor): Scale parameter (default 1, > 0)
"""
def __init__(self, df: torch.Tensor, loc: torch.Tensor = 0.0, scale: torch.Tensor = 1.0): ...Discrete probability distributions for modeling categorical and count data.
class Bernoulli(dist.Distribution):
    """
    Bernoulli distribution for binary outcomes.

    Exactly one of `probs` or `logits` should be specified.

    Parameters:
    - probs (Tensor or float, optional): Probability of success (0 <= p <= 1)
    - logits (Tensor or float, optional): Log-odds of success (alternative to probs)

    Examples:
    >>> bernoulli = dist.Bernoulli(0.3)  # 30% probability of 1
    >>> bernoulli = dist.Bernoulli(logits=torch.tensor([-1.0, 0.0, 1.0]))
    """
    def __init__(self, probs: torch.Tensor | float | None = None, logits: torch.Tensor | float | None = None): ...
class Categorical(dist.Distribution):
    """
    Categorical distribution over discrete categories.

    Exactly one of `probs` or `logits` should be specified.

    Parameters:
    - probs (Tensor, optional): Category probabilities (must sum to 1)
    - logits (Tensor, optional): Log probabilities (alternative to probs)

    Examples:
    >>> cat = dist.Categorical(torch.tensor([0.25, 0.25, 0.5]))
    >>> cat = dist.Categorical(logits=torch.tensor([1.0, 1.0, 2.0]))
    """
    def __init__(self, probs: torch.Tensor | None = None, logits: torch.Tensor | None = None): ...
class Binomial(dist.Distribution):
    """
    Binomial distribution for count data with a fixed number of trials.

    Exactly one of `probs` or `logits` should be specified.

    Parameters:
    - total_count (Tensor or int): Number of trials
    - probs (Tensor, optional): Success probability per trial
    - logits (Tensor, optional): Log-odds of success (alternative to probs)
    """
    def __init__(self, total_count: torch.Tensor | int, probs: torch.Tensor | None = None, logits: torch.Tensor | None = None): ...
class Poisson(dist.Distribution):
    """
    Poisson distribution for count data.

    Parameters:
    - rate (Tensor or float): Rate parameter (lambda > 0)
    """
    def __init__(self, rate: torch.Tensor | float): ...
class NegativeBinomial(dist.Distribution):
    """
    Negative binomial distribution for overdispersed count data.

    Exactly one of `probs` or `logits` should be specified.

    Parameters:
    - total_count (Tensor or int): Number of failures until stopping
    - probs (Tensor, optional): Success probability
    - logits (Tensor, optional): Log-odds of success
    """
    def __init__(self, total_count: torch.Tensor | int, probs: torch.Tensor | None = None, logits: torch.Tensor | None = None): ...
class Geometric(dist.Distribution):
"""
Geometric distribution for number of trials until first success.
Parameters:
- probs (Tensor, optional): Success probability
- logits (Tensor, optional): Log-odds of success
"""
def __init__(self, probs: torch.Tensor = None, logits: torch.Tensor = None): ...Distributions for modeling correlated multi-dimensional random variables.
class MultivariateNormal(dist.Distribution):
    """
    Multivariate normal distribution with mean vector and covariance matrix.

    Exactly one of `covariance_matrix`, `precision_matrix`, or `scale_tril`
    should be specified.

    Parameters:
    - loc (Tensor): Mean vector of shape (..., n)
    - covariance_matrix (Tensor, optional): Covariance matrix (..., n, n)
    - precision_matrix (Tensor, optional): Precision matrix (inverse covariance)
    - scale_tril (Tensor, optional): Lower triangular Cholesky factor

    Examples:
    >>> mvn = dist.MultivariateNormal(torch.zeros(3), torch.eye(3))
    >>> mvn = dist.MultivariateNormal(torch.zeros(3), scale_tril=torch.tril(torch.randn(3, 3)))

    NOTE(review): a valid `scale_tril` must have a positive diagonal;
    `torch.tril(torch.randn(3, 3))` may violate that and fail validation.
    """
    def __init__(self, loc: torch.Tensor, covariance_matrix: torch.Tensor | None = None,
                 precision_matrix: torch.Tensor | None = None, scale_tril: torch.Tensor | None = None): ...
class Dirichlet(dist.Distribution):
    """
    Dirichlet distribution over probability simplices.

    Parameters:
    - concentration (Tensor): Concentration parameters (all > 0)

    Examples:
    >>> dirichlet = dist.Dirichlet(torch.ones(5))  # Uniform over the 4-simplex
    >>> dirichlet = dist.Dirichlet(torch.tensor([1.0, 2.0, 3.0]))
    """
    def __init__(self, concentration: torch.Tensor): ...
class LowRankMultivariateNormal(dist.Distribution):
    """
    Low-rank multivariate normal distribution for high-dimensional data.

    Covariance is represented as cov_factor @ cov_factor.T + diag(cov_diag),
    avoiding materializing a full (n, n) covariance matrix.

    Parameters:
    - loc (Tensor): Mean vector
    - cov_factor (Tensor): Low-rank covariance factor
    - cov_diag (Tensor): Diagonal covariance component
    """
    def __init__(self, loc: torch.Tensor, cov_factor: torch.Tensor, cov_diag: torch.Tensor): ...
class Multinomial(dist.Distribution):
"""
Multinomial distribution for multi-category count data.
Parameters:
- total_count (Tensor): Total number of trials
- probs (Tensor, optional): Category probabilities
- logits (Tensor, optional): Log probabilities
"""
def __init__(self, total_count: torch.Tensor = 1, probs: torch.Tensor = None, logits: torch.Tensor = None): ...Specialized distributions unique to Pyro for advanced probabilistic modeling.
class Delta(dist.Distribution):
    """
    Point mass (Dirac delta) distribution for deterministic variables.

    Parameters:
    - v (Tensor): Point mass location
    - log_density (Tensor or float, optional): Log density at the point
    - event_dim (int): Number of event dimensions

    Examples:
    >>> delta = dist.Delta(torch.tensor(5.0))  # Point mass at 5.0
    >>> delta = dist.Delta(torch.zeros(3, 3), event_dim=2)  # Matrix point mass
    """
    # `log_density` annotation widened to match the float default.
    def __init__(self, v: torch.Tensor, log_density: torch.Tensor | float = 0.0, event_dim: int = 0): ...
class Empirical(dist.Distribution):
    """
    Empirical distribution from samples with optional weights.

    Parameters:
    - samples (Tensor): Sample values
    - weights (Tensor, optional): Sample weights (default uniform)

    Examples:
    >>> samples = torch.randn(1000, 5)
    >>> empirical = dist.Empirical(samples)
    >>>
    >>> # Weighted samples
    >>> weights = torch.rand(1000)
    >>> empirical = dist.Empirical(samples, weights)
    """
    def __init__(self, samples: torch.Tensor, weights: torch.Tensor | None = None): ...
class ImproperUniform(dist.Distribution):
    """
    Improper uniform distribution with infinite support.

    Useful as a non-informative prior; it does not integrate to 1.

    Parameters:
    - support (Constraint): Support constraint
    - batch_shape (torch.Size): Batch shape
    - event_shape (torch.Size): Event shape
    """
    # torch.Size() defaults are immutable tuples, so sharing them is safe.
    def __init__(self, support, batch_shape: torch.Size = torch.Size(), event_shape: torch.Size = torch.Size()): ...
class Unit(dist.Distribution):
"""
Unit distribution that always returns empty tensor.
Useful as a null distribution or for testing purposes.
"""
def __init__(self): ...Enhanced versions of standard distributions with additional functionality.
class ExtendedBinomial(dist.Distribution):
    """
    Extended binomial distribution allowing non-integer total_count.

    Parameters:
    - total_count (Tensor or float): Total count (can be non-integer)
    - probs (Tensor, optional): Success probability
    - logits (Tensor, optional): Log-odds of success
    """
    def __init__(self, total_count: torch.Tensor | float, probs: torch.Tensor | None = None, logits: torch.Tensor | None = None): ...
class ExtendedBetaBinomial(dist.Distribution):
    """
    Extended beta-binomial distribution with continuous total count.

    Parameters:
    - concentration1 (Tensor): First beta parameter
    - concentration0 (Tensor): Second beta parameter
    - total_count (Tensor): Total count
    """
    def __init__(self, concentration1: torch.Tensor, concentration0: torch.Tensor, total_count: torch.Tensor): ...
class SkewLogistic(dist.Distribution):
"""
Skewed logistic distribution.
Parameters:
- loc (Tensor): Location parameter
- scale (Tensor): Scale parameter
- skewness (Tensor): Skewness parameter
"""
def __init__(self, loc: torch.Tensor, scale: torch.Tensor, skewness: torch.Tensor): ...Distributions for modeling multi-modal and heterogeneous data.
class MixtureSameFamily(dist.Distribution):
    """
    Mixture distribution whose components all belong to the same family.

    Parameters:
    - mixture_distribution (Categorical): Mixing distribution
    - component_distribution (Distribution): Component distribution

    Examples:
    >>> mix = dist.Categorical(torch.ones(3))
    >>> comp = dist.Normal(torch.tensor([0.0, 5.0, -5.0]), torch.ones(3))
    >>> mixture = dist.MixtureSameFamily(mix, comp)
    """
    def __init__(self, mixture_distribution, component_distribution): ...
class MixtureOfDiagNormals(dist.Distribution):
    """
    Mixture of diagonal-covariance multivariate normal distributions.

    Parameters:
    - locs (Tensor): Component means
    - coord_scale (Tensor): Component scales
    - component_logits (Tensor): Component log probabilities
    """
    def __init__(self, locs: torch.Tensor, coord_scale: torch.Tensor, component_logits: torch.Tensor): ...
class GaussianScaleMixture(dist.Distribution):
"""
Gaussian scale mixture distribution.
Parameters:
- loc (Tensor): Location parameter
- mixture_distribution (Distribution): Scale mixing distribution
- component_distribution (Distribution): Component distribution
"""
def __init__(self, loc: torch.Tensor, mixture_distribution, component_distribution): ...Distributions for modeling data with excess zeros.
class ZeroInflatedDistribution(dist.Distribution):
    """
    Zero-inflated version of any distribution.

    Parameters:
    - base_dist (Distribution): Base distribution
    - gate (Tensor or float): Probability of zero inflation

    Examples:
    >>> base = dist.Poisson(3.0)
    >>> zi = dist.ZeroInflatedDistribution(base, gate=0.2)
    """
    # The example above passes a float gate, so the annotation allows it.
    def __init__(self, base_dist, gate: torch.Tensor | float): ...
class ZeroInflatedPoisson(dist.Distribution):
    """
    Zero-inflated Poisson distribution.

    Parameters:
    - rate (Tensor or float): Poisson rate parameter
    - gate (Tensor or float): Zero-inflation probability
    """
    def __init__(self, rate: torch.Tensor | float, gate: torch.Tensor | float): ...
class ZeroInflatedNegativeBinomial(dist.Distribution):
"""
Zero-inflated negative binomial distribution.
Parameters:
- total_count (Tensor): Number of failures
- probs (Tensor, optional): Success probability
- logits (Tensor, optional): Log-odds of success
- gate (Tensor): Zero-inflation probability
"""
def __init__(self, total_count: torch.Tensor, probs: torch.Tensor = None,
logits: torch.Tensor = None, gate: torch.Tensor = None): ...Distributions for modeling temporal and sequential data.
class DiscreteHMM(dist.Distribution):
    """
    Discrete Hidden Markov Model distribution.

    Parameters:
    - initial_logits (Tensor): Initial state log probabilities
    - transition_logits (Tensor): Transition log probabilities
    - observation_dist (Distribution): Observation distribution
    """
    def __init__(self, initial_logits: torch.Tensor, transition_logits: torch.Tensor, observation_dist): ...
class GaussianHMM(dist.Distribution):
    """
    Gaussian Hidden Markov Model with continuous observations.

    Parameters:
    - initial_dist (Distribution): Initial state distribution
    - transition_matrix (Tensor): Transition probabilities
    - observation_matrix (Tensor): Observation matrix
    - observation_dist (Distribution): Observation noise distribution
    """
    def __init__(self, initial_dist, transition_matrix: torch.Tensor,
                 observation_matrix: torch.Tensor, observation_dist): ...
class LinearHMM(dist.Distribution):
    """
    Linear Gaussian state space model.

    Parameters:
    - initial_dist (Distribution): Initial state distribution
    - transition_matrix (Tensor): State transition matrix
    - transition_dist (Distribution): Transition noise
    - observation_matrix (Tensor): Observation matrix
    - observation_dist (Distribution): Observation noise
    """
    def __init__(self, initial_dist, transition_matrix: torch.Tensor, transition_dist,
                 observation_matrix: torch.Tensor, observation_dist): ...

def enable_validation(is_validate: bool = True):
    """
    Enable or disable validation for all distributions.

    Parameters:
    - is_validate (bool): Whether to enable validation
    """
def is_validation_enabled() -> bool:
    """
    Check if validation is currently enabled.

    Returns:
    bool: True if validation is enabled
    """
def validation_enabled() -> bool:
    """
    Context manager for temporarily enabling/disabling validation.

    Returns:
    bool: Current validation state

    NOTE(review): described as a context manager but annotated as returning
    bool — confirm the intended usage (``with validation_enabled(): ...``)
    against the upstream pyro.distributions API.
    """

import pyro.distributions as dist
import torch

# Continuous distributions
normal = dist.Normal(0.0, 1.0)
sample = normal.sample()
log_prob = normal.log_prob(sample)

# Discrete distributions
categorical = dist.Categorical(torch.tensor([0.2, 0.3, 0.5]))
category = categorical.sample()

# Multivariate distributions
mvn = dist.MultivariateNormal(torch.zeros(3), torch.eye(3))
vector_sample = mvn.sample()

# Learnable distribution parameters
mu = pyro.param("mu", torch.tensor(0.0))
sigma = pyro.param("sigma", torch.tensor(1.0), constraint=dist.constraints.positive)
# Use in model
def model():
return pyro.sample("x", dist.Normal(mu, sigma))def mixture_model():
# Mixing weights
weights = pyro.sample("weights", dist.Dirichlet(torch.ones(3)))
# Component parameters
locs = pyro.sample("locs", dist.Normal(0, 5).expand([3]).to_event(1))
scales = pyro.sample("scales", dist.LogNormal(0, 1).expand([3]).to_event(1))
# Mixture distribution
mix = dist.Categorical(weights)
comp = dist.Normal(locs, scales).to_event(1)
mixture = dist.MixtureSameFamily(mix, comp)
return pyro.sample("obs", mixture)Install with Tessl CLI
npx tessl i tessl/pypi-pyro-ppl