# iminuit — a Jupyter-friendly Python frontend to the MINUIT2 C++ library for
# function minimization and error analysis.
#
# This module provides statistical cost functions for maximum-likelihood
# estimation and least-squares fitting. They automatically set the correct
# errordef and support gradients, Numba acceleration, and various data formats.
#
# Predefined errordef values for different types of cost functions:
CHISQUARE: float = 1.0  # chi-square errordef convention (delta(cost) = 1 for 1-sigma)
NEGATIVE_LOG_LIKELIHOOD: float = 0.5  # negative log-likelihood errordef convention
# Least-squares cost function for fitting models to (x, y, yerror) data with
# support for outlier-robust loss functions.
class LeastSquares:
    """
    Least-squares cost function with optional loss functions for outlier robustness.
    """

    def __init__(self, x, y, yerror, model, *, loss="linear", verbose=0, grad=None, name=None):
        """
        Initialize least-squares cost function.

        Args:
            x: Independent variable data (array-like)
            y: Dependent variable data (array-like)
            yerror: Uncertainties on y values (array-like or scalar)
            model: Model function callable(x, *params) -> y_predicted
            loss: Loss function ("linear", "soft_l1", "huber", "cauchy", "arctan");
                "linear" is standard least-squares, "soft_l1" is robust to
                outliers, and the other options provide different outlier
                treatments
            verbose: Verbosity level (int, default: 0)
            grad: Model gradient function (optional)
            name: Parameter names (optional)
        """

    @property
    def errordef(self) -> float:
        """Errordef value (automatically 1.0 for least-squares)."""

    def __call__(self, *args) -> float:
        """Evaluate cost function at parameter values."""


# Maximum likelihood estimation for normalized probability density functions
# using binned data.
class BinnedNLL:
    """
    Binned negative log-likelihood for fitting normalized PDFs to binned data.
    """

    def __init__(self, n, xe, model, use_pdf=True):
        """
        Initialize binned NLL cost function.

        Args:
            n: Bin counts (array-like)
            xe: Bin edges (array-like, length = len(n) + 1)
            model: Model function callable(x, *params) -> probability_density
            use_pdf: If True, model is a PDF; if False, model returns bin content
        """

    @property
    def errordef(self) -> float:
        """Errordef value (automatically 0.5 for likelihood)."""


# Maximum likelihood estimation for normalized probability density functions
# using unbinned data.
class UnbinnedNLL:
    """
    Unbinned negative log-likelihood for fitting normalized PDFs to unbinned data.
    """

    def __init__(self, data, model, use_pdf=True):
        """
        Initialize unbinned NLL cost function.

        Args:
            data: Data points (array-like)
            model: Model function callable(x, *params) -> probability_density
            use_pdf: If True, model is normalized PDF; if False, model is unnormalized
        """

    @property
    def errordef(self) -> float:
        """Errordef value (automatically 0.5 for likelihood)."""


# Maximum likelihood estimation for unnormalized densities with Poisson
# fluctuations in total yield.
class ExtendedBinnedNLL:
    """
    Extended binned NLL for fitting unnormalized densities to binned data.
    """

    def __init__(self, n, xe, model):
        """
        Initialize extended binned NLL cost function.

        Args:
            n: Bin counts (array-like)
            xe: Bin edges (array-like, length = len(n) + 1)
            model: Model function callable(x, *params) -> density_value
                (not normalized, total integral becomes a fit parameter)
        """
class ExtendedUnbinnedNLL:
    """
    Extended unbinned NLL for fitting unnormalized densities to unbinned data.
    """

    def __init__(self, data, model):
        """
        Initialize extended unbinned NLL cost function.

        Args:
            data: Data points (array-like)
            model: Model function callable(x, *params) -> density_value
                (not normalized, total integral becomes a fit parameter)
        """


# Template fitting with bin-wise uncertainties on template histograms.
class Template:
    """
    Template fitting with uncertainties on template histograms.
    """

    def __init__(self, n, xe, templates):
        """
        Initialize template cost function.

        Args:
            n: Data histogram bin counts (array-like)
            xe: Bin edges (array-like, length = len(n) + 1)
            templates: List of template histograms, each can be:
                - Simple array of bin contents
                - (bin_contents, bin_errors) tuple
                - Histogram object with values and errors
        """

    @property
    def errordef(self) -> float:
        """Errordef value (automatically 0.5 for likelihood-based template fit)."""


# Gaussian penalty terms for incorporating external constraints or regularization.
class NormalConstraint:
    """
    Gaussian penalty terms for parameter constraints.
    """

    def __init__(self, params, values, errors):
        """
        Initialize normal constraint.

        Args:
            params: Parameter names (list of strings)
            values: Central values for constraints (array-like)
            errors: Uncertainties on constraint values (array-like)
        """

    @property
    def errordef(self) -> float:
        """Errordef value (automatically 1.0 for chi-square-like constraint)."""


# Combine multiple cost functions with shared parameters.
class CostSum:
    """
    Combined cost function from multiple cost functions.
    """

    def __init__(self, *costs):
        """
        Initialize combined cost function.

        Args:
            *costs: Cost function objects to combine
        """

    @property
    def errordef(self) -> float:
        """Errordef value (computed from component cost functions)."""

    def __call__(self, *args) -> float:
        """Evaluate combined cost function."""


# Protocol definitions for custom cost functions.
class Cost:
    """
    Base protocol for cost functions.
    """

    def __call__(self, *args) -> float:
        """Evaluate cost function at parameter values."""

    @property
    def errordef(self) -> float:
        """Error definition for this cost function type."""
class Constant:
    """
    Constant cost function (always returns same value).
    """

    def __init__(self, value):
        """
        Initialize constant cost function.

        Args:
            value: Constant value to return
        """


# Chi-square-like cost functions for common statistical scenarios.
def chi2(y, ye, ym):
    """
    Compute chi2-distributed cost for normally distributed data.

    Args:
        y: Observed values (array-like)
        ye: Standard deviations of observed values (array-like)
        ym: Expected values (array-like)

    Returns:
        float: Chi-square value, sum_i ((y_i - ym_i) / ye_i) ** 2
    """
    # Sum of squared, error-normalized residuals; empty input yields 0.0.
    return float(sum(((yi - mi) / ei) ** 2 for yi, ei, mi in zip(y, ye, ym)))
def poisson_chi2(n, mu):
    """
    Compute asymptotically chi2-distributed cost for Poisson-distributed data.

    Uses the Baker-Cousins transformation of the Poisson log-likelihood,
    2 * sum_i (mu_i - n_i + n_i * log(n_i / mu_i)), where the
    n * log(n / mu) term is taken as zero when n == 0.

    Args:
        n: Observed counts (array-like, non-negative)
        mu: Expected counts (array-like, positive)

    Returns:
        float: Asymptotic chi-square value
    """
    total = 0.0
    for ni, mi in zip(n, mu):
        total += mi - ni
        if ni > 0:  # n * log(n / mu) -> 0 as n -> 0
            total += ni * math.log(ni / mi)
    return 2.0 * total
def multinomial_chi2(n, mu):
    """
    Compute asymptotically chi2-distributed cost for multinomially-distributed data.

    Uses the saturated-model likelihood ratio 2 * sum_i n_i * log(n_i / mu_i);
    terms with n_i == 0 contribute zero.

    Args:
        n: Observed counts (array-like, non-negative)
        mu: Expected counts (array-like, positive)

    Returns:
        float: Asymptotic chi-square value
    """
    return 2.0 * sum(ni * math.log(ni / mi) for ni, mi in zip(n, mu) if ni > 0)
from iminuit import Minuit
from iminuit.cost import LeastSquares
import numpy as np

# Generate sample data
x = np.linspace(0, 10, 50)
y_true = 2 * x + 1
y_data = y_true + np.random.normal(0, 0.5, len(x))
y_errors = np.full(len(x), 0.5)


# Define linear model
def linear_model(x, slope, intercept):
    return slope * x + intercept


# Create cost function
cost = LeastSquares(x, y_data, y_errors, linear_model)

# Minimize
m = Minuit(cost, slope=1, intercept=0)
m.migrad()
m.hesse()
print(f"Best fit: slope={m.values['slope']:.3f}, intercept={m.values['intercept']:.3f}")
# Use soft_l1 loss for outlier robustness
cost_robust = LeastSquares(x, y_data, y_errors, linear_model, loss="soft_l1")
m_robust = Minuit(cost_robust, slope=1, intercept=0)
m_robust.migrad()
from iminuit.cost import BinnedNLL

# Histogram data (bin counts and edges)
n = np.array([5, 12, 9, 15, 8, 6, 3])
xe = np.linspace(0, 7, 8)  # 8 edges for 7 bins


# Gaussian PDF model
def gaussian_pdf(x, mu, sigma):
    return np.exp(-0.5 * ((x - mu) / sigma)**2) / (sigma * np.sqrt(2 * np.pi))


# Create binned NLL
cost = BinnedNLL(n, xe, gaussian_pdf)

# Fit
m = Minuit(cost, mu=3.5, sigma=1.0)
m.migrad()
from iminuit.cost import UnbinnedNLL

# Unbinned data points
data = np.random.normal(2.0, 1.5, 1000)


# Gaussian PDF model
def gaussian_pdf(x, mu, sigma):
    return np.exp(-0.5 * ((x - mu) / sigma)**2) / (sigma * np.sqrt(2 * np.pi))


# Create unbinned NLL
cost = UnbinnedNLL(data, gaussian_pdf)

# Fit
m = Minuit(cost, mu=0, sigma=1)
m.migrad()
from iminuit.cost import Template

# Data histogram
data_counts = np.array([10, 25, 35, 28, 15, 8])
bin_edges = np.linspace(0, 6, 7)

# Template histograms (signal and background)
signal_template = np.array([0, 5, 15, 12, 3, 0])
background_template = np.array([8, 12, 10, 8, 6, 4])

# Create template cost function
cost = Template(data_counts, bin_edges, [signal_template, background_template])

# Fit template amplitudes
m = Minuit(cost, x0=1.0, x1=1.0)  # x0=signal scale, x1=background scale
m.migrad()
from iminuit.cost import NormalConstraint, CostSum

# Main fitting cost function
main_cost = LeastSquares(x, y_data, y_errors, linear_model)

# External constraint on slope parameter
constraint = NormalConstraint(['slope'], [2.1], [0.1])

# Combine cost functions
total_cost = CostSum(main_cost, constraint)

# Fit with constraint
m = Minuit(total_cost, slope=2.0, intercept=1.0)
m.migrad()
# Define custom cost function
class MyCustomCost:
    """Custom chi-square-like cost: sum of squared deviations from one parameter."""

    errordef = 1.0  # For chi-square-like cost

    def __init__(self, data):
        self.data = data

    def __call__(self, param):
        # Custom cost calculation
        return np.sum((self.data - param)**2)


# Use with Minuit
cost = MyCustomCost(data)
m = Minuit(cost, param=1.0)
m.migrad()
# Install with Tessl CLI
# Install with the Tessl CLI: npx tessl i tessl/pypi-iminuit