Non-linear least-squares minimization and curve fitting with enhanced parameter management and confidence-interval estimation.

LMFIT provides comprehensive minimization capabilities supporting 15+ optimization methods from scipy.optimize, enhanced with Parameter objects, improved error estimation, and MCMC sampling. The Minimizer class offers fine-grained control, while the minimize function provides a convenient high-level interface.

Core optimization engine providing detailed control over the fitting process.
class Minimizer:
    """General minimizer for curve fitting and optimization."""

    def __init__(self, userfcn, params, fcn_args=None, fcn_kws=None,
                 iter_cb=None, scale_covar=True, nan_policy='raise',
                 reduce_fcn=None, calc_covar=True, max_nfev=None, **kws):
        """
        Create a Minimizer instance.

        Args:
            userfcn: Objective function to minimize
            params (Parameters): Parameters for the fit
            fcn_args (tuple): Additional arguments for objective function
            fcn_kws (dict): Additional keyword arguments for objective function
            iter_cb: Callback function called at each iteration
            scale_covar (bool): Scale covariance matrix for uncertainty estimation
            nan_policy (str): How to handle NaN values ('raise', 'propagate', 'omit')
            reduce_fcn: Function to reduce residual array to scalar
            calc_covar (bool): Calculate covariance matrix
            max_nfev (int): Maximum number of function evaluations
        """

    def minimize(self, method='leastsq', params=None, **kws):
        """
        Perform minimization.

        Args:
            method (str): Optimization method
            params (Parameters): Parameters to use for fit
            **kws: Method-specific keyword arguments

        Returns:
            MinimizerResult: Fit results and statistics
        """

    def scalar_minimize(self, method='Nelder-Mead', params=None, **kws):
        """
        Use scalar minimization methods from scipy.optimize.

        Args:
            method (str): Scipy scalar method name
            params (Parameters): Parameters for fit
            **kws: Method-specific arguments

        Returns:
            MinimizerResult: Fit results
        """

    def emcee(self, params=None, steps=1000, nwalkers=100, burn=0, thin=1,
              ntemps=1, **kws):
        """
        Markov Chain Monte Carlo sampling using emcee.

        Args:
            params (Parameters): Parameters for sampling
            steps (int): Number of MCMC steps
            nwalkers (int): Number of walkers
            burn (int): Number of burn-in steps
            thin (int): Thinning factor for chain
            ntemps (int): Number of temperatures for parallel tempering
            **kws: Additional emcee arguments

        Returns:
            MinimizerResult: MCMC results with chain data
        """

    def prepare_fit(self, params=None):
        """
        Prepare parameters and arrays for fitting.

        Args:
            params (Parameters): Parameters for fit
        """

    def unprepare_fit(self):
        """Clean up after fitting"""


# Container for minimization results with comprehensive fit statistics and
# parameter information.
class MinimizerResult:
    """Results from minimization with fit statistics and parameter data"""

    def show_candidates(self, n_candidates=5, precision=3):
        """
        Display candidate solutions for ambiguous fits.

        Args:
            n_candidates (int): Number of candidates to show
            precision (int): Decimal precision for display
        """

    # Key attributes available after fitting:
    # success (bool): Whether optimization succeeded
    # message (str): Termination message from optimizer
    # method (str): Optimization method used
    # nfev (int): Number of function evaluations
    # ndata (int): Number of data points
    # nvarys (int): Number of varied parameters
    # nfree (int): Degrees of freedom (ndata - nvarys)
    # chisqr (float): Chi-squared statistic
    # redchi (float): Reduced chi-squared (chisqr / nfree)
    # aic (float): Akaike Information Criterion
    # bic (float): Bayesian Information Criterion
    # params (Parameters): Best-fit parameters with uncertainties
    # var_names (list): Names of varied parameters
    # covar (ndarray): Covariance matrix
    # best_values (dict): Best-fit parameter values
    # init_values (dict): Initial parameter values
    # residual (ndarray): Residual array at best fit
    # flatchain (ndarray): Flattened MCMC chain (for emcee results)


# High-level interface for minimization without creating a Minimizer instance.
def minimize(fcn, params, method='leastsq', args=None, kws=None,
             scale_covar=True, iter_cb=None, reduce_fcn=None,
             nan_policy='raise', calc_covar=True, max_nfev=None, **fit_kws):
    """
    Minimize objective function using specified method.

    Args:
        fcn: Objective function to minimize
        params (Parameters): Parameters for the fit
        method (str): Optimization method to use
        args (tuple): Additional arguments for objective function
        kws (dict): Additional keyword arguments for objective function
        scale_covar (bool): Scale covariance matrix
        iter_cb: Iteration callback function
        reduce_fcn: Function to convert residual array to scalar
        nan_policy (str): Policy for handling NaN values
        calc_covar (bool): Calculate covariance matrix
        max_nfev (int): Maximum function evaluations
        **fit_kws: Method-specific keyword arguments

    Returns:
        MinimizerResult: Optimization results
    """


# Least-Squares Methods:
'leastsq': Levenberg-Marquardt (default, best for most problems)
'least_squares': Trust Region Reflective with bounds support

Global Optimization Methods:
'differential_evolution': Stochastic global optimizer
'brute': Brute force over parameter grid
'basinhopping': Basin-hopping global optimizer
'ampgo': Adaptive Memory Programming for Global Optimization
'shgo': Simplicial Homology Global Optimization
'dual_annealing': Dual annealing global optimizer

Local Scalar Methods:
'nelder': Nelder-Mead simplex
'powell': Powell's method
'cg': Conjugate Gradient
'newton': Newton-CG
'l-bfgs-b': L-BFGS-B with bounds
'tnc': Truncated Newton with bounds
'cobyla': Constrained Optimization BY Linear Approximation
'slsqp': Sequential Least Squares Programming
'bfgs': BFGS quasi-Newton

Sampling Methods:
'emcee': Markov Chain Monte Carlo (requires emcee package)

import numpy as np
from lmfit import minimize, Parameters
def objective(params, x, data):
    """Residual between an exponential-decay model and the observed data."""
    a, b, c = params['a'], params['b'], params['c']
    # Model: a * exp(-b * x) + c; the fit drives (model - data) toward zero.
    return a * np.exp(-b * x) + c - data
# Create sample data: noisy exponential decay (true values a=5.0, b=0.5, c=2.0)
x = np.linspace(0, 15, 301)
data = 5.0 * np.exp(-0.5 * x) + np.random.normal(size=301, scale=0.2) + 2.0

# Set up parameters with initial guesses; 'a' and 'b' are bounded below at 0
params = Parameters()
params.add('a', value=10, min=0)
params.add('b', value=1, min=0)
params.add('c', value=2)

# Perform fit with the default Levenberg-Marquardt method
result = minimize(objective, params, args=(x, data))
print(f"Chi-squared: {result.chisqr:.3f}")
print(f"Reduced chi-squared: {result.redchi:.3f}")

# Try different optimization methods
# Compare several optimizers on the same problem
methods = ['leastsq', 'least_squares', 'differential_evolution', 'nelder']
for method in methods:
    result = minimize(objective, params, method=method, args=(x, data))
    print(f"{method}: chi-squared = {result.chisqr:.3f}")

# Perform MCMC sampling (requires emcee)
# Sample the posterior with MCMC; burn-in steps are discarded from the chain
result_mcmc = minimize(objective, params, method='emcee',
                       args=(x, data), steps=2000, nwalkers=50, burn=500)

# Access MCMC chain: rows are samples, columns are varied parameters
chain = result_mcmc.flatchain
means = np.mean(chain, axis=0)
stds = np.std(chain, axis=0)
print("MCMC parameter means:", means)
print("MCMC parameter stds:", stds)

from lmfit import Minimizer
def iter_callback(params, iteration, residual):
    """Report the running chi-squared value at each fit iteration."""
    chisqr = np.sum(residual ** 2)
    print(f"Iteration {iteration}: chi-squared = {chisqr:.3f}")
# Create minimizer with detailed control over callbacks and covariance
minimizer = Minimizer(objective, params, fcn_args=(x, data),
                      iter_cb=iter_callback, calc_covar=True)

# Fit with a specific method and tightened convergence tolerances
result = minimizer.minimize(method='leastsq', xtol=1e-8, ftol=1e-8)

# Access detailed results
print(f"Function evaluations: {result.nfev}")
print(f"Success: {result.success}")
print(f"Message: {result.message}")

# Global optimization for difficult problems
# Differential evolution: stochastic global search (seeded for reproducibility)
result_global = minimize(objective, params, method='differential_evolution',
                         args=(x, data), seed=123, maxiter=1000)

# Basin-hopping for problems with multiple local minima
result_basin = minimize(objective, params, method='basinhopping',
                        args=(x, data), niter=100, T=1.0)

# Install with Tessl CLI
npx tessl i tessl/pypi-lmfit