Jupyter-friendly Python frontend for the MINUIT2 C++ library for function minimization and error analysis.
SciPy-compatible minimization interface that provides the same API as `scipy.optimize.minimize`. This allows easy integration with existing SciPy workflows while giving access to MINUIT's robust minimization algorithms.

The main interface function is compatible with the `scipy.optimize.minimize` API:
```python
def minimize(fun, x0, args=(), method="migrad", jac=None, hess=None, hessp=None,
             bounds=None, constraints=None, tol=None, callback=None, options=None):
    """
    Interface to MIGRAD using the scipy.optimize.minimize API.

    This function provides the same interface as scipy.optimize.minimize.
    If you are familiar with the latter, it lets you get started with Minuit quickly.

    Args:
        fun: Objective function to minimize.
            Signature: fun(x, *args) -> float
        x0: Initial parameter values (array-like).
        args: Extra arguments passed to the objective function (tuple, optional).
        method: Minimization method ("migrad" or "simplex", default: "migrad").
        jac: Gradient function (callable, bool, or None).
            If callable: jac(x, *args) -> array
            If True: fun returns (f, g), where g is the gradient.
            If None: a numerical gradient is used.
        hess: Hessian function (ignored, for compatibility).
        hessp: Hessian-vector product (ignored, for compatibility).
        bounds: Parameter bounds (sequence of (min, max) tuples or None).
        constraints: Constraints (ignored, for compatibility).
        tol: Tolerance for convergence (float, optional).
        callback: Callback function called after each iteration (ignored).
        options: Dictionary of solver options (optional).

    Returns:
        OptimizeResult: Result object with attributes:
            x: Final parameter values (ndarray).
            fun: Final function value (float).
            success: Whether optimization succeeded (bool).
            status: Termination status (int).
            message: Termination message (str).
            nfev: Number of function evaluations (int).
            nit: Number of iterations (int).
            hess_inv: Inverse Hessian approximation (2D array, if available).
    """
```
Supported options for controlling minimization behavior:

```python
# Options dictionary keys (all optional):
options = {
    "disp": bool,    # set to True to print convergence messages (default: False)
    "stra": int,     # Minuit strategy (0: fast, 1: balanced, 2: accurate; default: 1)
    "maxfun": int,   # maximum allowed number of function evaluations (default: None)
    "maxfev": int,   # deprecated alias for maxfun
    "eps": float,    # initial step size for the numerical derivative (default: 1)
}
```

A basic example:

```python
from iminuit import minimize
import numpy as np

# Define the objective function
def rosenbrock(x):
    return (1 - x[0])**2 + 100 * (x[1] - x[0]**2)**2

# Minimize using MIGRAD
result = minimize(rosenbrock, x0=[0, 0])
print(f"Success: {result.success}")
print(f"Minimum at: {result.x}")
print(f"Function value: {result.fun}")
print(f"Function evaluations: {result.nfev}")
```
bounds = [(0, 2), (-1, 3)] # x[0] in [0, 2], x[1] in [-1, 3]
result = minimize(rosenbrock, x0=[0.5, 0.5], bounds=bounds)
print(f"Bounded minimum: {result.x}")def rosenbrock_with_grad(x):
If the objective returns the function value and gradient together, pass `jac=True`:

```python
def rosenbrock_with_grad(x):
    f = (1 - x[0])**2 + 100 * (x[1] - x[0]**2)**2
    g = np.array([
        -2 * (1 - x[0]) - 400 * x[0] * (x[1] - x[0]**2),
        200 * (x[1] - x[0]**2)
    ])
    return f, g

# Use gradient information
result = minimize(rosenbrock_with_grad, x0=[0, 0], jac=True)
print(f"With gradient - minimum at: {result.x}")
```
Alternatively, supply the gradient as a separate callable:

```python
def rosenbrock_grad(x):
    return np.array([
        -2 * (1 - x[0]) - 400 * x[0] * (x[1] - x[0]**2),
        200 * (x[1] - x[0]**2)
    ])

result = minimize(rosenbrock, x0=[0, 0], jac=rosenbrock_grad)
```
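
A sketch comparing the two runs; the assumption here is that an analytic gradient usually reduces the number of objective calls, since MIGRAD no longer needs them for numerical derivatives, but this is typical behavior rather than a guarantee:

```python
# Sketch: compare evaluation counts with and without an analytic gradient.
res_numeric = minimize(rosenbrock, x0=[0, 0])
res_analytic = minimize(rosenbrock, x0=[0, 0], jac=rosenbrock_grad)
print(res_numeric.nfev, res_analytic.nfev)
```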
Solver options are passed as a dictionary:

```python
# Set minimization options
options = {
    "disp": True,     # print convergence messages
    "stra": 2,        # use the accurate strategy
    "maxfun": 10000,  # maximum function evaluations
    "eps": 0.1,       # initial step size
}

result = minimize(rosenbrock, x0=[0, 0], options=options)
```
The Simplex algorithm can be used instead of MIGRAD:

```python
# Use Simplex instead of MIGRAD
result = minimize(rosenbrock, x0=[0, 0], method="simplex")
print(f"Simplex result: {result.x}")
```
Extra arguments are forwarded to the objective via `args`:

```python
def quadratic(x, a, b):
    return a * (x[0] - 1)**2 + b * (x[1] - 2)**2

# Pass extra arguments to the objective function
result = minimize(quadratic, x0=[0, 0], args=(2, 3))
print(f"Minimum with args: {result.x}")
```
The same function can be handed to either optimizer:

```python
from scipy.optimize import minimize as scipy_minimize
from iminuit import minimize as iminuit_minimize

# Same function, different optimizers
result_scipy = scipy_minimize(rosenbrock, x0=[0, 0], method="BFGS")
result_iminuit = iminuit_minimize(rosenbrock, x0=[0, 0])
print(f"SciPy result: {result_scipy.x}, nfev: {result_scipy.nfev}")
print(f"iminuit result: {result_iminuit.x}, nfev: {result_iminuit.nfev}")
```
Failures can be detected through the result object:

```python
def problematic_function(x):
    if x[0] < 0:
        return np.inf  # return inf for invalid regions
    return (x[0] - 1)**2 + (x[1] - 2)**2

result = minimize(problematic_function, x0=[-1, 0])
if not result.success:
    print(f"Optimization failed: {result.message}")
else:
    print(f"Success: {result.x}")
```
Because the APIs match, `minimize` works as a drop-in replacement for `scipy.optimize.minimize`:

```python
def my_optimization_routine(objective, initial_guess):
    # This function works with either scipy or iminuit
    result = minimize(objective, initial_guess, method="migrad")
    return result.x, result.fun

# Usage
best_params, best_value = my_optimization_routine(rosenbrock, [0, 0])
```
The OptimizeResult object returned by minimize contains:

```python
class OptimizeResult:
    """Result of minimization."""
    x: np.ndarray         # final parameter values
    fun: float            # final function value
    success: bool         # whether optimization succeeded
    status: int           # termination status code
    message: str          # termination message
    nfev: int             # number of function evaluations
    nit: int              # number of iterations
    hess_inv: np.ndarray  # inverse Hessian approximation (if available)
```

The `hess`, `hessp`, `constraints`, and `callback` arguments are accepted for compatibility but ignored. The interface is designed for easy migration from SciPy-based code while providing access to MINUIT's robust minimization algorithms.
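
As a sketch, `hess_inv` can be inspected after a fit; how it relates to parameter uncertainties depends on how the objective is normalized, so treat the printed numbers as illustrative scale estimates only:

```python
import numpy as np
from iminuit import minimize

# Sketch: inspect the inverse Hessian after a simple quadratic fit.
# Its interpretation as a covariance depends on the objective's scaling.
result = minimize(lambda x: (x[0] - 1)**2 + (x[1] - 2)**2, x0=[0, 0])
hess_inv = getattr(result, "hess_inv", None)  # may be absent (see above)
if hess_inv is not None:
    print(np.sqrt(np.diag(hess_inv)))  # rough per-parameter scale estimates
```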
Install with Tessl CLI:

```
npx tessl i tessl/pypi-iminuit
```