Jupyter-friendly Python frontend for the MINUIT2 C++ library for function minimization and error analysis
—
Common test functions for benchmarking and validating optimization algorithms. These well-known functions have documented properties and known global minima, providing standard test cases for evaluating minimizer performance.
```python
def rosenbrock(x, y):
    """
    Rosenbrock function. Minimum: f(1, 1) = 0.

    The Rosenbrock function is a non-convex function used as a performance test
    problem for optimization algorithms. It was introduced by Howard H. Rosenbrock
    in 1960 and is also known as Rosenbrock's valley or Rosenbrock's banana function.

    Args:
        x: First parameter (float)
        y: Second parameter (float)

    Returns:
        float: Function value

    Reference:
        https://en.wikipedia.org/wiki/Rosenbrock_function
    """
```
```python
def rosenbrock_grad(x, y):
    """
    Gradient of the Rosenbrock function.

    Args:
        x: First parameter (float)
        y: Second parameter (float)

    Returns:
        Tuple[float, float]: Gradient components (df/dx, df/dy)
    """
```
```python
def ackley(x, y):
    """
    Ackley function. Minimum: f(0, 0) = 0.

    The Ackley function is widely used for testing optimization algorithms.
    It is characterized by a nearly flat outer region and a large hole at the center.

    Args:
        x: First parameter (float)
        y: Second parameter (float)

    Returns:
        float: Function value

    Reference:
        https://en.wikipedia.org/wiki/Ackley_function
    """
```
```python
def beale(x, y):
    """
    Beale function. Minimum: f(3, 0.5) = 0.

    The Beale function is multimodal, with sharp peaks at the corners of the
    input domain.

    Args:
        x: First parameter (float)
        y: Second parameter (float)

    Returns:
        float: Function value

    Reference:
        https://en.wikipedia.org/wiki/Test_functions_for_optimization
    """
```
```python
def matyas(x, y):
    """
    Matyas function. Minimum: f(0, 0) = 0.

    The Matyas function has no local minima except the global one.

    Args:
        x: First parameter (float)
        y: Second parameter (float)

    Returns:
        float: Function value
    """
```
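A sketch of the conventional Matyas form (assumed; this positive-definite quadratic has its only stationary point at the origin):

```python
def matyas_sketch(x, y):
    # Conventional Matyas function (assumed). Minimum: f(0, 0) = 0.
    return 0.26 * (x ** 2 + y ** 2) - 0.48 * x * y
```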
"""Test functions that work with N-dimensional parameter vectors.
```python
def sphere_np(x):
    """
    N-dimensional sphere function. Minimum: f([0, 0, ..., 0]) = 0.

    Simple convex quadratic function, often used as a basic test case.

    Args:
        x: Parameter vector (array-like)

    Returns:
        float: Sum of squares of all parameters
    """
```
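A minimal sketch of the documented behavior (sum of squares over an arbitrary-length vector; `sphere_np_sketch` is an illustrative name):

```python
import numpy as np

def sphere_np_sketch(x):
    # Sum of squares of all components; minimum f([0, ..., 0]) = 0.
    return np.sum(np.asarray(x) ** 2)
```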
"""Extended collection of optimization test functions for comprehensive benchmarking.
```python
def goldstein_price(x, y):
    """
    Goldstein-Price function. Minimum: f(0, -1) = 3.

    Args:
        x: First parameter (float)
        y: Second parameter (float)

    Returns:
        float: Function value
    """
```
```python
def booth(x, y):
    """
    Booth function. Minimum: f(1, 3) = 0.

    Args:
        x: First parameter (float)
        y: Second parameter (float)

    Returns:
        float: Function value
    """
```
```python
def himmelblau(x, y):
    """
    Himmelblau's function. Has four identical local minima.

    Minima at:
        - f(3.0, 2.0) = 0
        - f(-2.805118, 3.131312) = 0
        - f(-3.779310, -3.283186) = 0
        - f(3.584428, -1.848126) = 0

    Args:
        x: First parameter (float)
        y: Second parameter (float)

    Returns:
        float: Function value
    """
```
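A sketch of the conventional Himmelblau form (assumed; f(3, 2) = 0 follows directly since both terms vanish there):

```python
def himmelblau_sketch(x, y):
    # Conventional Himmelblau function (assumed); four minima, each with f = 0.
    return (x ** 2 + y - 11) ** 2 + (x + y ** 2 - 7) ** 2
```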
"""from iminuit import Minuit
from iminuit.testing import rosenbrock, rosenbrock_grad
# Test Rosenbrock function minimization
m = Minuit(rosenbrock, x=0, y=0)
m.migrad()
print(f"Minimum found at: x={m.values['x']:.6f}, y={m.values['y']:.6f}")
print(f"Function value: {m.fval:.6f}")
print(f"Expected minimum: (1, 1) with f=0")# Use analytical gradient for better convergence
m_with_grad = Minuit(rosenbrock, x=0, y=0, grad=rosenbrock_grad)
m_with_grad.migrad()
print(f"With gradient - Function calls: {m_with_grad.nfcn}")
print(f"Without gradient - Function calls: {m.nfcn}")from iminuit.testing import ackley, beale, matyas
test_functions = [
    (rosenbrock, (0, 0), (1, 1), 0),  # (function, start, expected_min, expected_val)
    (ackley, (1, 1), (0, 0), 0),
    (beale, (1, 1), (3, 0.5), 0),
    (matyas, (1, 1), (0, 0), 0),
]
for func, start, expected_min, expected_val in test_functions:
    m = Minuit(func, x=start[0], y=start[1])
    m.migrad()
    print(f"\n{func.__name__}:")
    print(f"  Found: ({m.values['x']:.3f}, {m.values['y']:.3f}), f={m.fval:.6f}")
    print(f"  Expected: {expected_min}, f={expected_val}")
    print(f"  Converged: {m.valid}, Calls: {m.nfcn}")
```

Minimize an N-dimensional function by wrapping it in a fixed-arity signature:

```python
from iminuit.testing import sphere_np
import numpy as np
# Test N-dimensional sphere function
def sphere_5d(x1, x2, x3, x4, x5):
    return sphere_np([x1, x2, x3, x4, x5])
# Start from random point
start = np.random.randn(5)
m = Minuit(sphere_5d, x1=start[0], x2=start[1], x3=start[2], x4=start[3], x5=start[4])
m.migrad()
print(f"5D sphere minimum: {list(m.values.values())}")
print(f"Function value: {m.fval:.6f}")def test_algorithm_performance(func, start_point, methods=['migrad', 'simplex']):
"""Compare different minimization algorithms."""
results = {}
for method in methods:
m = Minuit(func, x=start_point[0], y=start_point[1])
if method == 'migrad':
m.migrad()
elif method == 'simplex':
m.simplex()
results[method] = {
'minimum': (m.values['x'], m.values['y']),
'fval': m.fval,
'nfcn': m.nfcn,
'valid': m.valid
}
return results
# Test different algorithms on Rosenbrock function
results = test_algorithm_performance(rosenbrock, (-1, -1))
for method, result in results.items():
    print(f"{method}: {result}")
```

Study convergence from different starting points and tolerance levels:

```python
def convergence_study(func, start_points, tolerance_levels):
    """Study convergence from different starting points."""
    success_rate = {}
    for tol in tolerance_levels:
        successes = 0
        total_calls = 0
        for start in start_points:
            m = Minuit(func, x=start[0], y=start[1])
            m.tol = tol
            m.migrad()
            if m.valid:
                successes += 1
            total_calls += m.nfcn
        success_rate[tol] = {
            'rate': successes / len(start_points),
            'avg_calls': total_calls / len(start_points),
        }
    return success_rate
# Study convergence for different tolerance levels
start_points = [(0, 0), (1, 1), (-1, -1), (2, -2), (-2, 2)]
tolerances = [1e-3, 1e-4, 1e-5, 1e-6]
results = convergence_study(rosenbrock, start_points, tolerances)
for tol, result in results.items():
    print(f"Tolerance {tol}: Success rate {result['rate']:.1%}, "
          f"Avg calls {result['avg_calls']:.1f}")
```

Benchmark minimization time and function-call counts over repeated runs:

```python
import time
def benchmark_function(func, start_point, n_runs=10):
    """Benchmark minimization performance."""
    times = []
    nfcn_list = []
    for _ in range(n_runs):
        m = Minuit(func, x=start_point[0], y=start_point[1])
        start_time = time.perf_counter()  # monotonic clock, preferred for timing
        m.migrad()
        end_time = time.perf_counter()
        times.append(end_time - start_time)
        nfcn_list.append(m.nfcn)
    return {
        'avg_time': np.mean(times),
        'std_time': np.std(times),
        'avg_nfcn': np.mean(nfcn_list),
        'std_nfcn': np.std(nfcn_list),
    }
# Benchmark different test functions
functions = [rosenbrock, ackley, beale]
for func in functions:
    result = benchmark_function(func, (0.5, 0.5))
    print(f"{func.__name__}: {result['avg_time']:.4f}±{result['std_time']:.4f}s, "
          f"{result['avg_nfcn']:.1f}±{result['std_nfcn']:.1f} calls")
```

Generate custom test functions with known minima:

```python
def create_shifted_quadratic(center, scale):
    """Create a shifted and scaled quadratic function for testing."""
    def shifted_quadratic(x, y):
        return scale * ((x - center[0])**2 + (y - center[1])**2)

    # Add metadata so tests can look up the expected optimum
    shifted_quadratic.minimum = center
    shifted_quadratic.minimum_value = 0.0
    shifted_quadratic.__name__ = f"shifted_quadratic_{center}_{scale}"
    return shifted_quadratic
# Create and test custom function
custom_func = create_shifted_quadratic((2, -1), 0.5)
m = Minuit(custom_func, x=0, y=0)
m.migrad()
print(f"Custom function minimum: ({m.values['x']:.3f}, {m.values['y']:.3f})")
print(f"Expected: {custom_func.minimum}")| Function | Global Minimum | Function Value | Characteristics |
|---|---|---|---|
| Rosenbrock | (1, 1) | 0 | Non-convex, narrow curved valley |
| Ackley | (0, 0) | 0 | Many local minima, flat outer region |
| Beale | (3, 0.5) | 0 | Multimodal, sharp peaks |
| Matyas | (0, 0) | 0 | No local minima |
| Sphere | (0, ..., 0) | 0 | Convex, simple quadratic |
| Goldstein-Price | (0, -1) | 3 | Multiple local minima |
| Booth | (1, 3) | 0 | Simple quadratic with cross-term |
| Himmelblau | Multiple | 0 | Four identical global minima |
These functions provide a diverse set of optimization challenges for testing minimizer robustness, convergence speed, and accuracy.
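The table above can drive an automated regression check. A minimal sketch, using only the smooth functions documented earlier; the start points and tolerances are illustrative assumptions, chosen near the known minima so MIGRAD settles in the global basin:

```python
import numpy as np
from iminuit import Minuit
from iminuit.testing import rosenbrock, beale, matyas

# (function, start point, expected minimum); starts near the known minima
CASES = [
    (rosenbrock, (0.5, 0.5), (1.0, 1.0)),
    (beale, (2.5, 0.4), (3.0, 0.5)),
    (matyas, (1.0, 1.0), (0.0, 0.0)),
]

def test_known_minima():
    for func, start, expected_min in CASES:
        m = Minuit(func, x=start[0], y=start[1])
        m.migrad()
        assert m.valid, f"{func.__name__} did not converge"
        assert np.allclose([m.values['x'], m.values['y']], expected_min, atol=1e-4)
        assert m.fval < 1e-6  # illustrative tolerance; all expected values are 0
```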
Install with Tessl CLI:

```
npx tessl i tessl/pypi-iminuit
```