A Python toolbox for performing gradient-free optimization with unified interfaces for optimization algorithms and parameter handling.
A comprehensive collection of 368+ gradient-free optimization algorithms behind a unified interface. It includes Evolution Strategies, Differential Evolution, Particle Swarm Optimization, Bayesian Optimization, meta-model approaches, and scipy-based methods for black-box optimization tasks.
The fundamental Optimizer base class that all optimization algorithms inherit from, providing a consistent interface for ask-tell optimization patterns.
class Optimizer:
    """
    Abstract base class for all optimization algorithms.

    Parameters:
    - parametrization: Parameter object defining the search space
    - budget: Maximum number of evaluations (int, optional)
    - num_workers: Number of parallel workers (int, default=1)
    """

    def __init__(self, parametrization: Parameter, budget: Optional[int] = None, num_workers: int = 1):
        """Initialize the optimizer with a parametrization and a budget."""

    def ask(self) -> Parameter:
        """
        Get the next candidate for evaluation.

        Returns:
            Parameter candidate for function evaluation
        """

    def tell(self, candidate: Parameter, loss: float) -> None:
        """
        Provide an evaluation result back to the optimizer.

        Args:
            candidate: The parameter that was evaluated
            loss: The function value to be minimized (a sequence of floats
                enables multi-objective optimization)
        """

    def provide_recommendation(self) -> Parameter:
        """
        Get the final recommendation after optimization.

        Returns:
            Best parameter found during optimization
        """

    def minimize(self, function: Callable) -> Parameter:
        """
        Run the complete optimization process.

        Args:
            function: Function to minimize

        Returns:
            Best parameter found
        """

    def pareto_front(self) -> List[Parameter]:
        """
        Get the Pareto front for multi-objective optimization.

        Returns:
            List of Pareto-optimal parameters
        """

Centralized registry system for optimizer discovery and access, providing programmatic access to all available optimization algorithms.
registry: Registry[OptCls]
"""
Registry containing all available optimizer classes.

Usage:
    optimizer_class = ng.optimizers.registry["CMA"]
    optimizer = optimizer_class(parametrization, budget=100)
"""

Evolution Strategy algorithms, including CMA-ES variants and the (1+1) Evolution Strategy, for continuous optimization with adaptive step sizes.
class CMA(Optimizer):
    """Covariance Matrix Adaptation Evolution Strategy."""

class FCMA(Optimizer):
    """Fast CMA-ES variant."""

class ECMA(Optimizer):
    """Elitist CMA-ES variant."""

class DiagonalCMA(Optimizer):
    """Diagonal CMA-ES for high-dimensional problems."""

class OnePlusOne(Optimizer):
    """(1+1) Evolution Strategy."""

class DiscreteOnePlusOne(Optimizer):
    """Discrete (1+1) Evolution Strategy."""

class NoisyOnePlusOne(Optimizer):
    """Noisy (1+1) Evolution Strategy."""

class OptimisticDiscreteOnePlusOne(Optimizer):
    """Optimistic discrete (1+1) variant."""
Differential Evolution algorithms with various strategies and parameter settings for global optimization.

class DE(Optimizer):
    """Standard Differential Evolution."""

class TwoPointsDE(Optimizer):
    """Two-point Differential Evolution."""

class LhsDE(Optimizer):
    """DE with Latin Hypercube Sampling initialization."""

class QrDE(Optimizer):
    """Quasi-random Differential Evolution."""

class MiniDE(Optimizer):
    """Minimal Differential Evolution."""

class MiniLhsDE(Optimizer):
    """Minimal DE with LHS initialization."""

class MiniQrDE(Optimizer):
    """Minimal quasi-random DE."""

Particle Swarm Optimization algorithms with various topologies and parameter configurations for swarm-based optimization.
class PSO(Optimizer):
    """Particle Swarm Optimization."""

class QOPSO(Optimizer):
    """Quasi-Oppositional PSO."""

class SQOPSO(Optimizer):
    """Simplified Quasi-Oppositional PSO."""

class SPSO(Optimizer):
    """Standard PSO."""

class RealSpacePSO(Optimizer):
    """Real-space PSO variant."""

Pure random and quasi-random sampling strategies for exploration-based optimization without learning.
class RandomSearch(Optimizer):
    """Pure random sampling."""

class HaltonSearch(Optimizer):
    """Halton sequence quasi-random sampling."""

class HammersleySearch(Optimizer):
    """Hammersley sequence sampling."""

class LHSSearch(Optimizer):
    """Latin Hypercube Sampling."""

class QORandomSearch(Optimizer):
    """Quasi-oppositional random search."""

class ScrHammersleySearch(Optimizer):
    """Scrambled Hammersley search."""

class OrthogonalSamplingSearch(Optimizer):
    """Orthogonal sampling-based search."""

Wrappers for scipy.optimize algorithms, providing access to classical optimization methods through the unified nevergrad interface.
class BFGS(Optimizer):
    """Broyden-Fletcher-Goldfarb-Shanno quasi-Newton method."""

class LBFGSB(Optimizer):
    """Limited-memory BFGS with bounds."""

class Powell(Optimizer):
    """Powell's conjugate direction method."""

class NelderMead(Optimizer):
    """Nelder-Mead simplex algorithm."""

class COBYLA(Optimizer):
    """Constrained optimization by linear approximation."""

class SLSQP(Optimizer):
    """Sequential Least Squares Programming."""

class TrustRegionDL(Optimizer):
    """Trust region with dogleg method."""
Surrogate model-based optimization using polynomial, neural network, SVM, and random forest metamodels for expensive function evaluations.

class MetaModel(Optimizer):
    """Polynomial metamodel optimization."""

class NeuralMetaModel(Optimizer):
    """Neural network surrogate models."""

class SVMMetaModel(Optimizer):
    """Support Vector Machine metamodels."""

class RFMetaModel(Optimizer):
    """Random Forest metamodels."""

class MetaTuneRecentering(Optimizer):
    """Meta-model with recentering."""

class EvoMixDeterministic(Optimizer):
    """Evolutionary mixture with deterministic metamodel."""

Bayesian optimization algorithms using Gaussian processes for sequential design and acquisition function optimization.
class BO(Optimizer):
    """Basic Bayesian Optimization."""

class PCABO(Optimizer):
    """PCA-based Bayesian Optimization."""

class BayesOptimBO(Optimizer):
    """BayesOpt library integration."""

class UltraLowBudgetBO(Optimizer):
    """Bayesian optimization for very small budgets."""

class ParametrizedBO(Optimizer):
    """Configurable Bayesian Optimization."""

Multi-algorithm approaches that combine multiple optimization strategies or run algorithms in parallel for robust optimization.
class NGOpt(Optimizer):
    """Automatic algorithm selection portfolio."""

class MultiCMA(Optimizer):
    """Multiple CMA-ES instances."""

class TripleCMA(Optimizer):
    """Three parallel CMA-ES instances."""

class Portfolio(Optimizer):
    """Basic portfolio optimizer."""

class ParaPortfolio(Optimizer):
    """Parallel portfolio execution."""

class ASCMADEthird(Optimizer):
    """Adaptive selection of CMA and DE."""

class ASCMADEQRthird(Optimizer):
    """Adaptive selection with quasi-random initialization."""
Multi-stage optimization approaches that combine different algorithms sequentially for improved performance.

class ChainCMAPowell(Optimizer):
    """CMA-ES followed by the Powell method."""

class ChainDEwithLHS(Optimizer):
    """DE with Latin Hypercube initialization."""

class ChainNaiveTBPSACMAPowell(Optimizer):
    """Sequential TBPSA, CMA, and Powell."""

class CMAL(Optimizer):
    """CMA-ES with local search refinement."""

class CMALarge(Optimizer):
    """CMA for large-scale problems with chaining."""

Integration with external optimization libraries and specialized algorithms for specific problem domains.
class HyperOpt(Optimizer):
    """Hyperopt library integration."""

class SMAC(Optimizer):
    """SMAC algorithm integration."""

class SMAC3(Optimizer):
    """SMAC3 algorithm integration."""

class AX(Optimizer):
    """Facebook Ax platform integration."""

class Optuna(Optimizer):
    """Optuna optimization framework integration."""

class PymooNSGA2(Optimizer):
    """NSGA-II multi-objective optimization."""

class PymooDEwithLHS(Optimizer):
    """Pymoo DE with Latin Hypercube Sampling."""
Helper functions for optimizer analysis, comparison, and learning from optimization results.

def learn_on_k_best(archive, k: int, method: str = "polynomial") -> Callable:
    """
    Meta-model learning on the best candidates.

    Args:
        archive: Optimization archive with evaluated points
        k: Number of best points to use for learning
        method: Learning method ("polynomial", "neural", "svm", "rf")

    Returns:
        Learned model function
    """
def addCompare(name1: str, name2: str) -> None:
    """
    Add an optimizer comparison for benchmarking.

    Args:
        name1: First optimizer name
        name2: Second optimizer name
    """
Basic ask-and-tell optimization loop:

import nevergrad as ng

# Define the function to minimize
def sphere(x):
    return sum(x**2)

# Set up the parametrization and optimizer
param = ng.p.Array(shape=(10,))
optimizer = ng.optimizers.CMA(parametrization=param, budget=100)

# Optimization loop
for _ in range(optimizer.budget):
    x = optimizer.ask()
    loss = sphere(x.value)
    optimizer.tell(x, loss)

# Get the result
recommendation = optimizer.provide_recommendation()
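The same loop can run with parallel evaluations. With num_workers greater than one, several candidates can be asked before any tell; nevergrad's minimize also accepts an executor (a detail omitted from the abridged signature above), following the standard concurrent.futures pattern:

from concurrent import futures

import nevergrad as ng

def sphere(x):
    return sum(x**2)

param = ng.p.Array(shape=(10,))
optimizer = ng.optimizers.CMA(parametrization=param, budget=100, num_workers=4)

# Candidates are evaluated concurrently; results are told back as they arrive
with futures.ThreadPoolExecutor(max_workers=optimizer.num_workers) as executor:
    recommendation = optimizer.minimize(sphere, executor=executor, batch_mode=False)
print(recommendation.value)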
Comparing several optimizers on the same problem:

import nevergrad as ng

def sphere(x):
    return sum(x**2)

# Try different optimizers on the same problem
optimizers_to_test = [
    ng.optimizers.CMA,
    ng.optimizers.DE,
    ng.optimizers.PSO,
    ng.optimizers.NGOpt,  # Automatic selection
]
budget = 100
results = {}
for optimizer_class in optimizers_to_test:
    # Use a fresh parametrization per run so optimizers do not share state
    param = ng.p.Array(shape=(5,))
    optimizer = optimizer_class(parametrization=param, budget=budget)
    result = optimizer.minimize(sphere)
    results[optimizer_class.__name__] = sphere(result.value)
print("Results:", results)
Multi-objective optimization, passing a list of losses to tell:

import nevergrad as ng

def multi_objective_function(x):
    # Return multiple objectives
    obj1 = sum(x**2)        # Minimize sphere
    obj2 = sum((x - 1)**2)  # Minimize distance from ones
    return [obj1, obj2]

param = ng.p.Array(shape=(5,))
optimizer = ng.optimizers.CMA(parametrization=param, budget=100)
for _ in range(optimizer.budget):
    x = optimizer.ask()
    losses = multi_objective_function(x.value)
    optimizer.tell(x, losses)

# Get the Pareto front
pareto_front = optimizer.pareto_front()
optimizer_name = "CMA"
optimizer_class = ng.optimizers.registry[optimizer_name]
optimizer = optimizer_class(parametrization=param, budget=100)
# List all available optimizers
available_optimizers = list(ng.optimizers.registry.keys())
print(f"Available optimizers: {len(available_optimizers)}")Install with Tessl CLI
npx tessl i tessl/pypi-nevergrad