A Python toolbox for performing gradient-free optimization with unified interfaces for optimization algorithms and parameter handling.
npx @tessl/cli install tessl/pypi-nevergrad@1.0.0

A comprehensive Python toolbox for performing gradient-free optimization. Nevergrad provides a unified interface for black-box optimization with extensive support for parameter types, optimization algorithms, and monitoring capabilities. It handles complex parametrization including continuous, discrete, and mixed variables with advanced features like log-distributed parameters, categorical choices, and hierarchical parameter structures.
pip install nevergrad

import nevergrad as ng

Common usage patterns:
# Parametrization
import nevergrad as ng
param = ng.p.Array(shape=(10,))
# Optimization
optimizer = ng.optimizers.CMA(parametrization=param, budget=100)
# Operations (constraints, mutations, integer casting)
from nevergrad import ops
# Functions and benchmarking
from nevergrad.functions import ArtificialFunction
from nevergrad.benchmark import Experiment
# Error handling
from nevergrad import errors

import nevergrad as ng
import numpy as np
# Define the function to optimize (minimize)
def sphere(x):
    """Return the squared L2 norm of x (the classic sphere test function).

    x is expected to be a numpy array (elementwise ** is used); returns a
    scalar loss to be minimized.
    """
    return sum(x**2)
# Create parametrization - array of 10 floats
parametrization = ng.p.Array(shape=(10,))
# Choose optimizer and budget
optimizer = ng.optimizers.CMA(parametrization=parametrization, budget=100)
# Optimization loop (ask/tell pattern): the loop body below was
# unindented in the original and would not parse.
for _ in range(optimizer.budget):
    x = optimizer.ask()       # Get candidate
    loss = sphere(x.value)    # Evaluate function
    optimizer.tell(x, loss)   # Tell optimizer the result
# Get recommendation
recommendation = optimizer.provide_recommendation()
print(f"Best point: {recommendation.value}")
print(f"Best loss: {sphere(recommendation.value)}")

Nevergrad follows a layered architecture with clear separation of concerns:
ng.p module, ng.optimizers, ng.families, ng.callbacks, ng.ops, ng.typing, ng.errors

This design enables maximum flexibility and extensibility for machine learning hyperparameter optimization, neural architecture search, automated algorithm configuration, and general scientific optimization tasks.
Comprehensive parameter handling supporting scalar values, arrays, discrete choices, hierarchical structures, and constraints. Includes data transformations, bounds handling, mutation strategies, and constraint satisfaction.
# Core parameter types
class Parameter:
    """Interface sketch of the base class for all nevergrad parameters.

    Method bodies are elided (stubs); attributes are listed as annotations
    only. The original had the class body unindented, which does not parse.
    """

    value: Any      # current parameter value (type depends on the subclass)
    dimension: int  # dimensionality of the parameter's data representation

    def mutate(self) -> None: ...

    def sample(self) -> 'Parameter': ...
# Concrete parameter stubs, all deriving from Parameter.
class Array(Parameter): ...   # array-shaped parameter, cf. ng.p.Array(shape=(10,)) usage above
class Scalar(Parameter): ...  # presumably a single scalar value — verify against ng.p docs
class Choice(Parameter): ...  # categorical choice (see "categorical choices" in the overview)
class Dict(Parameter): ...    # presumably a named collection of sub-parameters — verify against ng.p docs
class Tuple(Parameter): ...

368+ registered optimization algorithms including Evolution Strategies, Differential Evolution, Particle Swarm Optimization, Bayesian Optimization, meta-model approaches, and scipy-based methods. Provides unified interface for all algorithms.
class Optimizer:
    """Interface sketch of the unified optimizer (ask/tell pattern).

    Stubs only. `Parameter` is declared elsewhere in this document, so
    annotations referencing it are quoted to avoid evaluating an undefined
    name at class-creation time; the original body was also unindented.
    """

    def __init__(self, parametrization, budget=None, num_workers=1): ...

    def ask(self) -> 'Parameter': ...

    def tell(self, candidate: 'Parameter', loss: float) -> None: ...

    def minimize(self, function) -> 'Parameter': ...
    def provide_recommendation(self) -> Parameter: ...

Parametrizable optimizer configurations enabling algorithm customization and automated hyperparameter tuning. Provides factory patterns for creating specialized optimizer variants.
# Configurable optimizer-family stubs (ng.families).
class ParametrizedCMA: ...        # CMA-ES variant with tunable settings (CMA is used in the examples above)
class ParametrizedBO: ...         # presumably configurable Bayesian Optimization — verify against ng.families
class DifferentialEvolution: ...  # Differential Evolution family (listed among supported algorithms above)
class Chaining: ...

Comprehensive callback system for optimization monitoring, logging, progress tracking, early stopping, and state persistence during optimization runs.
# Callback stubs (ng.callbacks) for monitoring optimization runs.
class OptimizationPrinter: ...  # presumably prints periodic progress — verify against ng.callbacks
class OptimizationLogger: ...   # presumably logs candidates/losses — verify against ng.callbacks
class ProgressBar: ...          # presumably displays run progress — verify against ng.callbacks
class EarlyStopping: ...

Rich type system with protocol definitions and comprehensive error handling for robust optimization workflows.
# Key types
# NOTE(review): Union/Tuple/List/np are not imported in this snippet;
# these aliases assume `from typing import ...` and `import numpy as np`.
ArrayLike = Union[Tuple[float, ...], List[float], np.ndarray]  # any accepted array-like input
Loss = Union[float, ArrayLike]  # scalar loss, or array-like for multi-value losses
# Error handling
class NevergradError(Exception): ...  # root of the nevergrad exception hierarchy
class NevergradRuntimeError(NevergradError): ...

Specialized parameter operations including constraint handling, mutation operators for evolutionary algorithms, and parameter transformations for discrete optimization.
class Constraint:
    """Stub of the constraint operator (ng.ops).

    Wraps a constraint function plus an optimizer name and budget; the
    instance is applied to a parameter via __call__. Bodies are elided;
    the original class body was unindented and did not parse.
    """

    def __init__(self, func, optimizer="NGOpt", budget=100): ...

    def __call__(self, parameter): ...
class Mutation:
    """Stub of the base mutation operator (ng.ops).

    Called on a parameter; `inplace` selects in-place vs. copied mutation
    per the signature. Body elided; the original was unindented.
    """

    def __call__(self, parameter, inplace=False): ...
# Concrete mutation-operator stubs.
class Crossover(Mutation): ...    # presumably recombines candidates — verify against ng.ops
class Translation(Mutation): ...  # presumably shifts parameter data — verify against ng.ops
def Int(deterministic=True): ...

Comprehensive benchmarking framework with 37+ artificial test functions and systematic experiment management for optimizer evaluation and comparison.
class ArtificialFunction:
    """Stub of a benchmark test function, selected by name and dimension.

    Callable on a candidate point, returning a scalar loss per the
    annotation. Bodies elided; the original class body was unindented.
    """

    def __init__(self, name, block_dimension, **config): ...

    def __call__(self, x) -> float: ...
class Experiment:
    """Stub of a benchmark experiment pairing a function with an optimizer.

    `Parameter` is declared elsewhere in this document, so the return
    annotation is quoted to avoid evaluating an undefined name at
    class-creation time; the original body was also unindented.
    """

    def __init__(self, function, optimizer, budget, **params): ...

    def run(self) -> 'Parameter': ...
class ExperimentFunction:
    """Stub pairing a callable with its parametrization for benchmarking.

    Body elided; the original __init__ line was unindented and did not parse.
    """

    def __init__(self, function, parametrization): ...