SMAC3 is a versatile Bayesian Optimization package for hyperparameter optimization and algorithm configuration.
Lower-level components for building custom optimization workflows. These components provide fine-grained control over the optimization process and can be combined to create specialized optimization strategies.
Base interface and concrete implementations for learning from trial history to predict configuration performance.
class AbstractModel:
def __init__(
self,
configspace: ConfigurationSpace,
instance_features: dict[str, list[int | float]] | None = None,
pca_components: int | None = 7,
seed: int = 0
):
"""
Base surrogate model interface.
Parameters:
- configspace: Configuration space definition
- instance_features: Features for problem instances
- pca_components: PCA dimensionality reduction components
- seed: Random seed for reproducibility
"""
def train(self, X: np.ndarray, Y: np.ndarray) -> AbstractModel:
"""
Train model on historical data.
Parameters:
- X: Input configurations as feature matrix
- Y: Target values (costs/objectives)
Returns:
- Self for method chaining
"""
def predict(
self,
X: np.ndarray,
covariance_type: str | None = "diagonal"
) -> tuple[np.ndarray, np.ndarray | None]:
"""
Make predictions for new configurations.
Parameters:
- X: Configurations to predict
- covariance_type: Type of uncertainty estimation
Returns:
- (mean_predictions, uncertainty_estimates)
"""
def predict_marginalized(
self,
X: np.ndarray
) -> tuple[np.ndarray, np.ndarray | None]:
"""
Predict averaging over problem instances.
Parameters:
- X: Configurations to predict
Returns:
- (mean_predictions, uncertainty_estimates)
"""
@property
def meta(self) -> dict[str, Any]:
"""Model metadata and hyperparameters."""Concrete model implementations:
class RandomModel(AbstractModel):
"""Random baseline model that returns random predictions."""
class GaussianProcess(AbstractModel):
"""Gaussian process model for continuous optimization."""
class MCMCGaussianProcess(AbstractModel):
"""MCMC-based Gaussian process with integrated uncertainty."""
class RandomForest(AbstractModel):
"""Random forest model for mixed variable spaces."""
class MultiObjectiveModel(AbstractModel):
"""Wrapper for handling multiple objectives."""Functions that determine which configuration to evaluate next based on surrogate model predictions.
Functions that determine which configuration to evaluate next based on surrogate model predictions.
class AbstractAcquisitionFunction:
def __call__(self, configurations: np.ndarray) -> np.ndarray:
"""
Evaluate acquisition function for configurations.
Parameters:
- configurations: Configurations to evaluate
Returns:
- Acquisition values (higher = more promising)
"""
def update(self, model: AbstractModel, **kwargs) -> None:
"""
Update acquisition function after model training.
Parameters:
- model: Updated surrogate model
- **kwargs: Additional update parameters
"""
@property
def name(self) -> str:
"""Function name."""
@property
def model(self) -> AbstractModel | None:
"""Associated surrogate model."""
@property
def meta(self) -> dict[str, Any]:
"""Function metadata."""Concrete acquisition functions:
class EI(AbstractAcquisitionFunction):
"""Expected Improvement acquisition function."""
class EIPS(AbstractAcquisitionFunction):
"""Expected Improvement Per Second (for runtime optimization)."""
class PI(AbstractAcquisitionFunction):
"""Probability of Improvement acquisition function."""
class LCB(AbstractAcquisitionFunction):
"""Lower Confidence Bound acquisition function."""
class TS(AbstractAcquisitionFunction):
"""Thompson Sampling acquisition function."""
class PriorAcquisitionFunction(AbstractAcquisitionFunction):
"""Prior-based acquisition for initial exploration."""
class IntegratedAcquisitionFunction(AbstractAcquisitionFunction):
"""MCMC integrated acquisition function."""Optimization strategies for finding configurations that maximize acquisition functions.
Optimization strategies for finding configurations that maximize acquisition functions.
class AbstractAcquisitionMaximizer:
def maximize(
self,
acquisition_function: AbstractAcquisitionFunction,
history: RunHistory,
num_points: int
) -> list[Configuration]:
"""
Find configurations that maximize acquisition function.
Parameters:
- acquisition_function: Function to maximize
- history: Historical trial data
- num_points: Number of configurations to return
Returns:
- List of promising configurations
"""Concrete maximizers:
class RandomSearch(AbstractAcquisitionMaximizer):
"""Random sampling acquisition maximizer."""
class LocalSearch(AbstractAcquisitionMaximizer):
"""Local optimization acquisition maximizer."""
class DifferentialEvolution(AbstractAcquisitionMaximizer):
"""Evolutionary algorithm acquisition maximizer."""
class LocalAndSortedRandomSearch(AbstractAcquisitionMaximizer):
"""Hybrid random/local search acquisition maximizer."""Sampling strategies for initial exploration before surrogate model training.
Sampling strategies for initial exploration before surrogate model training.
class AbstractInitialDesign:
def select_initial_configurations(
self,
num_configs: int,
additional_configs: list[Configuration] | None = None
) -> list[Configuration]:
"""
Generate initial configurations for exploration.
Parameters:
- num_configs: Number of configurations to generate
- additional_configs: User-provided configurations to include
Returns:
- List of initial configurations
"""Concrete initial designs:
class RandomInitialDesign(AbstractInitialDesign):
"""Random sampling initial design."""
class LatinHypercubeInitialDesign(AbstractInitialDesign):
"""Latin hypercube sampling for space-filling design."""
class SobolInitialDesign(AbstractInitialDesign):
"""Sobol sequence sampling for low-discrepancy coverage."""
class FactorialInitialDesign(AbstractInitialDesign):
"""Factorial design for systematic parameter exploration."""
class DefaultInitialDesign(AbstractInitialDesign):
"""Default configuration only (no random exploration)."""Strategies for comparing configurations and managing multi-fidelity budget allocation.
Strategies for comparing configurations and managing multi-fidelity budget allocation.
class AbstractIntensifier:
def get_next_trial(self, incumbent: Configuration | None = None) -> TrialInfo:
"""
Get next trial to execute.
Parameters:
- incumbent: Current best configuration
Returns:
- Trial information for next evaluation
"""
def update_incumbents(self, trials: list[tuple[TrialInfo, TrialValue]]) -> None:
"""
Update incumbent configurations based on new trial results.
Parameters:
- trials: List of (trial_info, trial_value) pairs
"""Concrete intensifiers:
class Intensifier(AbstractIntensifier):
"""Basic racing intensifier for configuration comparison."""
class SuccessiveHalving(AbstractIntensifier):
"""Multi-fidelity successive halving intensifier."""
class Hyperband(AbstractIntensifier):
"""Multi-fidelity Hyperband intensifier with multiple brackets."""Strategies for handling multiple optimization objectives through scalarization.
Strategies for handling multiple optimization objectives through scalarization.
class AbstractMultiObjectiveAlgorithm:
def scalarize(self, objectives: list[float]) -> float:
"""
Convert multiple objectives to single scalar value.
Parameters:
- objectives: List of objective values
Returns:
- Scalarized value for comparison
"""Concrete multi-objective algorithms:
class MeanAggregationStrategy(AbstractMultiObjectiveAlgorithm):
"""Weighted sum scalarization strategy."""
class ParEGO(AbstractMultiObjectiveAlgorithm):
"""ParEGO scalarization with random weight vectors."""Strategies for introducing randomness during optimization to maintain exploration.
Strategies for introducing randomness during optimization to maintain exploration.
class AbstractRandomDesign:
def should_use_random_design(self, iteration: int) -> bool:
"""
Determine whether to use random sampling instead of model-based selection.
Parameters:
- iteration: Current optimization iteration
Returns:
- True if random design should be used
"""Concrete random designs:
class ProbabilityRandomDesign(AbstractRandomDesign):
"""Fixed probability random sampling."""
class ModulusRandomDesign(AbstractRandomDesign):
"""Modulus-based random sampling schedule."""
class CosineAnnealingRandomDesign(AbstractRandomDesign):
"""Cosine annealing schedule for random sampling."""from smac import AbstractFacade, Scenario
Components can be combined by subclassing AbstractFacade and overriding its factory methods, as in the following example.
from smac import AbstractFacade, Scenario
from smac.model import RandomForest
from smac.acquisition.function import EI
from smac.acquisition.maximizer import LocalAndSortedRandomSearch
from smac.initial_design import LatinHypercubeInitialDesign
from smac.intensifier import Intensifier
from smac.multi_objective import MeanAggregationStrategy
class CustomFacade(AbstractFacade):
@staticmethod
def get_model(scenario, **kwargs):
return RandomForest(
configspace=scenario.configspace,
n_trees=20, # Custom: more trees
max_depth=15, # Custom: shallower trees
**kwargs
)
@staticmethod
def get_acquisition_function(scenario, **kwargs):
return EI(xi=0.01) # Custom: explicit exploration parameter
@staticmethod
def get_acquisition_maximizer(scenario, **kwargs):
return LocalAndSortedRandomSearch(
configspace=scenario.configspace,
challengers=5000, # Custom: number of random challengers
local_search_iterations=20 # Custom: more local search
)
@staticmethod
def get_initial_design(scenario, **kwargs):
return LatinHypercubeInitialDesign(
configspace=scenario.configspace,
n_configs=50, # Custom: more initial samples
**kwargs
)
@staticmethod
def get_intensifier(scenario, **kwargs):
return Intensifier(
scenario=scenario,
max_config_calls=5, # Custom: more evaluations per config
**kwargs
)
# Use custom facade
scenario = Scenario(configspace=config_space, n_trials=100)
custom_facade = CustomFacade(scenario, objective)
result = custom_facade.optimize()
Package-wide constants used throughout SMAC3.
# Maximum integer value
MAXINT = 2**31 - 1
# Minimum cost for logarithmic scaling
MINIMAL_COST_FOR_LOG = 0.00001
# Maximum cutoff value
MAX_CUTOFF = 65535
# Numerical epsilon for computations
VERY_SMALL_NUMBER = 1e-10
# Default number of trees for random forests
N_TREES = 10
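These values can be reused in custom components; a small sketch assuming they are importable from smac.constants:
import numpy as np
from smac.constants import MAXINT, VERY_SMALL_NUMBER

rng = np.random.default_rng(0)
seed = int(rng.integers(0, MAXINT))          # keep generated seeds within the package-wide maximum
predicted_std = 0.0                          # e.g. a degenerate uncertainty estimate from a model
std = max(predicted_std, VERY_SMALL_NUMBER)  # floor tiny values to avoid division by zero downstream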
Install with Tessl CLI
npx tessl i tessl/pypi-smac