CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-smac

SMAC3 is a versatile Bayesian Optimization package for hyperparameter optimization and algorithm configuration.

Pending
Overview
Eval results
Files

docs/facades.md

Optimization Facades

High-level interfaces that pre-configure SMAC components for specific optimization scenarios. Each facade extends AbstractFacade and provides static methods for customizing individual components while maintaining sensible defaults.

Capabilities

Abstract Base Facade

Base class providing the core optimization interface used by all concrete facades.

class AbstractFacade:
    """Base class providing the ask/tell optimization interface used by all facades.

    All component arguments are keyword-only; any component left as ``None``
    is filled in by the concrete facade's corresponding ``get_*`` default.
    """

    def __init__(
        self,
        scenario: Scenario,
        target_function: Callable | str | AbstractRunner,
        *,
        model: AbstractModel | None = None,
        acquisition_function: AbstractAcquisitionFunction | None = None,
        acquisition_maximizer: AbstractAcquisitionMaximizer | None = None,
        initial_design: AbstractInitialDesign | None = None,
        random_design: AbstractRandomDesign | None = None,
        intensifier: AbstractIntensifier | None = None,
        multi_objective_algorithm: AbstractMultiObjectiveAlgorithm | None = None,
        runhistory_encoder: AbstractRunHistoryEncoder | None = None,
        config_selector: ConfigSelector | None = None,
        logging_level: int | Path | Literal[False] | None = None,
        # Fixed annotation: the default is None, so the type must admit None.
        callbacks: list[Callback] | None = None,
        overwrite: bool = False,
        dask_client: Client | None = None
    ): ...

    def ask(self) -> TrialInfo:
        """Get the next configuration to evaluate."""

    def tell(self, info: TrialInfo, value: TrialValue, save: bool = True) -> None:
        """Report trial results; ``save=True`` persists them to the runhistory."""

    def optimize(self, *, data_to_scatter: dict[str, Any] | None = None) -> Configuration | list[Configuration]:
        """Run the optimization loop and return the incumbent configuration(s)."""

    def validate(self, config: Configuration, *, seed: int | None = None) -> float | list[float]:
        """Validate a configuration; returns one cost per objective."""

    @property
    def scenario(self) -> Scenario: ...

    @property
    def runhistory(self) -> RunHistory: ...

    @property
    def optimizer(self) -> SMBO: ...

    @property
    def intensifier(self) -> AbstractIntensifier: ...

    @property
    def meta(self) -> dict[str, Any]: ...

BlackBox Optimization

General-purpose Bayesian optimization using Gaussian processes, ideal for continuous optimization problems without categorical hyperparameters.

class BlackBoxFacade(AbstractFacade):
    """Gaussian-process-based Bayesian optimization facade.

    Each static ``get_*`` hook returns one pre-configured component; override
    a single hook to customize that piece while keeping the other defaults.
    """

    @staticmethod
    def get_model(scenario, *, model_type="vanilla", kernel=None) -> AbstractGaussianProcess:
        """Configure Gaussian process model."""
    
    @staticmethod
    def get_kernel(scenario) -> kernels.Kernel:
        """Get composite kernel for Gaussian process."""
    
    @staticmethod
    def get_acquisition_function(scenario, *, xi=0.0) -> EI:
        """Configure Expected Improvement acquisition function; ``xi`` is the exploration offset."""
    
    @staticmethod
    def get_acquisition_maximizer(scenario, *, challengers=1000, local_search_iterations=10) -> LocalAndSortedRandomSearch:
        """Configure acquisition optimizer."""
    
    @staticmethod
    def get_intensifier(scenario, *, max_config_calls=3, max_incumbents=20) -> Intensifier:
        """Configure trial intensifier."""
    
    @staticmethod
    # NOTE(review): "hyperparamter" is misspelled but appears to mirror the
    # upstream SMAC keyword name — confirm against the library before renaming.
    def get_initial_design(scenario, *, n_configs=None, n_configs_per_hyperparamter=8, max_ratio=0.25, additional_configs=None) -> SobolInitialDesign:
        """Configure Sobol sequence initial design."""
    
    @staticmethod
    # presumably a tuned exploration probability from upstream defaults — verify
    def get_random_design(scenario, *, probability=0.08447232371720552) -> ProbabilityRandomDesign:
        """Configure random exploration."""
    
    @staticmethod
    def get_multi_objective_algorithm(scenario, *, objective_weights=None) -> MeanAggregationStrategy:
        """Configure multi-objective handling."""
    
    @staticmethod
    def get_runhistory_encoder(scenario) -> RunHistoryEncoder:
        """Configure feature encoding."""
    
    @staticmethod
    def get_config_selector(scenario, *, retrain_after=1, retries=16) -> ConfigSelector:
        """Configure model retraining schedule."""

Usage example:

from smac import BlackBoxFacade, Scenario
from ConfigSpace import ConfigurationSpace, Float

# Minimize the 2-D sphere function f(x, y) = x**2 + y**2.
def objective(config):
    return sum(config[name] ** 2 for name in ("x", "y"))

# Both hyperparameters share the same continuous range.
config_space = ConfigurationSpace()
for name in ("x", "y"):
    config_space.add_hyperparameter(Float(name, bounds=(-5.0, 5.0)))

scenario = Scenario(configspace=config_space, n_trials=100)
facade = BlackBoxFacade(scenario, objective)
incumbent = facade.optimize()

Hyperparameter Optimization

Hyperparameter optimization using random forests, suitable for mixed continuous/categorical spaces with moderate numbers of hyperparameters.

class HyperparameterOptimizationFacade(AbstractFacade):
    """Random-forest-based facade for mixed continuous/categorical search spaces.

    Differs from the black-box facade in its surrogate model (random forest),
    more challengers, higher random-design probability, and a log-scaled
    runhistory encoder.
    """

    @staticmethod
    def get_model(scenario, *, n_trees=10, ratio_features=1.0, min_samples_split=2, min_samples_leaf=1, max_depth=2**20, bootstrapping=True) -> RandomForest:
        """Configure random forest surrogate model."""
    
    @staticmethod
    def get_acquisition_function(scenario, *, xi=0.0) -> EI:
        """Configure Expected Improvement acquisition function."""
    
    @staticmethod
    def get_acquisition_maximizer(scenario, *, challengers=10000, local_search_iterations=10) -> LocalAndSortedRandomSearch:
        """Configure acquisition optimizer with more challengers."""
    
    @staticmethod
    def get_intensifier(scenario, *, max_config_calls=3, max_incumbents=10) -> Intensifier:
        """Configure trial intensifier."""
    
    @staticmethod
    # NOTE(review): "hyperparamter" spelling appears to mirror the upstream
    # SMAC keyword name — confirm before renaming.
    def get_initial_design(scenario, *, n_configs=None, n_configs_per_hyperparamter=10, max_ratio=0.25, additional_configs=None) -> SobolInitialDesign:
        """Configure Sobol sequence initial design."""
    
    @staticmethod
    def get_random_design(scenario, *, probability=0.2) -> ProbabilityRandomDesign:
        """Configure random exploration with higher probability."""
    
    @staticmethod
    def get_multi_objective_algorithm(scenario, *, objective_weights=None) -> MeanAggregationStrategy:
        """Configure multi-objective handling."""
    
    @staticmethod
    def get_runhistory_encoder(scenario) -> RunHistoryLogScaledEncoder:
        """Configure log-scaled feature encoding."""

Multi-Fidelity Optimization

Multi-fidelity optimization using Hyperband for efficient budget allocation, extending hyperparameter optimization with successive halving.

class MultiFidelityFacade(HyperparameterOptimizationFacade):
    """Multi-fidelity facade: hyperparameter optimization plus Hyperband budgeting.

    Inherits all other component defaults from HyperparameterOptimizationFacade
    and overrides only the intensifier and initial design.
    """

    @staticmethod
    def get_intensifier(scenario, *, eta=3, n_seeds=1, instance_seed_order="shuffle_once", max_incumbents=10, incumbent_selection="highest_observed_budget") -> Hyperband:
        """Configure Hyperband intensifier with successive halving (``eta`` is the halving rate)."""
    
    @staticmethod
    # NOTE(review): "hyperparamter" spelling appears to mirror the upstream
    # SMAC keyword name — confirm before renaming.
    def get_initial_design(scenario, *, n_configs=None, n_configs_per_hyperparamter=10, max_ratio=0.25, additional_configs=None) -> RandomInitialDesign:
        """Configure random initial design for multi-fidelity."""

Usage requires a scenario with a budget configuration:

from smac import MultiFidelityFacade, Scenario

# Target function for multi-fidelity optimization: SMAC supplies a ``budget``
# in [min_budget, max_budget] that trades evaluation cost for accuracy.
def multi_fidelity_objective(config, seed=0, budget=1.0):
    # Use budget to control evaluation fidelity
    # NOTE(review): ``expensive_evaluation`` must be supplied by the user.
    return expensive_evaluation(config, fidelity=budget)

# Assumes ``config_space`` was built beforehand (as in the BlackBoxFacade example).
scenario = Scenario(
    configspace=config_space,
    n_trials=50,
    min_budget=0.1,  # Required for multi-fidelity
    max_budget=1.0   # Required for multi-fidelity
)

facade = MultiFidelityFacade(scenario, multi_fidelity_objective)

Algorithm Configuration

Algorithm configuration for tuning algorithm parameters across multiple problem instances with intensive evaluation.

class AlgorithmConfigurationFacade(AbstractFacade):
    """Facade for algorithm configuration across problem instances.

    Notable defaults: a random forest with PCA-reduced instance features,
    a very high ``max_config_calls`` for intensive per-configuration
    evaluation, and a 50% random-design probability.
    """

    @staticmethod
    def get_model(scenario, *, n_trees=10, ratio_features=5.0/6.0, min_samples_split=3, min_samples_leaf=3, max_depth=20, bootstrapping=True, pca_components=4) -> RandomForest:
        """Configure random forest with PCA dimensionality reduction."""
    
    @staticmethod
    def get_acquisition_function(scenario, *, xi=0.0) -> EI:
        """Configure Expected Improvement acquisition function."""
    
    @staticmethod
    def get_acquisition_maximizer(scenario) -> LocalAndSortedRandomSearch:
        """Configure acquisition optimizer."""
    
    @staticmethod
    def get_intensifier(scenario, *, max_config_calls=2000, max_incumbents=10) -> Intensifier:
        """Configure intensive trial evaluation."""
    
    @staticmethod
    def get_initial_design(scenario, *, additional_configs=None) -> DefaultInitialDesign:
        """Configure default-only initial design."""
    
    @staticmethod
    def get_random_design(scenario, *, probability=0.5) -> ProbabilityRandomDesign:
        """Configure high random exploration."""
    
    @staticmethod
    def get_multi_objective_algorithm(scenario, *, objective_weights=None) -> MeanAggregationStrategy:
        """Configure multi-objective handling."""
    
    @staticmethod
    def get_runhistory_encoder(scenario) -> RunHistoryEncoder:
        """Configure standard feature encoding."""

Random Search Baseline

Random Online Aggressive Racing (ROAR) baseline using pure random search without surrogate models.

class RandomFacade(AbstractFacade):
    """ROAR baseline facade: pure random search with aggressive racing.

    Model, acquisition function, and random design are dummy components —
    configurations come entirely from the random acquisition maximizer.
    """

    @staticmethod
    def get_model(scenario) -> RandomModel:
        """Get dummy random model."""
    
    @staticmethod
    def get_acquisition_function(scenario) -> AbstractAcquisitionFunction:
        """Get dummy acquisition function."""
    
    @staticmethod
    def get_acquisition_maximizer(scenario) -> RandomSearch:
        """Configure pure random search."""
    
    @staticmethod
    def get_intensifier(scenario, *, max_config_calls=3, max_incumbents=10) -> Intensifier:
        """Configure trial intensifier."""
    
    @staticmethod
    def get_initial_design(scenario, *, additional_configs=None) -> DefaultInitialDesign:
        """Configure default-only initial design."""
    
    @staticmethod
    def get_random_design(scenario) -> AbstractRandomDesign:
        """Get dummy random design."""
    
    @staticmethod
    def get_multi_objective_algorithm(scenario, *, objective_weights=None) -> MeanAggregationStrategy:
        """Configure multi-objective handling."""
    
    @staticmethod
    def get_runhistory_encoder(scenario) -> RunHistoryEncoder:
        """Configure standard feature encoding."""

Model-Free Hyperband

Model-free Hyperband algorithm for multi-fidelity optimization without surrogate models, combining random search with successive halving.

class HyperbandFacade(RandomFacade):
    """Model-free Hyperband facade: random search plus successive halving.

    Inherits the dummy model/acquisition components from RandomFacade and
    overrides only the intensifier.
    """

    @staticmethod
    def get_intensifier(scenario, *, eta=3, n_seeds=1, instance_seed_order="shuffle_once", max_incumbents=10, incumbent_selection="highest_observed_budget") -> Hyperband:
        """Configure Hyperband intensifier with successive halving (``eta`` is the halving rate)."""

Install with Tessl CLI

npx tessl i tessl/pypi-smac

docs

advanced.md

core.md

facades.md

index.md

trials.md

tile.json