Intel Extension for Scikit-learn provides hardware-accelerated implementations of scikit-learn algorithms optimized for Intel CPUs and GPUs.
Control functions enable Intel optimizations globally and manage package configuration. These functions determine how scikit-learn algorithms are accelerated and allow runtime configuration of optimization behavior.

Enable or disable Intel optimizations for all scikit-learn imports system-wide:
def patch_sklearn():
    """Enable Intel Extension optimizations for all scikit-learn algorithms.

    After calling this function, all subsequent scikit-learn imports will
    automatically use Intel-optimized implementations when available.

    Example:
        from sklearnex import patch_sklearn

        patch_sklearn()

        # Now uses Intel-optimized implementation
        from sklearn.ensemble import RandomForestClassifier
    """
def unpatch_sklearn():
    """Disable Intel Extension optimizations and restore original scikit-learn.

    Returns scikit-learn to its original state, using standard implementations
    for all subsequent imports and operations.
    """
def sklearn_is_patched() -> bool:
    """Check if scikit-learn is currently patched with Intel optimizations.

    Returns:
        bool: True if Intel optimizations are active, False otherwise
    """


# Query available optimizations and check implementation status.
def get_patch_map() -> dict:
    """Get mapping of available Intel-optimized implementations.

    Returns:
        dict: Mapping from original sklearn classes to Intel implementations
    """
def get_patch_names() -> list:
    """Get list of algorithm names available for Intel optimization.

    Returns:
        list: Names of algorithms that have Intel-accelerated versions
    """
def is_patched_instance(estimator) -> bool:
    """Check if a specific estimator instance uses Intel optimization.

    Parameters:
        estimator: Scikit-learn estimator instance

    Returns:
        bool: True if instance uses Intel optimization, False otherwise
    """


# Manage runtime configuration for Intel optimizations and scikit-learn settings.
def set_config(**params):
    """Set configuration parameters for Intel Extension and scikit-learn.

    Parameters:
        **params: Configuration parameters to set
    """
def get_config() -> dict:
    """Retrieve current configuration values.

    Returns:
        dict: Current configuration parameters and their values
    """
def config_context(**params):
    """Context manager for temporary configuration changes.

    Parameters:
        **params: Temporary configuration parameters

    Example:
        with config_context(assume_finite=True):
            # Code with temporary configuration
            model.fit(X, y)
    """


# Example: enable optimizations and verify patch status.
from sklearnex import patch_sklearn, sklearn_is_patched
# Check initial state
print(f"Initially patched: {sklearn_is_patched()}")  # False

# Enable optimizations
patch_sklearn()
print(f"After patching: {sklearn_is_patched()}")  # True

# All sklearn imports now use Intel optimizations
from sklearn.cluster import KMeans
from sklearn.ensemble import RandomForestClassifier

# Create and use optimized models
kmeans = KMeans(n_clusters=3)
rf = RandomForestClassifier(n_estimators=100)


# Example: discover which optimizations are available.
from sklearnex import patch_sklearn, get_patch_names, is_patched_instance
from sklearn.ensemble import RandomForestClassifier

# Enable optimizations
patch_sklearn()

# See what's available
available_patches = get_patch_names()
print(f"Available optimizations: {available_patches}")

# Create model and check if optimized
rf = RandomForestClassifier()
print(f"Using Intel optimization: {is_patched_instance(rf)}")  # True


# Example: runtime configuration management.
from sklearnex import config_context, get_config
from sklearn.ensemble import RandomForestClassifier

# Check current config
current_config = get_config()
print(f"Current config: {current_config}")

# Temporary configuration change
with config_context(assume_finite=True):
    rf = RandomForestClassifier()
    # Model created with temporary configuration

# Restore original scikit-learn when done
unpatch_sklearn()

## Install with Tessl CLI

    npx tessl i tessl/pypi-scikit-learn-intelex