CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-metric-learn

Python implementations of metric learning algorithms

Pending
Overview
Eval results
Files

docs/specialized-algorithms.md

Specialized Algorithms

Algorithms designed for specific use cases beyond standard classification metric learning, including clustering, kernel regression, and baseline methods.

Capabilities

Metric Learning for Kernel Regression (MLKR)

Learns a metric specifically optimized for kernel regression tasks. Unlike other algorithms focused on classification, MLKR optimizes the metric to improve regression performance.

class MLKR(MahalanobisMixin, TransformerMixin):
    def __init__(self, n_components=None, init='auto', tol=None, max_iter=1000, verbose=False, preprocessor=None, random_state=None):
        """
        Parameters:
        - n_components: int or None, dimensionality of transformed space
        - init: str or array-like, initialization method ('auto', 'pca', 'lda', 'identity', 'random')
        - tol: float or None, convergence tolerance
        - max_iter: int, maximum number of iterations
        - verbose: bool, whether to print progress messages
        - preprocessor: array-like or callable, preprocessor for input data
        - random_state: int, random state for reproducibility
        """
    
    def fit(self, X, y):
        """
        Fit the MLKR metric learner.
        
        Parameters:
        - X: array-like, shape=(n_samples, n_features), training data
        - y: array-like, shape=(n_samples,), continuous target values
        
        Returns:
        - self: returns the instance itself
        """
    
    def transform(self, X):
        """
        Transform data using the learned metric.
        
        Parameters:
        - X: array-like, shape=(n_samples, n_features), data to transform
        
        Returns:
        - X_transformed: array-like, shape=(n_samples, n_components), transformed data
        """

Usage example:

from metric_learn import MLKR
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split

# Generate regression data
X, y = make_regression(n_samples=200, n_features=10, noise=0.1, random_state=42)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

# Learn metric for regression
mlkr = MLKR(n_components=8, max_iter=100)
mlkr.fit(X_train, y_train)

# Transform data for use with kernel regression
X_train_transformed = mlkr.transform(X_train)
X_test_transformed = mlkr.transform(X_test)

Mahalanobis Metric for Clustering (MMC)

Learns a metric optimized for clustering tasks by maximizing the separation between clusters while minimizing intra-cluster distances. Particularly effective for improving k-means clustering.

class MMC(MahalanobisMixin, TransformerMixin):
    def __init__(self, init='identity', max_iter=100, max_proj=10000, convergence_threshold=1e-3,
                 num_constraints=None, diagonal=False, diagonal_c=1.0, verbose=False,
                 preprocessor=None, random_state=None):
        """
        Parameters:
        - init: str or array-like, initialization method ('identity', 'random')
        - max_iter: int, maximum number of iterations
        - max_proj: int, maximum number of gradient projection steps  
        - convergence_threshold: float, convergence threshold
        - num_constraints: int or None, number of constraints to satisfy
        - diagonal: bool, whether to learn a diagonal metric
        - diagonal_c: float, regularization for diagonal entries
        - verbose: bool, whether to print progress messages
        - preprocessor: array-like or callable, preprocessor for input data
        - random_state: int, random state for reproducibility
        """
    
    def fit(self, pairs, y):
        """
        Fit the MMC metric learner.
        
        Parameters:
        - pairs: array-like, shape=(n_constraints, 2, n_features) or (n_constraints, 2),
                3D array of pairs or 2D array of indices
        - y: array-like, shape=(n_constraints,), constraint labels (+1 for similar, -1 for dissimilar)
        
        Returns:
        - self: returns the instance itself
        """

Usage example:

import numpy as np
from metric_learn import MMC
from sklearn.datasets import make_blobs
from sklearn.cluster import KMeans

# Generate clusterable data
X, y_true = make_blobs(n_samples=300, centers=4, n_features=5, random_state=42)

# Create constraints based on true clusters (for demonstration)
from metric_learn import Constraints
constraints = Constraints(y_true)
pos_pairs, neg_pairs = constraints.positive_negative_pairs(n_constraints=250)
pairs = np.vstack([pos_pairs, neg_pairs])
y_constraints = np.hstack([np.ones(len(pos_pairs)), -np.ones(len(neg_pairs))])

# Learn metric for clustering
mmc = MMC(preprocessor=X, max_iter=50)
mmc.fit(pairs, y_constraints)

# Use with k-means clustering
X_transformed = mmc.transform(X)
kmeans = KMeans(n_clusters=4, random_state=42)
cluster_labels = kmeans.fit_predict(X_transformed)

Covariance Baseline

A simple baseline method that uses the inverse covariance matrix as the metric. This provides a reasonable starting point and comparison baseline for other metric learning algorithms.

class Covariance(MahalanobisMixin, TransformerMixin):
    def __init__(self, preprocessor=None):
        """
        Parameters:
        - preprocessor: array-like or callable, preprocessor for input data
        """
    
    def fit(self, X, y=None):
        """
        Fit the Covariance metric learner.
        
        Parameters:
        - X: array-like, shape=(n_samples, n_features), training data
        - y: array-like, optional, not used but kept for API consistency
        
        Returns:
        - self: returns the instance itself
        """

Usage example:

from metric_learn import Covariance
from sklearn.datasets import load_iris

X, y = load_iris(return_X_y=True)

# Fit simple covariance baseline
cov = Covariance()
cov.fit(X)

# Get the learned metric (inverse covariance)
metric_matrix = cov.get_mahalanobis_matrix()
print("Metric matrix shape:", metric_matrix.shape)

# Transform data
X_transformed = cov.transform(X)

Integration with Clustering

Specialized algorithms like MMC are designed to work seamlessly with clustering algorithms:

import numpy as np
from metric_learn import MMC, Constraints
from sklearn.cluster import KMeans, AgglomerativeClustering
from sklearn.datasets import make_blobs
from sklearn.metrics import adjusted_rand_score

# Generate data with natural clusters
X, y_true = make_blobs(n_samples=200, centers=3, n_features=4, 
                       cluster_std=1.5, random_state=42)

# Generate constraints from partial labeling
constraints = Constraints(y_true)
pos_pairs, neg_pairs = constraints.positive_negative_pairs(n_constraints=150)
pairs = np.vstack([pos_pairs, neg_pairs])
y_constraints = np.hstack([np.ones(len(pos_pairs)), -np.ones(len(neg_pairs))])

# Learn clustering-optimized metric
mmc = MMC(preprocessor=X, max_iter=100, verbose=True)
mmc.fit(pairs, y_constraints)

# Apply to different clustering algorithms
X_transformed = mmc.transform(X)

# K-means clustering
kmeans = KMeans(n_clusters=3, random_state=42)
kmeans_labels = kmeans.fit_predict(X_transformed)
print("K-means ARI:", adjusted_rand_score(y_true, kmeans_labels))

# Hierarchical clustering  
agg_clustering = AgglomerativeClustering(n_clusters=3)
agg_labels = agg_clustering.fit_predict(X_transformed)
print("Hierarchical ARI:", adjusted_rand_score(y_true, agg_labels))

Regression Integration

MLKR is specifically designed for regression tasks:

from metric_learn import MLKR
from sklearn.kernel_ridge import KernelRidge
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error

# Generate regression data
X, y = make_regression(n_samples=300, n_features=8, noise=0.1, random_state=42)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

# Learn metric for regression
mlkr = MLKR(n_components=6, max_iter=200)
mlkr.fit(X_train, y_train)

# Transform data
X_train_transformed = mlkr.transform(X_train)
X_test_transformed = mlkr.transform(X_test)

# Use with kernel regression
kernel_ridge = KernelRidge(alpha=0.1, kernel='rbf')
kernel_ridge.fit(X_train_transformed, y_train)

# Evaluate
y_pred = kernel_ridge.predict(X_test_transformed)
mse = mean_squared_error(y_test, y_pred)
print(f"MSE with MLKR transformation: {mse:.4f}")

# Compare with original features
kernel_ridge_baseline = KernelRidge(alpha=0.1, kernel='rbf')
kernel_ridge_baseline.fit(X_train, y_train)
y_pred_baseline = kernel_ridge_baseline.predict(X_test)
mse_baseline = mean_squared_error(y_test, y_pred_baseline)
print(f"MSE without transformation: {mse_baseline:.4f}")

Common Patterns

All specialized algorithms follow similar patterns but are optimized for their specific use cases:

from metric_learn import MLKR, MMC, Covariance

# For regression tasks
mlkr = MLKR(n_components=5)
mlkr.fit(X_regression, y_continuous)

# For clustering tasks (requires constraints)
mmc = MMC(preprocessor=X_clustering)
mmc.fit(pairs, y_constraints)

# For baseline comparison
cov = Covariance()
cov.fit(X_baseline)

# All provide standard metric learning interface
for algo in [mlkr, mmc, cov]:
    X_transformed = algo.transform(X)
    metric_func = algo.get_metric()
    if hasattr(algo, 'get_mahalanobis_matrix'):
        M = algo.get_mahalanobis_matrix()

Install with Tessl CLI

npx tessl i tessl/pypi-metric-learn

docs

base-classes.md

index.md

specialized-algorithms.md

supervised-algorithms.md

utilities.md

weakly-supervised-algorithms.md

tile.json