CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-mlxtend

Machine Learning Library Extensions providing essential tools for day-to-day data science tasks

Pending
Overview
Eval results
Files

docs/classification.md

Classification Algorithms

Advanced classification methods including ensemble voting, stacking, neural networks, and classic algorithms. All classifiers follow scikit-learn's fit/predict API and are compatible with scikit-learn pipelines and model selection tools.

Capabilities

Ensemble Vote Classifier

Combines multiple classifiers using majority voting (hard) or weighted average of predicted probabilities (soft voting).

class EnsembleVoteClassifier:
    def __init__(self, clfs, voting='hard', weights=None, verbose=0, 
                 use_clones=True, fit_base_estimators=True):
        """Majority-vote ensemble over a list of base classifiers.

        Parameters
        ----------
        clfs : list
            scikit-learn compatible base classifiers to combine.
        voting : str
            'hard' votes on predicted labels; 'soft' averages the
            predicted class probabilities.
        weights : list or None
            Optional per-classifier weights applied during voting.
        verbose : int
            Verbosity level of the fitting process.
        use_clones : bool
            Fit clones of the base classifiers rather than the originals.
        fit_base_estimators : bool
            Whether to (re)fit the base estimators during `fit`.
        """

    def fit(self, X, y):
        """Train every base classifier on (X, y)."""

    def predict(self, X):
        """Return the ensemble's voted class labels for X."""

    def predict_proba(self, X):
        """Return averaged class probabilities (soft voting only)."""

    def transform(self, X):
        """Return the fitted base classifiers' outputs as features."""

    def get_params(self, deep=True):
        """Return this estimator's parameters (scikit-learn convention)."""

    def set_params(self, **params):
        """Update this estimator's parameters (scikit-learn convention)."""

Stacking Classifier

Meta-learning ensemble that trains a meta-classifier on the predictions of base classifiers.

class StackingClassifier:
    def __init__(self, classifiers, meta_classifier, use_probas=False, 
                 average_probas=False, verbose=0, use_features_in_secondary=False):
        """Two-level ensemble: a meta-classifier learns from base predictions.

        Parameters
        ----------
        classifiers : list
            First-level (base) classifiers.
        meta_classifier : object
            Second-level classifier trained on the base models' outputs.
        use_probas : bool
            Feed predicted probabilities, rather than labels, to the
            meta-classifier.
        average_probas : bool
            Average the probabilities across base models when
            `use_probas=True`.
        verbose : int
            Verbosity level of the fitting process.
        use_features_in_secondary : bool
            Also pass the original feature matrix to the meta-classifier.
        """

    def fit(self, X, y):
        """Train the base classifiers, then fit the meta-classifier."""

    def predict(self, X):
        """Return class labels predicted by the meta-classifier."""

    def predict_meta_features(self, X):
        """Return the meta-feature matrix produced by the base models."""

    def get_params(self, deep=True):
        """Return this estimator's parameters (scikit-learn convention)."""

    def set_params(self, **params):
        """Update this estimator's parameters (scikit-learn convention)."""

Stacking CV Classifier

Cross-validation stacking classifier that uses cross-validation to generate meta-features and avoid overfitting.

class StackingCVClassifier:
    def __init__(self, classifiers, meta_classifier, cv=2, shuffle=True, 
                 stratify=True, random_state=0, verbose=0, use_probas=False,
                 use_features_in_secondary=False, store_train_meta_features=False,
                 use_clones=True, n_jobs=1):
        """Stacking ensemble whose meta-features come from cross-validation.

        Out-of-fold predictions are used as meta-features, which reduces
        the overfitting risk of plain stacking.

        Parameters
        ----------
        classifiers : list
            First-level (base) classifiers.
        meta_classifier : object
            Second-level classifier trained on the meta-features.
        cv : int
            Number of cross-validation folds.
        shuffle : bool
            Shuffle the data before splitting into folds.
        stratify : bool
            Use stratified folds.
        random_state : int
            Seed controlling the fold splits.
        verbose : int
            Verbosity level of the fitting process.
        use_probas : bool
            Use predicted probabilities as meta-features.
        use_features_in_secondary : bool
            Also pass the original feature matrix to the meta-classifier.
        store_train_meta_features : bool
            Keep the training-set meta-features after fitting.
        use_clones : bool
            Fit clones of the base classifiers rather than the originals.
        n_jobs : int
            Number of jobs to run in parallel.
        """

    def fit(self, X, y, groups=None):
        """Fit base classifiers via CV, then fit the meta-classifier."""

    def predict(self, X):
        """Return class labels predicted by the meta-classifier."""

    def predict_meta_features(self, X):
        """Return meta-features built from the base models' predictions."""

    def predict_proba(self, X):
        """Return class probabilities, if the meta-classifier supports them."""

Multi-Layer Perceptron

Neural network with configurable hidden layers for classification tasks.

class MultiLayerPerceptron:
    def __init__(self, eta=0.5, epochs=50, hidden_layers=[50], n_classes=None,
                 momentum=0.0, l1=0.0, l2=0.0, dropout=1.0, minibatches=1,
                 random_seed=None, print_progress=0):
        """Feed-forward neural network classifier with configurable layers.

        Parameters
        ----------
        eta : float
            Learning rate.
        epochs : int
            Number of passes over the training data.
        hidden_layers : list
            Neurons per hidden layer, e.g. ``[50]`` for one layer of 50.
        n_classes : int or None
            Number of output classes; inferred from `y` if None.
        momentum : float
            Momentum term for the weight updates.
        l1 : float
            L1 regularization strength.
        l2 : float
            L2 regularization strength.
        dropout : float
            Dropout keep rate.
        minibatches : int
            Number of minibatches per epoch (1 = full-batch gradient descent).
        random_seed : int or None
            Seed for weight initialization and shuffling.
        print_progress : int
            Print progress every n epochs.
        """

    def fit(self, X, y):
        """Train the network on (X, y)."""

    def predict(self, X):
        """Return predicted class labels for X."""

    def predict_proba(self, X):
        """Return predicted class probabilities for X."""

Logistic Regression

Binary logistic regression with L2 regularization and gradient descent optimization.

class LogisticRegression:
    def __init__(self, eta=0.01, epochs=50, l2_lambda=0.0, minibatches=1,
                 random_seed=None, print_progress=0):
        """Binary logistic regression trained by gradient descent.

        Parameters
        ----------
        eta : float
            Learning rate.
        epochs : int
            Number of passes over the training data.
        l2_lambda : float
            L2 regularization strength.
        minibatches : int
            Number of minibatches per epoch (1 = full-batch gradient descent).
        random_seed : int or None
            Seed for weight initialization and shuffling.
        print_progress : int
            Print progress every n epochs.
        """

    def fit(self, X, y):
        """Estimate the model weights from (X, y)."""

    def predict(self, X):
        """Return binary class labels for X."""

    def predict_proba(self, X):
        """Return predicted class probabilities for X."""

Softmax Regression

Multinomial logistic regression for multi-class classification.

class SoftmaxRegression:
    def __init__(self, eta=0.01, epochs=50, l2=0.0, minibatches=1,
                 n_classes=None, random_seed=None, print_progress=0):
        """Multinomial logistic (softmax) regression for multi-class problems.

        Parameters
        ----------
        eta : float
            Learning rate.
        epochs : int
            Number of passes over the training data.
        l2 : float
            L2 regularization strength.
        minibatches : int
            Number of minibatches per epoch (1 = full-batch gradient descent).
        n_classes : int or None
            Number of classes; inferred from `y` if None.
        random_seed : int or None
            Seed for weight initialization and shuffling.
        print_progress : int
            Print progress every n epochs.
        """

    def fit(self, X, y):
        """Estimate the model weights from (X, y)."""

    def predict(self, X):
        """Return multi-class labels for X."""

    def predict_proba(self, X):
        """Return predicted class probabilities for X."""

Perceptron

Classic perceptron algorithm for binary linear classification.

class Perceptron:
    def __init__(self, eta=0.01, epochs=50, random_seed=None, print_progress=0):
        """Classic Rosenblatt perceptron for binary linear classification.

        Parameters
        ----------
        eta : float
            Learning rate.
        epochs : int
            Number of passes over the training data.
        random_seed : int or None
            Seed for weight initialization and shuffling.
        print_progress : int
            Print progress every n epochs.
        """

    def fit(self, X, y):
        """Learn the decision boundary from (X, y)."""

    def predict(self, X):
        """Return binary class labels for X."""

Adaline

Adaptive Linear Neuron with gradient descent learning.

class Adaline:
    def __init__(self, eta=0.01, epochs=50, minibatches=None, random_seed=None,
                 print_progress=0):
        """ADAptive LInear NEuron trained with gradient descent.

        Parameters
        ----------
        eta : float
            Learning rate.
        epochs : int
            Number of passes over the training data.
        minibatches : int or None
            Number of minibatches for stochastic gradient descent;
            None selects closed-form / full-batch behavior.
        random_seed : int or None
            Seed for weight initialization and shuffling.
        print_progress : int
            Print progress every n epochs.
        """

    def fit(self, X, y):
        """Learn the model weights from (X, y)."""

    def predict(self, X):
        """Return binary class labels for X."""

OneR Classifier

Simple rule-based classifier that creates one rule for each predictor and selects the rule with the smallest total error.

class OneRClassifier:
    def __init__(self, resolve_ties='first'):
        """One-Rule classifier: pick the single feature rule with lowest error.

        Parameters
        ----------
        resolve_ties : str
            Tie-breaking strategy between equally good rules:
            'first' or 'chi-squared'.
        """

    def fit(self, X, y):
        """Derive one rule per feature and keep the best one."""

    def predict(self, X):
        """Return class labels from the selected rule."""

Usage Examples

Ensemble Voting Example

from mlxtend.classifier import EnsembleVoteClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import make_classification

# Synthetic binary-classification dataset
X, y = make_classification(n_samples=1000, n_features=20, n_classes=2, random_state=42)

# Base estimators to combine; SVC needs probability=True for soft voting
base_estimators = [
    RandomForestClassifier(random_state=42),
    SVC(probability=True, random_state=42),
    LogisticRegression(random_state=42),
]

# Soft voting averages the predicted class probabilities
ensemble = EnsembleVoteClassifier(clfs=base_estimators, voting='soft')
ensemble.fit(X, y)

# Labels and class probabilities from the fitted ensemble
predictions = ensemble.predict(X)
probabilities = ensemble.predict_proba(X)

Stacking Example

from mlxtend.classifier import StackingCVClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import make_classification

# Synthetic binary-classification dataset
X, y = make_classification(n_samples=1000, n_features=20, n_classes=2, random_state=42)

# First-level models and the second-level (meta) model
clf1 = RandomForestClassifier(random_state=42)
clf2 = SVC(probability=True, random_state=42)
meta_clf = LogisticRegression(random_state=42)

# 5-fold CV stacking; meta-features are the base models' probabilities
stacking = StackingCVClassifier(classifiers=[clf1, clf2], 
                               meta_classifier=meta_clf,
                               use_probas=True, cv=5)
stacking.fit(X, y)

# Labels from the fitted stacked ensemble
predictions = stacking.predict(X)

Install with Tessl CLI

npx tessl i tessl/pypi-mlxtend

docs

classification.md

clustering.md

datasets.md

evaluation.md

feature-engineering.md

file-io.md

index.md

math-utils.md

pattern-mining.md

plotting.md

preprocessing.md

regression.md

text-processing.md

utilities.md

tile.json