Orange, a component-based data mining framework.
Status:
- Quality: pending review — has not yet been checked against best practices.
- Impact: pending — no evaluation scenarios have been run.
Orange3 provides a comprehensive collection of supervised learning algorithms for categorical prediction tasks. All classifiers follow a consistent pattern: create a learner, then call it with training data to produce a trained model.
Tree-based classification algorithms that create interpretable decision rules.
class TreeLearner:
    """Decision tree classifier.

    Create the learner, then call it with training data to produce a
    trained classification model.

    Args:
        criterion: Split criterion ('gini', 'entropy').
            NOTE(review): default is None — presumably the framework
            selects a criterion; confirm against Orange docs.
        max_depth: Maximum tree depth (None means unlimited).
        min_samples_split: Minimum samples required to split a node.
        min_samples_leaf: Minimum samples required in leaf nodes.
    """

    def __init__(self, criterion=None, max_depth=None,
                 min_samples_split=2, min_samples_leaf=1): ...

    def __call__(self, data):
        """Train and return classification model."""
class SimpleTreeLearner:
    """Fast, simplified decision tree implementation.

    Args:
        max_depth: Maximum tree depth (None means unlimited).
        min_samples_split: Minimum samples required to split a node.
    """

    def __init__(self, max_depth=None, min_samples_split=5): ...

    def __call__(self, data):
        """Train and return tree model."""


# Linear models for classification with probabilistic outputs.
class LogisticRegressionLearner:
    """Logistic regression classifier.

    Args:
        penalty: Regularization type ('l1', 'l2', 'elasticnet').
        C: Inverse regularization strength (smaller = stronger).
        solver: Optimization algorithm.
    """

    def __init__(self, penalty='l2', C=1.0, solver='lbfgs'): ...

    def __call__(self, data):
        """Train and return logistic regression model."""
class SoftmaxRegressionLearner:
    """Multi-class logistic regression.

    Args:
        penalty: Regularization type.
        C: Inverse regularization strength.
    """

    def __init__(self, penalty='l2', C=1.0): ...

    def __call__(self, data):
        """Train and return softmax regression model."""


# SVM-based classification with various kernel options.
class SVMLearner:
    """Support Vector Machine classifier.

    Args:
        kernel: Kernel type ('linear', 'poly', 'rbf', 'sigmoid').
        C: Regularization parameter.
        gamma: Kernel coefficient.
        degree: Polynomial kernel degree (used by the 'poly' kernel).
    """

    def __init__(self, kernel='rbf', C=1.0, gamma='scale', degree=3): ...

    def __call__(self, data):
        """Train and return SVM model."""
class LinearSVMLearner:
    """Linear Support Vector Machine.

    Args:
        C: Regularization parameter.
        dual: Solve the dual optimization problem when True.
    """

    def __init__(self, C=1.0, dual=True): ...

    def __call__(self, data):
        """Train and return linear SVM model."""
class NuSVMLearner:
    """Nu-Support Vector Machine.

    Args:
        nu: Upper bound on the fraction of margin errors.
        kernel: Kernel type.
        gamma: Kernel coefficient.
    """

    def __init__(self, nu=0.5, kernel='rbf', gamma='scale'): ...

    def __call__(self, data):
        """Train and return Nu-SVM model."""


# Ensemble algorithms that combine multiple learners for improved performance.
class RandomForestLearner:
    """Random Forest classifier.

    Args:
        n_estimators: Number of trees in the forest.
        max_depth: Maximum tree depth (None means unlimited).
        max_features: Number of features considered per split.
        bootstrap: Use bootstrap sampling when building trees.
    """

    def __init__(self, n_estimators=10, max_depth=None,
                 max_features='sqrt', bootstrap=True): ...

    def __call__(self, data):
        """Train and return random forest model."""
class SimpleRandomForestLearner:
    """Optimized random forest implementation.

    Args:
        n_estimators: Number of trees in the forest.
        max_depth: Maximum tree depth.
    """

    def __init__(self, n_estimators=10, max_depth=3): ...

    def __call__(self, data):
        """Train and return simple random forest model."""
class GBClassifier:
    """Gradient Boosting classifier.

    Args:
        n_estimators: Number of boosting stages.
        learning_rate: Shrinkage applied to each stage's contribution.
        max_depth: Maximum depth of the individual trees.
    """

    def __init__(self, n_estimators=100, learning_rate=0.1, max_depth=3): ...

    def __call__(self, data):
        """Train and return gradient boosting model."""


# Algorithms based on probabilistic modeling.
class NaiveBayesLearner:
    """Naive Bayes classifier.

    Takes no constructor arguments; call with training data to obtain
    a trained model.
    """

    def __call__(self, data):
        """Train and return Naive Bayes model."""


# k-Nearest Neighbors and related algorithms.
class KNNLearner:
    """k-Nearest Neighbors classifier.

    Args:
        n_neighbors: Number of neighbors to consult.
        metric: Distance metric.
        weights: Weight function ('uniform', 'distance').
    """

    def __init__(self, n_neighbors=5, metric='euclidean', weights='uniform'): ...

    def __call__(self, data):
        """Train and return k-NN model."""


# Multi-layer perceptron classifiers.
class NNClassificationLearner:
    """Neural network classifier.

    Args:
        hidden_layer_sizes: Tuple of hidden layer sizes.
        activation: Activation function.
        solver: Optimization solver.
        learning_rate_init: Initial learning rate.
    """

    def __init__(self, hidden_layer_sizes=(100,), activation='relu',
                 solver='adam', learning_rate_init=0.001): ...

    def __call__(self, data):
        """Train and return neural network model."""


# Stochastic gradient descent-based classifiers.
class SGDClassificationLearner:
    """Stochastic Gradient Descent classifier.

    Args:
        loss: Loss function ('hinge', 'log', 'perceptron').
        penalty: Regularization ('l1', 'l2', 'elasticnet').
        alpha: Regularization strength.
    """

    def __init__(self, loss='hinge', penalty='l2', alpha=0.0001): ...

    def __call__(self, data):
        """Train and return SGD model."""


# Simple baseline algorithms for comparison.
class MajorityLearner:
    """Always predicts the majority class.

    Useful as a baseline against which real learners are compared.
    """

    def __call__(self, data):
        """Train and return majority class model."""


# Rule induction algorithms.
class CN2Learner:
    """CN2 rule learning algorithm.

    Args:
        rule_finder: Rule finding strategy.
        quality_evaluator: Rule quality measure.
    """

    def __init__(self, rule_finder=None, quality_evaluator=None): ...

    def __call__(self, data):
        """Train and return CN2 rule model."""
class CN2UnorderedLearner:
    """CN2 algorithm producing unordered rules."""

    def __init__(self): ...

    def __call__(self, data):
        """Train and return unordered CN2 model."""


# Algorithms for identifying anomalous instances.
class LocalOutlierFactorLearner:
    """Local Outlier Factor for outlier detection.

    Args:
        n_neighbors: Number of neighbors used in the density estimate.
        contamination: Expected proportion of outliers in the data.
    """

    def __init__(self, n_neighbors=20, contamination=0.1): ...

    def __call__(self, data):
        """Train and return LOF model."""
class IsolationForestLearner:
    """Isolation Forest for outlier detection.

    Args:
        n_estimators: Number of isolation trees.
        contamination: Expected proportion of outliers in the data.
    """

    def __init__(self, n_estimators=100, contamination=0.1): ...

    def __call__(self, data):
        """Train and return isolation forest model."""
class OneClassSVMLearner:
    """One-class SVM for outlier detection.

    Args:
        kernel: Kernel type.
        gamma: Kernel coefficient.
        nu: Upper bound on the fraction of training errors.
    """

    def __init__(self, kernel='rbf', gamma='scale', nu=0.05): ...

    def __call__(self, data):
        """Train and return one-class SVM model."""


# Probability calibration for better confidence estimates.
class CalibratedLearner:
    """Calibrated classifier for better probability estimates.

    Args:
        base_learner: Base classification algorithm to wrap.
        method: Calibration method ('sigmoid', 'isotonic').
    """

    def __init__(self, base_learner, method='sigmoid'): ...

    def __call__(self, data):
        """Train and return calibrated model."""
class ThresholdLearner:
    """Threshold-based binary classifier.

    Args:
        threshold: Probability cut-off separating the two classes.
    """

    def __init__(self, threshold=0.5): ...

    def __call__(self, data):
        """Train and return threshold model."""


# Basic classification workflow
# Example: the standard Orange classification workflow — build learners,
# train them by calling with data, predict, and cross-validate.
from Orange.data import Table
from Orange.classification import TreeLearner, LogisticRegressionLearner
from Orange.evaluation import CrossValidation, CA

# Load data
data = Table("iris")

# Create learners
tree = TreeLearner(max_depth=5)
logistic = LogisticRegressionLearner(C=1.0)

# Train models (a learner called with data returns a trained model)
tree_model = tree(data)
logistic_model = logistic(data)

# Make predictions
predictions = tree_model(data[:5])
# Passing Model.Probs requests class probabilities instead of labels
probabilities = logistic_model(data[:5], logistic_model.Probs)

# Evaluate with cross-validation
results = CrossValidation(data, [tree, logistic], k=10)
accuracies = CA(results)
print(f"Tree accuracy: {accuracies[0]:.3f}")
print(f"Logistic accuracy: {accuracies[1]:.3f}")

# Ensemble example
from Orange.classification import RandomForestLearner

rf = RandomForestLearner(n_estimators=50, max_depth=10)
rf_model = rf(data)

# Rule learning example
from Orange.classification import CN2Learner

cn2 = CN2Learner()
rule_model = cn2(data)

# Install with Tessl CLI
Install with the Tessl CLI: `npx tessl i tessl/pypi-orange3`