or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

blackbox.md · data.md · glassbox.md · greybox.md · index.md · performance.md · privacy.md · utils.md · visualization.md

docs/performance.md

0

# Performance Evaluation

1

2

Comprehensive model performance analysis tools with interactive visualizations for classification and regression tasks.

3

4

## Capabilities

5

6

### ROC Analysis

7

8

Receiver Operating Characteristic curves for binary and multi-class classification performance evaluation.

9

10

```python { .api }

11

class ROC:

12

def __init__(self, **kwargs):

13

"""ROC curve analyzer."""

14

15

def explain_perf(self, y_true, y_prob, name=None):

16

"""

17

Generate ROC curve analysis.

18

19

Parameters:

20

y_true (array-like): True binary labels

21

y_prob (array-like): Predicted probabilities

22

name (str, optional): Name for explanation

23

24

Returns:

25

Explanation object with ROC curves and AUC scores

26

"""

27

```

28

29

### Precision-Recall Analysis

30

31

Precision-Recall curves particularly useful for imbalanced datasets.

32

33

```python { .api }

34

class PR:

35

def __init__(self, **kwargs):

36

"""Precision-Recall curve analyzer."""

37

38

def explain_perf(self, y_true, y_prob, name=None):

39

"""

40

Generate Precision-Recall curve analysis.

41

42

Parameters:

43

y_true (array-like): True binary labels

44

y_prob (array-like): Predicted probabilities

45

name (str, optional): Name for explanation

46

47

Returns:

48

Explanation object with PR curves and average precision scores

49

"""

50

```

51

52

### Regression Performance

53

54

Comprehensive regression metrics including residual analysis and error distributions.

55

56

```python { .api }

57

class RegressionPerf:

58

def __init__(self, **kwargs):

59

"""Regression performance analyzer."""

60

61

def explain_perf(self, y_true, y_pred, name=None):

62

"""

63

Generate regression performance analysis.

64

65

Parameters:

66

y_true (array-like): True target values

67

y_pred (array-like): Predicted values

68

name (str, optional): Name for explanation

69

70

Returns:

71

Explanation object with regression metrics and residual plots

72

"""

73

```

74

75

## Usage Examples

76

77

```python

78

from interpret.perf import ROC, PR, RegressionPerf

79

from interpret import show

80

from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor

81

from sklearn.datasets import make_classification, make_regression

82

from sklearn.model_selection import train_test_split

83

84

# Classification performance

85

X, y = make_classification(n_samples=1000, n_classes=2, random_state=42)

86

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

87

88

clf = RandomForestClassifier(random_state=42)

89

clf.fit(X_train, y_train)

90

y_prob = clf.predict_proba(X_test)[:, 1]

91

92

# ROC analysis

93

roc = ROC()

94

roc_exp = roc.explain_perf(y_test, y_prob, name="ROC Analysis")

95

show(roc_exp)

96

97

# PR analysis

98

pr = PR()

99

pr_exp = pr.explain_perf(y_test, y_prob, name="PR Analysis")

100

show(pr_exp)

101

102

# Regression performance

103

X_reg, y_reg = make_regression(n_samples=1000, noise=0.1, random_state=42)

104

X_train_reg, X_test_reg, y_train_reg, y_test_reg = train_test_split(X_reg, y_reg, test_size=0.2, random_state=42)

105

106

reg = RandomForestRegressor(random_state=42)

107

reg.fit(X_train_reg, y_train_reg)

108

y_pred_reg = reg.predict(X_test_reg)

109

110

reg_perf = RegressionPerf()

111

reg_exp = reg_perf.explain_perf(y_test_reg, y_pred_reg, name="Regression Performance")

112

show(reg_exp)

113

```