or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

advanced-methods.md · auto-classes.md · core-models.md · index.md · lora-methods.md · prompt-learning.md · utilities.md

docs/auto-classes.md

# Auto Classes

Automatic model loading and task-specific PEFT model classes that provide seamless integration with Hugging Face Hub and intelligent model selection based on saved configurations and task types.

## Capabilities

### Base Auto Model

The foundational auto class for loading PEFT models from pretrained configurations.

```python { .api }
class AutoPeftModel:
    """Base auto class for loading PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ):
        """
        Load a PEFT model from pretrained configuration.

        Args:
            pretrained_model_name_or_path: Path or identifier of pretrained model
            adapter_name: Name of the adapter to load
            is_trainable: Whether the model should be trainable
            config: Optional PEFT configuration to override saved config
            revision: Revision of the model to load
            **kwargs: Additional arguments passed to model loading

        Returns:
            Loaded PEFT model instance
        """
```

### Task-Specific Auto Models

Specialized auto classes for different NLP tasks that automatically select the appropriate model architecture.

```python { .api }
class AutoPeftModelForCausalLM(AutoPeftModel):
    """Auto class for causal language modeling PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForCausalLM:
        """
        Load a PEFT model for causal language modeling.

        Returns:
            PeftModelForCausalLM instance
        """

class AutoPeftModelForSequenceClassification(AutoPeftModel):
    """Auto class for sequence classification PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForSequenceClassification:
        """
        Load a PEFT model for sequence classification.

        Returns:
            PeftModelForSequenceClassification instance
        """

class AutoPeftModelForTokenClassification(AutoPeftModel):
    """Auto class for token classification PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForTokenClassification:
        """
        Load a PEFT model for token classification.

        Returns:
            PeftModelForTokenClassification instance
        """

class AutoPeftModelForQuestionAnswering(AutoPeftModel):
    """Auto class for question answering PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForQuestionAnswering:
        """
        Load a PEFT model for question answering.

        Returns:
            PeftModelForQuestionAnswering instance
        """

class AutoPeftModelForSeq2SeqLM(AutoPeftModel):
    """Auto class for sequence-to-sequence language modeling PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForSeq2SeqLM:
        """
        Load a PEFT model for sequence-to-sequence language modeling.

        Returns:
            PeftModelForSeq2SeqLM instance
        """

class AutoPeftModelForFeatureExtraction(AutoPeftModel):
    """Auto class for feature extraction PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForFeatureExtraction:
        """
        Load a PEFT model for feature extraction.

        Returns:
            PeftModelForFeatureExtraction instance
        """
```

### Task Type Mapping

Internal mapping that connects task types to appropriate model classes.

```python { .api }
MODEL_TYPE_TO_PEFT_MODEL_MAPPING: dict[str, type[PeftModel]] = {
    "SEQ_CLS": PeftModelForSequenceClassification,
    "SEQ_2_SEQ_LM": PeftModelForSeq2SeqLM,
    "CAUSAL_LM": PeftModelForCausalLM,
    "TOKEN_CLS": PeftModelForTokenClassification,
    "QUESTION_ANS": PeftModelForQuestionAnswering,
    "FEATURE_EXTRACTION": PeftModelForFeatureExtraction,
}
```

## Usage Examples

### Loading a Causal LM PEFT Model

```python
from peft import AutoPeftModelForCausalLM

# Load from Hugging Face Hub
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    adapter_name="default"
)

# Load local model
model = AutoPeftModelForCausalLM.from_pretrained(
    "./path/to/saved/peft/model"
)

# Load for training
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    is_trainable=True
)
```

### Loading Different Task Types

```python
from peft import (
    AutoPeftModelForSequenceClassification,
    AutoPeftModelForTokenClassification,
    AutoPeftModelForQuestionAnswering
)

# Sequence classification
seq_clf_model = AutoPeftModelForSequenceClassification.from_pretrained(
    "username/sentiment-peft-model"
)

# Token classification (NER)
token_clf_model = AutoPeftModelForTokenClassification.from_pretrained(
    "username/ner-peft-model"
)

# Question answering
qa_model = AutoPeftModelForQuestionAnswering.from_pretrained(
    "username/qa-peft-model"
)
```

### Loading with Custom Configuration

```python
from peft import AutoPeftModelForCausalLM, LoraConfig

# Override saved configuration
custom_config = LoraConfig(
    r=32,  # Different rank than saved model
    lora_alpha=64,
    target_modules=["q_proj", "v_proj"],
    task_type="CAUSAL_LM"
)

model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    config=custom_config,
    is_trainable=True
)
```

### Loading Specific Adapters

```python
# Load specific adapter by name
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/multi-adapter-model",
    adapter_name="task_specific_adapter"
)

# Load multiple adapters (done after initial loading)
model.load_adapter("username/another-adapter", adapter_name="second_task")
model.set_adapter("second_task")  # Switch to second adapter
```

### Loading from Different Revisions

```python
# Load specific revision/version
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    revision="v1.0"
)

# Load main branch (default)
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    revision="main"
)
```

### Working with Seq2Seq Models

```python
from peft import AutoPeftModelForSeq2SeqLM
from transformers import AutoTokenizer

# Load seq2seq PEFT model
model = AutoPeftModelForSeq2SeqLM.from_pretrained(
    "username/translation-peft-model"
)

tokenizer = AutoTokenizer.from_pretrained("t5-base")

# Use for generation
input_text = "translate English to French: Hello world"
inputs = tokenizer(input_text, return_tensors="pt")

outputs = model.generate(
    **inputs,
    max_new_tokens=50,
    num_beams=4,
    early_stopping=True
)

generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
```

### Feature Extraction

```python
from peft import AutoPeftModelForFeatureExtraction

# Load feature extraction model
model = AutoPeftModelForFeatureExtraction.from_pretrained(
    "username/embedding-peft-model"
)

# Extract features
inputs = tokenizer("Hello world", return_tensors="pt")
outputs = model(**inputs)

# Get last hidden states as features
features = outputs.last_hidden_state
```

### Error Handling and Validation

```python
from peft import AutoPeftModelForCausalLM, PeftConfig

try:
    # This will raise an error if task type doesn't match
    model = AutoPeftModelForCausalLM.from_pretrained(
        "username/sequence-classification-model"  # Wrong task type
    )
except ValueError as e:
    print(f"Task type mismatch: {e}")

# Check config before loading
config = PeftConfig.from_pretrained("username/my-peft-model")
print(f"Task type: {config.task_type}")
print(f"PEFT type: {config.peft_type}")

# Load with correct auto class based on config
if config.task_type == "CAUSAL_LM":
    model = AutoPeftModelForCausalLM.from_pretrained("username/my-peft-model")
elif config.task_type == "SEQ_CLS":
    model = AutoPeftModelForSequenceClassification.from_pretrained("username/my-peft-model")
```