State-of-the-art Parameter-Efficient Fine-Tuning (PEFT) methods for efficiently adapting large pretrained models
Auto classes for loading PEFT models that integrate with the Hugging Face Hub and select the appropriate task-specific model class based on the task type recorded in the saved PEFT configuration.
The foundational auto class for loading PEFT models from pretrained configurations.
class AutoPeftModel:
    """Base auto class for loading PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ):
"""
Load a PEFT model from pretrained configuration.
Args:
pretrained_model_name_or_path: Path or identifier of pretrained model
adapter_name: Name of the adapter to load
is_trainable: Whether the model should be trainable
config: Optional PEFT configuration to override saved config
revision: Revision of the model to load
**kwargs: Additional arguments passed to model loading
Returns:
Loaded PEFT model instance
"""Specialized auto classes for different NLP tasks that automatically select the appropriate model architecture.

Specialized auto classes for different NLP tasks that automatically select the appropriate model architecture.

class AutoPeftModelForCausalLM(AutoPeftModel):
"""Auto class for causal language modeling PEFT models."""
@classmethod
def from_pretrained(
cls,
pretrained_model_name_or_path,
adapter_name: str = "default",
is_trainable: bool = False,
config: Optional[PeftConfig] = None,
revision: Optional[str] = None,
**kwargs
) -> PeftModelForCausalLM:
"""
Load a PEFT model for causal language modeling.
Returns:
PeftModelForCausalLM instance
"""
class AutoPeftModelForSequenceClassification(AutoPeftModel):
    """Auto class for sequence classification PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForSequenceClassification:
        """
        Load a PEFT model for sequence classification.

        Returns:
            PeftModelForSequenceClassification instance
        """
class AutoPeftModelForTokenClassification(AutoPeftModel):
    """Auto class for token classification PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForTokenClassification:
        """
        Load a PEFT model for token classification.

        Returns:
            PeftModelForTokenClassification instance
        """
class AutoPeftModelForQuestionAnswering(AutoPeftModel):
    """Auto class for question answering PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForQuestionAnswering:
        """
        Load a PEFT model for question answering.

        Returns:
            PeftModelForQuestionAnswering instance
        """
class AutoPeftModelForSeq2SeqLM(AutoPeftModel):
    """Auto class for sequence-to-sequence language modeling PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForSeq2SeqLM:
        """
        Load a PEFT model for sequence-to-sequence language modeling.

        Returns:
            PeftModelForSeq2SeqLM instance
        """
class AutoPeftModelForFeatureExtraction(AutoPeftModel):
    """Auto class for feature extraction PEFT models."""

    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name_or_path,
        adapter_name: str = "default",
        is_trainable: bool = False,
        config: Optional[PeftConfig] = None,
        revision: Optional[str] = None,
        **kwargs
    ) -> PeftModelForFeatureExtraction:
        """
        Load a PEFT model for feature extraction.

        Returns:
            PeftModelForFeatureExtraction instance
"""Internal mapping that connects task types to appropriate model classes.
MODEL_TYPE_TO_PEFT_MODEL_MAPPING: dict[str, type[PeftModel]] = {
"SEQ_CLS": PeftModelForSequenceClassification,
"SEQ_2_SEQ_LM": PeftModelForSeq2SeqLM,
"CAUSAL_LM": PeftModelForCausalLM,
"TOKEN_CLS": PeftModelForTokenClassification,
"QUESTION_ANS": PeftModelForQuestionAnswering,
"FEATURE_EXTRACTION": PeftModelForFeatureExtraction,
}
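For illustration, this mapping can be used to resolve the concrete PEFT model class for a saved adapter before loading it. A minimal sketch with a hypothetical checkpoint id, assuming the mapping is importable from the top-level package (the exact import path can vary across PEFT versions):

from peft import MODEL_TYPE_TO_PEFT_MODEL_MAPPING, PeftConfig

config = PeftConfig.from_pretrained("username/my-peft-model")  # hypothetical id
peft_model_cls = MODEL_TYPE_TO_PEFT_MODEL_MAPPING[config.task_type]
print(peft_model_cls.__name__)  # e.g. PeftModelForCausalLM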

from peft import AutoPeftModelForCausalLM

# Load from Hugging Face Hub
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    adapter_name="default"
)

# Load local model
model = AutoPeftModelForCausalLM.from_pretrained(
    "./path/to/saved/peft/model"
)

# Load for training
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    is_trainable=True
)
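Once loaded, the model behaves like any causal language model. A minimal generation sketch; here the tokenizer is resolved from the base model name recorded in the adapter's config (the checkpoint ids above are hypothetical):

from transformers import AutoTokenizer

base_model_name = model.peft_config["default"].base_model_name_or_path
tokenizer = AutoTokenizer.from_pretrained(base_model_name)
inputs = tokenizer("Once upon a time", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=30)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))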

from peft import (
    AutoPeftModelForSequenceClassification,
    AutoPeftModelForTokenClassification,
    AutoPeftModelForQuestionAnswering,
)

# Sequence classification
seq_clf_model = AutoPeftModelForSequenceClassification.from_pretrained(
    "username/sentiment-peft-model"
)

# Token classification (NER)
token_clf_model = AutoPeftModelForTokenClassification.from_pretrained(
    "username/ner-peft-model"
)

# Question answering
qa_model = AutoPeftModelForQuestionAnswering.from_pretrained(
    "username/qa-peft-model"
)
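A minimal inference sketch for the sequence classification model loaded above. The tokenizer id is an assumption: adapter repos often include their base model's tokenizer files, otherwise load the tokenizer from the base model directly:

import torch
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("username/sentiment-peft-model")
inputs = tokenizer("Great movie!", return_tensors="pt")
with torch.no_grad():
    logits = seq_clf_model(**inputs).logits
predicted_class_id = logits.argmax(dim=-1).item()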

from peft import AutoPeftModelForCausalLM, LoraConfig

# Override saved configuration
custom_config = LoraConfig(
    r=32,  # Different rank than the saved model
    lora_alpha=64,
    target_modules=["q_proj", "v_proj"],
    task_type="CAUSAL_LM"
)

model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    config=custom_config,
    is_trainable=True
)

# Load specific adapter by name
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/multi-adapter-model",
    adapter_name="task_specific_adapter"
)

# Load multiple adapters (done after initial loading)
model.load_adapter("username/another-adapter", adapter_name="second_task")
model.set_adapter("second_task") # Switch to second adapter# Load specific revision/version

# Load specific revision/version
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    revision="v1.0"
)

# Load main branch (default)
model = AutoPeftModelForCausalLM.from_pretrained(
    "username/my-peft-model",
    revision="main"
)

from peft import AutoPeftModelForSeq2SeqLM
from transformers import AutoTokenizer

# Load seq2seq PEFT model
model = AutoPeftModelForSeq2SeqLM.from_pretrained(
    "username/translation-peft-model"
)
tokenizer = AutoTokenizer.from_pretrained("t5-base")

# Use for generation
input_text = "translate English to French: Hello world"
inputs = tokenizer(input_text, return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_new_tokens=50,
    num_beams=4,
    early_stopping=True
)
generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)

from peft import AutoPeftModelForFeatureExtraction

# Load feature extraction model
model = AutoPeftModelForFeatureExtraction.from_pretrained(
    "username/embedding-peft-model"
)

# Extract features (the tokenizer id below is an assumption; use the
# tokenizer that matches your base model)
tokenizer = AutoTokenizer.from_pretrained("username/embedding-peft-model")
inputs = tokenizer("Hello world", return_tensors="pt")
outputs = model(**inputs)
# Get last hidden states as features
features = outputs.last_hidden_state
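One common way to turn the per-token features into a single sentence embedding is mask-aware mean pooling over the sequence dimension; a sketch reusing inputs and features from above:

# Zero out padding positions, then average over the real tokens
mask = inputs["attention_mask"].unsqueeze(-1).to(features.dtype)
sentence_embedding = (features * mask).sum(dim=1) / mask.sum(dim=1)
print(sentence_embedding.shape)  # (batch_size, hidden_size)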

from peft import (
    AutoPeftModelForCausalLM,
    AutoPeftModelForSequenceClassification,
    PeftConfig,
)

try:
    # This will raise an error if the task type doesn't match
    model = AutoPeftModelForCausalLM.from_pretrained(
        "username/sequence-classification-model"  # Wrong task type
    )
except ValueError as e:
    print(f"Task type mismatch: {e}")

# Check config before loading
config = PeftConfig.from_pretrained("username/my-peft-model")
print(f"Task type: {config.task_type}")
print(f"PEFT type: {config.peft_type}")

# Load with correct auto class based on config
if config.task_type == "CAUSAL_LM":
    model = AutoPeftModelForCausalLM.from_pretrained("username/my-peft-model")
elif config.task_type == "SEQ_CLS":
    model = AutoPeftModelForSequenceClassification.from_pretrained("username/my-peft-model")

Install with Tessl CLI
npx tessl i tessl/pypi-peft