Multi-backend deep learning framework providing a unified API for building and training neural networks across JAX, TensorFlow, PyTorch, and OpenVINO backends
—
Regularization techniques for preventing overfitting in neural networks. Regularizers add penalty terms to the loss function based on layer weights, encouraging simpler models that generalize better. Keras provides standard regularization methods including L1, L2, and orthogonal regularization.
Standard weight decay regularization techniques that penalize large weights.
class L1:
    """L1 regularization (Lasso).

    Penalizes the sum of absolute weight values, pushing individual
    weights toward exactly zero (sparsity).

    Args:
        l1: Float, the L1 regularization factor. Defaults to 0.01.
    """
    def __init__(self, l1=0.01): ...
class L2:
    """L2 regularization (Ridge).

    Penalizes the sum of squared weight values ("weight decay"),
    shrinking weights toward zero without zeroing them out.

    Args:
        l2: Float, the L2 regularization factor. Defaults to 0.01.
    """
    def __init__(self, l2=0.01): ...
class L1L2:
    """Combined L1 and L2 regularization (Elastic Net).

    Applies both an L1 and an L2 penalty; either factor may be zero,
    reducing to pure L1 or pure L2.
    """
def __init__(self, l1=0.0, l2=0.0): ...Advanced regularization techniques for specific architectural needs.
class OrthogonalRegularizer:
    """Orthogonal regularization for weight matrices.

    Encourages the rows (mode='rows') or columns (mode='columns') of a
    2-D weight matrix to be orthogonal to one another.
    """
def __init__(self, factor=0.01, mode='rows'): ...Base classes and utility functions for working with regularizers.
class Regularizer:
    """Base class for all regularizers.

    Subclasses implement `__call__` to compute a scalar penalty from a
    weight tensor, and `get_config` to support (de)serialization.
    """
    def __call__(self, weights): ...
    def get_config(self): ...
# --- Serialization / lookup utilities ---

def get(identifier):
    """Retrieve a regularizer by name or instance."""

def serialize(regularizer):
    """Serialize a regularizer to configuration."""

def deserialize(config, custom_objects=None):
"""Deserialize a regularizer from configuration."""Convenient function aliases for creating regularizers.
# Lowercase function aliases mirroring the class-based regularizers.

def l1(l1=0.01):
    """Create L1 regularizer."""

def l2(l2=0.01):
    """Create L2 regularizer."""

def l1_l2(l1=0.0, l2=0.0):
    """Create L1L2 regularizer."""

def orthogonal_regularizer(factor=0.01, mode='rows'):
"""Create orthogonal regularizer."""from keras import layers, regularizers
# L2 (weight decay) regularization on a Dense layer's kernel and bias.
dense_layer = layers.Dense(64,
                           kernel_regularizer=regularizers.L2(0.01),
                           bias_regularizer=regularizers.L2(0.01))
# Using a string identifier (picks up the default factor).
dense_layer = layers.Dense(64, kernel_regularizer='l2')
# Using function form
dense_layer = layers.Dense(64, kernel_regularizer=regularizers.l2(0.001))

from keras import layers, regularizers
# L1 + L2 regularization (Elastic Net) on the kernel, plus an activity
# regularizer that penalizes large layer outputs rather than weights.
dense_layer = layers.Dense(64,
                           kernel_regularizer=regularizers.L1L2(l1=0.001, l2=0.01),
                           activity_regularizer=regularizers.L1(0.01))
# Using function form
dense_layer = layers.Dense(64,
                           kernel_regularizer=regularizers.l1_l2(l1=0.001, l2=0.01))

from keras import layers, regularizers
# Convolutional layer with L2 decay on the kernel and an L1 penalty on
# the bias.
conv_layer = layers.Conv2D(32, (3, 3),
                           kernel_regularizer=regularizers.L2(0.001),
                           bias_regularizer=regularizers.L1(0.001))
# Activity regularization
conv_layer = layers.Conv2D(32, (3, 3),
                           activity_regularizer=regularizers.L1(0.01))

from keras import layers, regularizers
# Orthogonal regularization on LSTM weights; both the class form and the
# lowercase function alias are shown.
lstm_layer = layers.LSTM(128,
                         kernel_regularizer=regularizers.OrthogonalRegularizer(factor=0.01),
                         recurrent_regularizer=regularizers.orthogonal_regularizer(0.01))
# Different modes for orthogonal regularization
dense_layer = layers.Dense(64,
                           kernel_regularizer=regularizers.OrthogonalRegularizer(
                               factor=0.01, mode='columns'))

import keras
from keras import regularizers
class CustomRegularizer(regularizers.Regularizer):
    """Example custom regularizer implementing group (column-wise) sparsity.

    Penalty = strength * sum over columns of the column's L2 norm,
    encouraging entire columns of the weight matrix toward zero.
    """

    def __init__(self, strength=0.01):
        # Coefficient scaling the group-sparsity penalty.
        self.strength = strength

    def __call__(self, weights):
        # L2 norm of each column (sum of squares along axis 0, then sqrt).
        squared = keras.ops.square(weights)
        column_norms = keras.ops.sqrt(keras.ops.sum(squared, axis=0))
        # Total penalty is the scaled sum of all column norms.
        return self.strength * keras.ops.sum(column_norms)

    def get_config(self):
        # Everything needed to reconstruct this regularizer.
        return {'strength': self.strength}
# Use custom regularizer
dense_layer = layers.Dense(64, kernel_regularizer=CustomRegularizer(0.01))

from keras import layers, regularizers, models
def add_regularization(model, regularizer):
    """Attach `regularizer` as the kernel regularizer of every layer.

    Only layers that expose a `kernel_regularizer` attribute are
    modified; the model is mutated in place and returned so the call
    can be chained.
    """
    eligible = (lyr for lyr in model.layers if hasattr(lyr, 'kernel_regularizer'))
    for target in eligible:
        target.kernel_regularizer = regularizer
    return model
# Apply regularization to an already-constructed model.
base_model = models.Sequential([
    layers.Dense(128, activation='relu'),
    layers.Dense(64, activation='relu'),
    layers.Dense(10, activation='softmax')
])
regularized_model = add_regularization(base_model, regularizers.L2(0.01))

from keras import callbacks, regularizers
import keras
class RegularizationScheduler(callbacks.Callback):
    """Callback that decays a named layer's L2 strength every epoch."""

    def __init__(self, layer_name, initial_strength=0.01, decay_rate=0.1):
        # Name of the layer whose kernel regularizer gets replaced.
        self.layer_name = layer_name
        # Strength at epoch 0; decays geometrically each epoch.
        self.initial_strength = initial_strength
        self.decay_rate = decay_rate

    def on_epoch_begin(self, epoch, logs=None):
        # Geometric decay: strength = initial * decay_rate ** epoch.
        strength = self.initial_strength * (self.decay_rate ** epoch)
        layer = self.model.get_layer(self.layer_name)
        # NOTE(review): rebinding kernel_regularizer on an already-built
        # layer may not update losses registered at build time — verify
        # this takes effect in the Keras version in use.
        layer.kernel_regularizer = regularizers.L2(strength)
# Usage in training
model.fit(x_train, y_train,
          callbacks=[RegularizationScheduler('dense_1', 0.01, 0.9)])

from keras import layers, regularizers, models
import numpy as np
# Compare different regularization strengths
def create_model(regularizer):
    """Build a small MLP with `regularizer` on each hidden layer's kernel."""
    return models.Sequential([
        layers.Dense(128, activation='relu', kernel_regularizer=regularizer),
        layers.Dense(64, activation='relu', kernel_regularizer=regularizer),
        layers.Dense(10, activation='softmax')
    ])
# Otherwise-identical models with different regularization approaches,
# for side-by-side comparison.
l1_model = create_model(regularizers.L1(0.01))
l2_model = create_model(regularizers.L2(0.01))
l1l2_model = create_model(regularizers.L1L2(l1=0.01, l2=0.01))

# Typical CNN regularization
# Typical starting-point strengths: lighter decay on conv kernels,
# somewhat stronger on dense layers.
conv_reg = regularizers.L2(0.0001)
dense_reg = regularizers.L2(0.001)
# Typical RNN regularization: L2 on the input kernel, orthogonality
# on the recurrent kernel.
kernel_reg = regularizers.L2(0.001)
recurrent_reg = regularizers.OrthogonalRegularizer(0.01)
# Strong regularization for small datasets
strong_reg = regularizers.L1L2(l1=0.01, l2=0.01)

Install with the Tessl CLI:

    npx tessl i tessl/pypi-keras-nightly