Multi-backend deep learning framework that provides a unified, high-level API for building and training neural networks across JAX, TensorFlow, PyTorch, and OpenVINO backends.
—
Keras provides random number generation functions for sampling from various probability distributions and performing stochastic operations. These functions support reproducible randomness through seed management.
Functions for sampling from continuous probability distributions.
def normal(shape, mean=0.0, stddev=1.0, dtype=None, seed=None):
"""
Generate random samples from a normal (Gaussian) distribution.
Parameters:
- shape: Shape of the output tensor
- mean: Mean of the normal distribution (default: 0.0)
- stddev: Standard deviation of the normal distribution (default: 1.0)
- dtype: Data type of the output (default: None)
- seed: Random seed for reproducibility (default: None)
Returns:
Tensor of random samples from normal distribution
"""
def uniform(shape, minval=0.0, maxval=1.0, dtype=None, seed=None):
"""
Generate random samples from a uniform distribution.
Parameters:
- shape: Shape of the output tensor
- minval: Lower bound of the uniform distribution (default: 0.0)
- maxval: Upper bound of the uniform distribution (default: 1.0)
- dtype: Data type of the output (default: None)
- seed: Random seed for reproducibility (default: None)
Returns:
Tensor of random samples from uniform distribution
"""
def truncated_normal(shape, mean=0.0, stddev=1.0, dtype=None, seed=None):
"""
Generate random samples from a truncated normal distribution.
Values more than 2 standard deviations from the mean are discarded and redrawn.
Parameters:
- shape: Shape of the output tensor
- mean: Mean of the normal distribution (default: 0.0)
- stddev: Standard deviation of the normal distribution (default: 1.0)
- dtype: Data type of the output (default: None)
- seed: Random seed for reproducibility (default: None)
Returns:
Tensor of random samples from truncated normal distribution
"""
def beta(shape, alpha, beta, dtype=None, seed=None):
"""
Generate random samples from a beta distribution.
Parameters:
- shape: Shape of the output tensor
- alpha: Alpha parameter of the beta distribution
- beta: Beta parameter of the beta distribution
- dtype: Data type of the output (default: None)
- seed: Random seed for reproducibility (default: None)
Returns:
Tensor of random samples from beta distribution
"""
def gamma(shape, alpha, beta=None, dtype=None, seed=None):
"""
Generate random samples from a gamma distribution.
Parameters:
- shape: Shape of the output tensor
- alpha: Shape parameter (alpha) of the gamma distribution
- beta: Rate parameter (beta) of the gamma distribution (default: None)
- dtype: Data type of the output (default: None)
- seed: Random seed for reproducibility (default: None)
Returns:
Tensor of random samples from gamma distribution
"""
Functions for sampling from discrete probability distributions.
def randint(shape, minval, maxval, dtype='int32', seed=None):
"""
Generate random integers from a uniform distribution.
Parameters:
- shape: Shape of the output tensor
- minval: Lower bound (inclusive) of the range
- maxval: Upper bound (exclusive) of the range
- dtype: Data type of the output (default: 'int32')
- seed: Random seed for reproducibility (default: None)
Returns:
Tensor of random integers
"""
def binomial(shape, counts, probabilities, dtype=None, seed=None):
"""
Generate random samples from binomial distributions.
Parameters:
- shape: Shape of the output tensor
- counts: Number of trials for each binomial distribution
- probabilities: Success probabilities for each trial
- dtype: Data type of the output (default: None)
- seed: Random seed for reproducibility (default: None)
Returns:
Tensor of random samples from binomial distributions
"""
def categorical(logits, num_samples, dtype=None, seed=None):
"""
Generate random samples from categorical distributions.
Parameters:
- logits: 2D tensor of shape (batch_size, num_classes) with unnormalized log probabilities
- num_samples: Number of samples to draw for each distribution
- dtype: Data type of the output (default: None)
- seed: Random seed for reproducibility (default: None)
Returns:
Tensor of shape (batch_size, num_samples) with sampled class indices
"""
Functions for data manipulation and stochastic operations.
def shuffle(x, axis=0, seed=None):
"""
Randomly shuffle a tensor along the specified axis.
Parameters:
- x: Input tensor to shuffle
- axis: Axis along which to shuffle (default: 0)
- seed: Random seed for reproducibility (default: None)
Returns:
Shuffled tensor with same shape as input
"""
def dropout(x, rate, noise_shape=None, seed=None):
"""
Randomly sets input units to 0 with frequency `rate` at each step during training.
Parameters:
- x: Input tensor
- rate: Fraction of input units to drop (between 0 and 1)
- noise_shape: Shape for generated random values (default: None, uses input shape)
- seed: Random seed for reproducibility (default: None)
Returns:
Tensor with same shape as input, with random units set to 0
"""
Tools for managing random number generation seeds for reproducible results.
class SeedGenerator:
"""
Random seed generator for reproducible randomness across operations.
Manages seed state to ensure reproducible random number generation
while allowing for different random sequences.
Usage:
```python
seed_gen = SeedGenerator(42)
x = keras.random.normal((10, 10), seed=seed_gen)
y = keras.random.uniform((5, 5), seed=seed_gen)
```
"""
def __init__(self, seed=None):
"""
Initialize the seed generator.
Parameters:
- seed: Initial seed value (default: None for random initialization)
"""
def next(self, ordered=True):
"""
Generate the next seed value.
Parameters:
- ordered: Whether to generate seeds in deterministic order (default: True)
Returns:
Next seed value
"""
def state(self):
"""
Get the current state of the seed generator.
Returns:
Current seed generator state
"""
import keras
from keras import random
# Generate random tensors from different distributions
normal_samples = random.normal((100, 10), mean=0.0, stddev=1.0)
uniform_samples = random.uniform((50, 5), minval=-1.0, maxval=1.0)
truncated_samples = random.truncated_normal((20, 3), mean=0.0, stddev=0.5)
# Discrete sampling
integers = random.randint((10, 10), minval=0, maxval=100)
coin_flips = random.binomial((100,), counts=1, probabilities=0.5)
# Categorical sampling from logits
logits = keras.ops.array([[1.0, 2.0, 3.0], [2.0, 1.0, 0.5]])
samples = random.categorical(logits, num_samples=5)
import keras
from keras import random
# Using seed for reproducibility
seed = 42
x1 = random.normal((10, 10), seed=seed)
x2 = random.normal((10, 10), seed=seed) # Same as x1
# Using SeedGenerator for multiple operations with different seeds
seed_gen = random.SeedGenerator(42)
y1 = random.normal((10, 10), seed=seed_gen)
y2 = random.uniform((10, 10), seed=seed_gen) # Different from y1 but reproducible
# Reset seed generator for reproducible sequences
seed_gen = random.SeedGenerator(42)
z1 = random.normal((10, 10), seed=seed_gen)
z2 = random.uniform((10, 10), seed=seed_gen)
# z1 and z2 will be identical to y1 and y2
import keras
from keras import random
def data_augmentation(x, training=True):
"""Apply random data augmentation during training."""
if training:
# Random dropout
x = random.dropout(x, rate=0.1)
# Add random noise
noise = random.normal(keras.ops.shape(x), stddev=0.01)
x = x + noise
# Random shuffle batch
x = random.shuffle(x)
return x
# Use in a custom layer
class AugmentationLayer(keras.layers.Layer):
def __init__(self, dropout_rate=0.1, noise_stddev=0.01):
super().__init__()
self.dropout_rate = dropout_rate
self.noise_stddev = noise_stddev
self.seed_gen = random.SeedGenerator()
def call(self, x, training=None):
if training:
x = random.dropout(x, self.dropout_rate, seed=self.seed_gen)
noise = random.normal(
keras.ops.shape(x),
stddev=self.noise_stddev,
seed=self.seed_gen
)
x = x + noise
return x
import keras
from keras import random
class CustomInitializer(keras.initializers.Initializer):
def __init__(self, seed=None):
self.seed = seed
def __call__(self, shape, dtype=None, **kwargs):
# Custom initialization using multiple random distributions
base = random.uniform(shape, minval=-0.1, maxval=0.1, seed=self.seed)
perturbation = random.normal(shape, stddev=0.01, seed=self.seed)
return base + perturbation
# Use custom initializer
layer = keras.layers.Dense(
64,
kernel_initializer=CustomInitializer(seed=42),
activation='relu'
)
SeedGenerator for reproducible yet varied random sequences.
Install with Tessl CLI:
npx tessl i tessl/pypi-keras