CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-stheno

Implementation of Gaussian processes in Python with support for AutoGrad, TensorFlow, PyTorch, and JAX

Overview
Eval results
Files

docs/multi-output.md

Multi-Output Gaussian Processes

Support for multi-output Gaussian processes using specialized kernel and mean functions that handle vector-valued processes and cross-covariances between different outputs. This enables modeling of correlated time series, multi-task learning, and vector-valued function approximation.

Capabilities

Multi-Output Kernels

Kernels that handle multiple output dimensions and cross-covariances between different outputs within a vector-valued process.

class MultiOutputKernel:
    """Kernel of a vector-valued process built from several scalar GPs.

    Evaluating this kernel yields the covariance between all outputs,
    including cross-covariances between the component processes.
    (Documentation stub — no implementation is shown on this page.)
    """

    def __init__(self, measure, *ps):
        """
        Initialize multi-output kernel.

        Parameters:
        - measure: Measure to take kernels from
        - *ps: Processes that make up multi-valued process
        """

    measure: Measure        # Measure to take kernels from
    ps: Tuple[GP, ...]     # Processes that make up multi-valued process

    def render(self, formatter=None):
        """
        Render kernel as string with optional formatting.

        Parameters:
        - formatter: Optional custom formatter function applied to
          values when building the string representation

        Returns:
        - str: String representation of the kernel
        """

Multi-Output Means

Mean functions that handle multiple output dimensions and can compute mean values for vector-valued processes.

class MultiOutputMean:
    """Mean function of a vector-valued process built from several GPs.

    Evaluating this mean yields the stacked mean values of every
    component process at the given inputs.
    (Documentation stub — no implementation is shown on this page.)
    """

    def __init__(self, measure, *ps):
        """
        Initialize multi-output mean.

        Parameters:
        - measure: Measure to take means from
        - *ps: Processes that make up multi-valued process
        """

    def __call__(self, x):
        """
        Evaluate mean at input locations.

        Parameters:
        - x: Input locations to evaluate at

        Returns:
        - Mean values for all outputs at input locations
        """

    measure: Measure        # Measure to take means from
    ps: Tuple[GP, ...]     # Processes that make up multi-valued process

    def render(self, formatter=None):
        """
        Render mean as string with optional formatting.

        Parameters:
        - formatter: Optional custom formatter function applied to
          values when building the string representation

        Returns:
        - str: String representation of the mean
        """

Ambiguous Dimensionality Kernels

Special kernel type for cases where the output dimensionality cannot be automatically inferred and must be specified explicitly.

class AmbiguousDimensionalityKernel:
    """
    Kernel whose output dimensionality cannot be inferred automatically.
    Used as a base class for kernels that require an explicit
    dimensionality specification; see `dimensionality` and `infer_size`
    for how dimensionality is queried.
    """

Utility Functions

Helper functions for working with multi-output kernels and inferring their properties.

def infer_size(kernel, x):
    """
    Infer the size of `kernel` evaluated at input `x`.

    Parameters:
    - kernel: Kernel to evaluate
    - x: Input to evaluate kernel at

    Returns:
    - int: Inferred size of kernel output
    """

def dimensionality(kernel):
    """
    Infer the output dimensionality of `kernel` (number of outputs of
    the vector-valued process it describes).

    Parameters:
    - kernel: Kernel to analyze

    Returns:
    - int: Output dimensionality of kernel
    """

Usage Examples

Basic Multi-Output GP Construction

import stheno
import numpy as np

# One GP per physical quantity, each with its own length scale.
gp_temperature = stheno.GP(kernel=stheno.EQ().stretch(2.0), name="temperature")
gp_pressure = stheno.GP(kernel=stheno.Matern52().stretch(1.5), name="pressure")
gp_humidity = stheno.GP(kernel=stheno.EQ().stretch(0.8), name="humidity")

# Bundle the three processes into a single vector-valued process.
joint = stheno.cross(gp_temperature, gp_pressure, gp_humidity)

# Finite-dimensional distribution over all outputs at 50 shared inputs.
grid = np.linspace(0, 10, 50)
joint_fdd = joint(grid)

# Outputs are stacked, so the mean has 3 * 50 = 150 rows.
print(f"Multi-output FDD shape: {joint_fdd.mean.shape}")  # Should be (150, 1) for 3×50

Correlated Outputs with Shared Components

# A single latent GP induces correlation between all three outputs.
latent = stheno.GP(kernel=stheno.EQ().stretch(3.0), name="latent")

# Each output mixes the latent process with its own independent noise GP.
out_a = latent + 0.5 * stheno.GP(kernel=stheno.Matern32(), name="output1_noise")
out_b = 0.8 * latent + stheno.GP(kernel=stheno.EQ().stretch(0.5), name="output2_noise")
out_c = -0.6 * latent + stheno.GP(kernel=stheno.Linear(), name="output3_noise")

# Register readable names for the derived processes on a measure.
measure = stheno.Measure()
with measure:
    measure.name(out_a, "output1")
    measure.name(out_b, "output2")
    measure.name(out_c, "output3")

# Joint vector-valued process over the three correlated outputs.
correlated_multi = stheno.cross(out_a, out_b, out_c)

# Draw joint samples; correlation shows up across the stacked outputs.
x = np.linspace(0, 5, 30)
samples = correlated_multi(x).sample(3)

# Split the stacked sample vector back into per-output blocks.
n_outputs = 3
n_points = len(x)
per_output = samples.reshape(n_outputs, n_points, -1)

print(f"Output 1 correlation with Output 2: {np.corrcoef(per_output[0,:,0], per_output[1,:,0])[0,1]:.3f}")

Multi-Task Learning

# Multi-task regression: every task shares one trend process and adds
# a scaled, task-specific residual process.
shared_component = stheno.GP(kernel=stheno.EQ().stretch(2.0), name="shared")

# (length scale of residual kernel, weight of residual) for each task.
task_configs = [(0.5, 0.5), (0.8, 0.3), (1.2, 0.7)]
tasks = []
for i, (scale, weight) in enumerate(task_configs, start=1):
    specific = stheno.GP(
        kernel=stheno.Matern52().stretch(scale), name=f"task{i}_specific"
    )
    tasks.append(shared_component + weight * specific)

# Joint GP over all three tasks.
multi_task_gp = stheno.cross(*tasks)

# Synthetic training data sampled from the prior at shared inputs.
x_train = np.random.uniform(0, 5, 40)
y_train = multi_task_gp(x_train).sample()

# Exact conditioning on observations of all tasks at once.
obs = stheno.Observations(multi_task_gp(x_train), y_train)
posterior_multi = multi_task_gp.condition(obs)

# Posterior predictions with marginal credible bounds.
x_test = np.linspace(0, 5, 50)
predictions = posterior_multi(x_test)
mean, lower, upper = predictions.marginal_credible_bounds()

# The stacked mean splits into one row per task.
n_tasks = 3
pred_mean_per_task = mean.reshape(n_tasks, len(x_test))

Custom Multi-Output Kernels

# A measure coordinates the prior relationships between processes.
measure = stheno.Measure()

# Bare processes to be attached to the measure below.
p_first = stheno.GP(name="proc1")
p_second = stheno.GP(name="proc2")

with measure:
    # Attach each process independently with its own mean and kernel.
    # NOTE(review): `add_independent_gp` is shown as documented here —
    # confirm it exists in the installed stheno version.
    measure.add_independent_gp(p_first, stheno.ZeroMean(), stheno.EQ())
    measure.add_independent_gp(p_second, stheno.ZeroMean(), stheno.Matern52())

# Build the joint kernel and mean across both processes.
multi_kernel = stheno.MultiOutputKernel(measure, p_first, p_second)
multi_mean = stheno.MultiOutputMean(measure, p_first, p_second)

print(f"Multi-output kernel: {multi_kernel.render()}")
print(f"Multi-output mean: {multi_mean.render()}")

# The combined pieces can seed a new GP on the same measure.
combined_gp = stheno.GP(mean=multi_mean, kernel=multi_kernel, measure=measure)

Working with Different Output Dimensions

# Processes whose kernels operate on very different length scales.
fine = stheno.GP(kernel=stheno.EQ().stretch(0.1), name="small")
mid = stheno.GP(kernel=stheno.EQ().stretch(1.0), name="medium")
coarse = stheno.GP(kernel=stheno.EQ().stretch(10.0), name="large")

# Stack them into one multi-scale, multi-output process.
multi_scale = stheno.cross(fine, mid, coarse)

# Evaluate at a shared grid of inputs.
grid = np.linspace(0, 1, 20)
fdd = multi_scale(grid)

# Query the kernel's output dimensionality and its size at this input.
kernel_dim = stheno.dimensionality(multi_scale.kernel)
inferred_size = stheno.infer_size(multi_scale.kernel, grid)

print(f"Kernel dimensionality: {kernel_dim}")
print(f"Inferred size at input: {inferred_size}")
print(f"Actual FDD mean shape: {fdd.mean.shape}")

Sparse Multi-Output GPs

# Large-scale multi-output problem: many observations per output,
# approximated through a small shared set of inducing points.
n_outputs = 4
n_obs_per_output = 200
n_inducing = 30

# One GP per output, each with a randomly drawn length scale.
outputs = [
    stheno.GP(
        kernel=stheno.EQ().stretch(np.random.uniform(0.5, 2.0)),
        name=f"output_{i}",
    )
    for i in range(n_outputs)
]

# Joint vector-valued process over all outputs.
multi_output = stheno.cross(*outputs)

# Synthetic observations drawn from the prior.
x_obs = np.random.uniform(0, 10, n_obs_per_output)
y_obs = multi_output(x_obs).sample()

# All outputs share the same inducing-point locations.
x_inducing = np.linspace(0, 10, n_inducing)
u_multi = multi_output(x_inducing)

# Sparse (pseudo-point) observations instead of exact ones.
sparse_obs = stheno.PseudoObservations(u_multi, multi_output(x_obs), y_obs)

# Condition the joint process on the sparse approximation.
posterior_sparse = multi_output.condition(sparse_obs)

# The ELBO quantifies the quality of the sparse approximation.
elbo = sparse_obs.elbo(multi_output.measure)
print(f"Multi-output sparse GP ELBO: {elbo:.3f}")

Independent vs Correlated Outputs

# Two independent outputs: no shared components, no cross-correlation.
indep_a = stheno.GP(kernel=stheno.EQ(), name="independent1")
indep_b = stheno.GP(kernel=stheno.Matern52(), name="independent2")
independent_multi = stheno.cross(indep_a, indep_b)

# Two correlated outputs: both mix in the same shared latent GP.
shared = stheno.GP(kernel=stheno.EQ().stretch(2.0), name="shared")
corr_a = shared + 0.5 * stheno.GP(kernel=stheno.Matern32(), name="corr1_noise")
corr_b = shared + 0.3 * stheno.GP(kernel=stheno.Matern32(), name="corr2_noise")
correlated_multi = stheno.cross(corr_a, corr_b)

# Sample both joint processes on the same grid.
x = np.linspace(0, 5, 50)
indep_samples = independent_multi(x).sample()
corr_samples = correlated_multi(x).sample()

# Split each stacked sample vector into its two output rows.
indep_reshaped = indep_samples.reshape(2, len(x))
corr_reshaped = corr_samples.reshape(2, len(x))

# Empirical correlation between the two outputs of each sample.
indep_correlation = np.corrcoef(indep_reshaped[0], indep_reshaped[1])[0,1]
corr_correlation = np.corrcoef(corr_reshaped[0], corr_reshaped[1])[0,1]

print(f"Independent outputs correlation: {indep_correlation:.3f}")
print(f"Correlated outputs correlation: {corr_correlation:.3f}")

Install with Tessl CLI

npx tessl i tessl/pypi-stheno

docs

core-gp.md

gp-operations.md

index.md

lazy.md

measure.md

multi-output.md

observations.md

random.md

tile.json