# pytransform3d — 3D transformations for Python with comprehensive rotation
# representations, coordinate conversions, and visualization tools.
#
# Operations on uncertain transformations with statistical estimation,
# covariance propagation, and sensor fusion for handling measurement
# uncertainties in 3D transformations.
#
# Functions for propagating uncertainty through transformation operations.
def concat_globally_uncertain_transforms(A2B, cov_A2B, B2C, cov_B2C):
    """Concatenate two transformations and propagate global uncertainty.

    Parameters
    ----------
    A2B : array, shape (4, 4)
        First transformation.
    cov_A2B : array, shape (6, 6)
        Covariance of the first transformation.
    B2C : array, shape (4, 4)
        Second transformation.
    cov_B2C : array, shape (6, 6)
        Covariance of the second transformation.

    Returns
    -------
    A2C : array, shape (4, 4)
        Composed transformation.
    cov_A2C : array, shape (6, 6)
        Propagated covariance.
    """
def concat_locally_uncertain_transforms(A2B, cov_A2B, B2C, cov_B2C):
    """Concatenate two transformations and propagate local uncertainty.

    Parameters
    ----------
    A2B : array, shape (4, 4)
        First transformation.
    cov_A2B : array, shape (6, 6)
        Local covariance of the first transformation.
    B2C : array, shape (4, 4)
        Second transformation.
    cov_B2C : array, shape (6, 6)
        Local covariance of the second transformation.

    Returns
    -------
    A2C : array, shape (4, 4)
        Composed transformation.
    cov_A2C : array, shape (6, 6)
        Propagated local covariance.
    """
def invert_uncertain_transform(A2B, cov_A2B):
    """Invert an uncertain transformation with covariance propagation.

    Parameters
    ----------
    A2B : array, shape (4, 4)
        Transformation matrix.
    cov_A2B : array, shape (6, 6)
        Covariance matrix.

    Returns
    -------
    B2A : array, shape (4, 4)
        Inverted transformation.
    cov_B2A : array, shape (6, 6)
        Propagated covariance.
    """


# Functions for estimating mean transformations from samples with
# uncertainty quantification.
def frechet_mean(poses, weights=None, init_pose=None, max_iter=100, tol=1e-9):
    """Compute the Fréchet mean of a set of pose samples.

    Parameters
    ----------
    poses : array, shape (n_poses, 4, 4)
        Transformation samples.
    weights : array, shape (n_poses,), optional
        Sample weights.
    init_pose : array, shape (4, 4), optional
        Initial estimate of the mean.
    max_iter : int, optional (default: 100)
        Maximum number of iterations.
    tol : float, optional (default: 1e-9)
        Convergence tolerance.

    Returns
    -------
    mean_pose : array, shape (4, 4)
        Mean transformation.
    """
def estimate_gaussian_transform_from_samples(A2Bs, max_iter=100):
    """Estimate Gaussian distribution parameters from transformation samples.

    Parameters
    ----------
    A2Bs : array, shape (n_samples, 4, 4)
        Transformation samples.
    max_iter : int, optional (default: 100)
        Maximum number of iterations for convergence.

    Returns
    -------
    mean_A2B : array, shape (4, 4)
        Mean transformation.
    cov_A2B : array, shape (6, 6)
        Covariance matrix.
    """
def estimate_gaussian_rotation_matrix_from_samples(Rs, max_iter=100):
    """Estimate mean rotation and covariance from rotation matrix samples.

    Parameters
    ----------
    Rs : array, shape (n_samples, 3, 3)
        Rotation matrix samples.
    max_iter : int, optional (default: 100)
        Maximum number of iterations.

    Returns
    -------
    mean_R : array, shape (3, 3)
        Mean rotation matrix.
    cov_R : array, shape (3, 3)
        Rotation covariance.
    """


# Multi-sensor pose fusion with uncertainty-weighted combination.
def pose_fusion(poses, covariances, check_inputs=True):
    """Fuse multiple pose estimates with associated uncertainties.

    Parameters
    ----------
    poses : list of arrays, each of shape (4, 4)
        Pose estimates.
    covariances : list of arrays, each of shape (6, 6)
        Covariance matrices.
    check_inputs : bool, optional (default: True)
        Validate input poses and covariances.

    Returns
    -------
    fused_pose : array, shape (4, 4)
        Fused pose estimate.
    fused_cov : array, shape (6, 6)
        Fused covariance.
    """


# Functions for visualizing uncertainty as ellipsoids and projections.
def to_ellipsoid(mean, cov, n_steps=20):
    """Convert a covariance matrix to ellipsoid surface points.

    Parameters
    ----------
    mean : array, shape (3,)
        Mean position.
    cov : array, shape (3, 3)
        Position covariance matrix.
    n_steps : int, optional (default: 20)
        Number of surface discretization steps.

    Returns
    -------
    ellipsoid : array, shape (n_steps**2, 3)
        Ellipsoid surface points.
    """
def to_projected_ellipsoid(mean, cov, n_steps=20):
    """Project a 3D uncertainty ellipsoid to a 2D plane.

    Parameters
    ----------
    mean : array, shape (3,)
        Mean position.
    cov : array, shape (3, 3)
        Position covariance.
    n_steps : int, optional (default: 20)
        Discretization steps.

    Returns
    -------
    projection : array, shape (n_steps, 2)
        2D ellipse points.
    """
def plot_projected_ellipsoid(ax, mean, cov, n_steps=20, **kwargs):
    """Plot a projected uncertainty ellipsoid on a 2D axis.

    Parameters
    ----------
    ax : matplotlib axis
        2D plotting axis.
    mean : array, shape (3,)
        Mean position.
    cov : array, shape (3, 3)
        Position covariance.
    n_steps : int, optional (default: 20)
        Ellipse discretization.
    **kwargs
        Additional keyword arguments forwarded to the plotting call.
    """


# Example: composing two uncertain transformations.
import numpy as np
import pytransform3d.uncertainty as pu
import pytransform3d.transformations as pt

# Create two uncertain transformations.  transform_from requires a rotation
# matrix as well as a translation, so pass the identity for pure translations.
T1 = pt.transform_from(R=np.eye(3), p=[1, 0, 0])
cov1 = np.diag([0.01, 0.01, 0.01, 0.001, 0.001, 0.001])  # [translation, rotation] variances
T2 = pt.transform_from(R=np.eye(3), p=[0, 1, 0])
cov2 = np.diag([0.02, 0.02, 0.02, 0.002, 0.002, 0.002])

# Compose with uncertainty propagation.
T_composed, cov_composed = pu.concat_globally_uncertain_transforms(T1, cov1, T2, cov2)
print(f"Composed transformation:\n{T_composed}")
print(f"Propagated uncertainty diagonal: {np.diag(cov_composed)}")

import numpy as np
import pytransform3d.uncertainty as pu
import pytransform3d.transformations as pt

# Simulate multiple pose measurements of the same true pose.
true_pose = pt.transform_from(R=np.eye(3), p=[2, 1, 0.5])


def _noisy_position_pose(T, sigma):
    """Return a pose at T's position perturbed by zero-mean Gaussian noise.

    Only the translation is perturbed: adding dense noise to a homogeneous
    4x4 matrix would destroy its SE(3) structure.
    """
    return pt.transform_from(R=np.eye(3), p=T[:3, 3] + np.random.normal(0, sigma, 3))


# Sensor 1: high precision in position, low precision in orientation.
pose1 = _noisy_position_pose(true_pose, 0.01)
cov1 = np.diag([0.01, 0.01, 0.01, 0.1, 0.1, 0.1])
# Sensor 2: low precision in position, high precision in orientation.
pose2 = _noisy_position_pose(true_pose, 0.05)
cov2 = np.diag([0.05, 0.05, 0.05, 0.01, 0.01, 0.01])
# Sensor 3: medium precision overall.
pose3 = _noisy_position_pose(true_pose, 0.02)
cov3 = np.diag([0.02, 0.02, 0.02, 0.02, 0.02, 0.02])

# Fuse the measurements, weighting by their covariances.
poses = [pose1, pose2, pose3]
covariances = [cov1, cov2, cov3]
fused_pose, fused_cov = pu.pose_fusion(poses, covariances)
print(f"True position: {true_pose[:3, 3]}")
print(f"Fused position: {fused_pose[:3, 3]}")
print(f"Position error: {np.linalg.norm(fused_pose[:3, 3] - true_pose[:3, 3]):.4f}")
print(f"Fused uncertainty (diagonal): {np.diag(fused_cov)}")

import numpy as np
import pytransform3d.uncertainty as pu
import pytransform3d.transformations as pt
import pytransform3d.rotations as pr  # was missing: pr is used below

# Generate noisy pose samples around a true pose.
true_pose = pt.transform_from(R=np.eye(3), p=[1, 2, 3])
n_samples = 100
pose_samples = []
for _ in range(n_samples):
    # Sample small translational and rotational perturbations.
    noise_pos = np.random.normal(0, 0.1, 3)
    noise_rot = np.random.normal(0, 0.05, 3)
    # Extrinsic x-y-z Euler angles: matrix_from_euler takes basis indices
    # (0, 1, 2), not an axis-name string.
    R_noise = pr.matrix_from_euler(noise_rot, 0, 1, 2, True)
    T_noise = pt.transform_from(R=R_noise, p=noise_pos)
    noisy_pose = pt.concat(true_pose, T_noise)
    pose_samples.append(noisy_pose)
pose_samples = np.array(pose_samples)

# Estimate mean and covariance from the samples.
mean_pose, cov_estimate = pu.estimate_gaussian_transform_from_samples(pose_samples)
print(f"True position: {true_pose[:3, 3]}")
print(f"Estimated position: {mean_pose[:3, 3]}")
print(f"Position error: {np.linalg.norm(mean_pose[:3, 3] - true_pose[:3, 3]):.4f}")
print(f"Estimated covariance diagonal: {np.diag(cov_estimate)}")

import numpy as np
import matplotlib.pyplot as plt
import pytransform3d.uncertainty as pu

# Define an uncertain position.
mean_pos = np.array([1, 2, 0])
cov_pos = np.array([[0.1, 0.05, 0.02],
                    [0.05, 0.2, 0.01],
                    [0.02, 0.01, 0.05]])

# Generate ellipsoid surface points.
ellipsoid_points = pu.to_ellipsoid(mean_pos, cov_pos, n_steps=20)

# Side-by-side 3D ellipsoid and its 2D projection.
fig = plt.figure(figsize=(12, 5))

# 3D ellipsoid.
ax1 = fig.add_subplot(121, projection='3d')
ax1.scatter(ellipsoid_points[:, 0], ellipsoid_points[:, 1], ellipsoid_points[:, 2],
            alpha=0.6, s=1)
ax1.scatter(*mean_pos, c='red', s=100, label='Mean')
ax1.set_xlabel('X')
ax1.set_ylabel('Y')
ax1.set_zlabel('Z')
ax1.legend()
ax1.set_title('3D Uncertainty Ellipsoid')

# 2D projection.  to_projected_ellipsoid expects the full (3, 3) position
# covariance (the projection happens internally), not a 2x2 slice.
ax2 = fig.add_subplot(122)
projection = pu.to_projected_ellipsoid(mean_pos, cov_pos)
ax2.plot(projection[:, 0], projection[:, 1], 'b-', alpha=0.7)
ax2.scatter(*mean_pos[:2], c='red', s=100, label='Mean')
ax2.set_xlabel('X')
ax2.set_ylabel('Y')
ax2.legend()
ax2.set_title('2D Projection')
ax2.set_aspect('equal')
plt.tight_layout()
plt.show()

import numpy as np
import pytransform3d.uncertainty as pu
import pytransform3d.transformations as pt

# Propagate uncertainty through a chain of transformations.
transforms = []
covariances = []

# Base to sensor mount.
T_base_sensor = pt.transform_from(R=np.eye(3), p=[0.1, 0, 0.5])
cov_base_sensor = np.diag([0.001, 0.001, 0.001, 0.0001, 0.0001, 0.0001])
transforms.append(T_base_sensor)
covariances.append(cov_base_sensor)

# Sensor mount to camera.
T_sensor_cam = pt.transform_from(R=np.eye(3), p=[0.05, 0.02, 0])
cov_sensor_cam = np.diag([0.002, 0.002, 0.002, 0.0005, 0.0005, 0.0005])
transforms.append(T_sensor_cam)
covariances.append(cov_sensor_cam)

# Fold the chain left to right, propagating covariance at every step.
T_result = transforms[0]
cov_result = covariances[0]
for i in range(1, len(transforms)):
    T_result, cov_result = pu.concat_globally_uncertain_transforms(
        T_result, cov_result, transforms[i], covariances[i]
    )
print(f"Final transformation: {T_result[:3, 3]}")
print(f"Final uncertainty (diagonal): {np.diag(cov_result)}")
print(f"Position std dev: {np.sqrt(np.diag(cov_result)[:3])}")
print(f"Orientation std dev: {np.sqrt(np.diag(cov_result)[3:])}")

# Install with Tessl CLI:
#   npx tessl i tessl/pypi-pytransform3d