CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/pypi-trimesh

Import, export, process, analyze and view triangular meshes.

Overview
Eval results
Files

point-clouds.mddocs/

Point Clouds and Alternative Representations

Point cloud processing, conversion between different geometric representations, and point-based analysis operations. Trimesh provides comprehensive support for working with point cloud data and converting between different geometric representations.

Capabilities

PointCloud Class

Main class for working with point cloud data.

# API stub for trimesh's point-cloud container: bodies are intentionally
# empty — only signatures and contracts are documented on this page.
class PointCloud(Geometry3D):
    """Point cloud representation with analysis capabilities"""

    def __init__(self, vertices, colors=None, **kwargs):
        """
        Initialize point cloud.

        Parameters:
        - vertices: (n, 3) point coordinates
        - colors: (n, 3) or (n, 4) point colors (optional)
        - **kwargs: additional point cloud properties
        """

    @property
    def vertices(self) -> np.ndarray:
        """Point coordinates as (n, 3) array"""

    @property
    def colors(self) -> np.ndarray:
        """Point colors as (n, 3) or (n, 4) array"""
        # Colors are read-write: see the setter below.

    @colors.setter
    def colors(self, colors: np.ndarray) -> None:
        """Set point colors"""
        # NOTE(review): presumably must match len(vertices) — confirm
        # whether a mismatched length raises or is silently accepted.

    @property
    def bounds(self) -> np.ndarray:
        """Bounding box of point cloud"""
        # NOTE(review): presumably a (2, 3) (min, max) pair — confirm shape.

    @property
    def extents(self) -> np.ndarray:
        """Size in each dimension"""
        # Per-axis size, i.e. bounds[1] - bounds[0].

    @property
    def centroid(self) -> np.ndarray:
        """Geometric center of points"""

Point Cloud Creation and Loading

Create point clouds from various sources.

def sample_surface(self, count, **kwargs) -> np.ndarray:
    """
    Sample points uniformly on mesh surface.

    Parameters:
    - count: int, number of points to sample
    - **kwargs: sampling options

    Returns:
    (count, 3) sampled surface points
    """
    # NOTE(review): upstream trimesh's sample_surface returns a
    # (points, face_index) tuple — confirm this wrapper returns points only.

def sample_surface_even(self, count, **kwargs) -> np.ndarray:
    """
    Sample points with even distribution on surface.

    Parameters:
    - count: int, number of points to sample
    - **kwargs: sampling options

    Returns:
    (count, 3) evenly distributed surface points
    """
    # NOTE(review): upstream's even sampling uses rejection and may return
    # fewer than `count` points — confirm the exact-count guarantee here.

def sample_volume(self, count, **kwargs) -> np.ndarray:
    """
    Sample points inside mesh volume.

    Parameters:
    - count: int, number of points to sample
    - **kwargs: volume sampling options

    Returns:
    (count, 3) points inside mesh volume
    """
    # "Inside" requires a well-defined interior: the usage example below
    # guards this call with mesh.is_watertight.

def load_point_cloud(file_obj, **kwargs) -> PointCloud:
    """
    Load point cloud from file.

    Parameters:
    - file_obj: file path or file-like object
    - **kwargs: loading options

    Returns:
    PointCloud object
    """
    # NOTE(review): supported formats are not listed here; the examples use
    # PLY — confirm which extensions dispatch to a point-cloud loader.

Point Cloud Analysis

Analyze point cloud properties and structure.

def convex_hull(self) -> 'Trimesh':
    """
    Compute convex hull of point cloud.

    Returns:
    Trimesh representing convex hull
    """
    # NOTE(review): upstream trimesh exposes convex_hull as a *property*,
    # not a method — confirm the call syntax used in the examples below.

def bounding_box(self) -> 'Trimesh':
    """
    Axis-aligned bounding box as mesh.

    Returns:
    Box mesh containing all points
    """
    # NOTE(review): upstream bounding_box is a property returning a Box
    # primitive — confirm method-call semantics for this API.

def bounding_box_oriented(self) -> 'Trimesh':
    """
    Oriented bounding box as mesh.

    Returns:
    Oriented box mesh with minimal volume
    """
    # Minimal-volume OBB; always at most the AABB volume (the analysis
    # example below compares the two).

def principal_components(self) -> tuple:
    """
    Principal component analysis of point cloud.

    Returns:
    tuple: (eigenvalues, eigenvectors, centroid)
    - eigenvalues: (3,) principal component magnitudes
    - eigenvectors: (3, 3) principal directions
    - centroid: (3,) point cloud center
    """
    # NOTE(review): presumably the eigen-decomposition of the point
    # covariance about `centroid`; ordering (ascending vs descending
    # eigenvalues) is unspecified — confirm.

Point Cloud Clustering and Segmentation

Group and segment points based on various criteria.

def k_means(self, k, **kwargs) -> tuple:
    """
    K-means clustering of points.

    Parameters:
    - k: int, number of clusters
    - **kwargs: clustering options

    Returns:
    tuple: (cluster_centers, point_labels)
    - cluster_centers: (k, 3) cluster center coordinates
    - point_labels: (n,) cluster assignment for each point
    """
    # Labels are index-aligned with self.vertices.
    # NOTE(review): k-means is typically seed-dependent — confirm whether
    # kwargs expose a random seed for reproducibility.

def dbscan(self, eps=0.1, min_samples=10, **kwargs) -> np.ndarray:
    """
    DBSCAN density-based clustering.

    Parameters:
    - eps: float, neighborhood radius
    - min_samples: int, minimum points per cluster
    - **kwargs: DBSCAN options

    Returns:
    (n,) cluster labels (-1 for noise points)
    """
    # -1 marks noise, matching the standard DBSCAN labeling convention;
    # the number of clusters is data-dependent (no `k` parameter).

def remove_outliers(self, nb_neighbors=20, std_ratio=2.0) -> 'PointCloud':
    """
    Remove statistical outliers from point cloud.

    Parameters:
    - nb_neighbors: int, number of neighbors to analyze
    - std_ratio: float, standard deviation threshold
    - Returns a new cloud; self is not modified (per the examples below).

    Returns:
    Filtered PointCloud with outliers removed
    """
    # NOTE(review): presumably drops points whose mean neighbor distance
    # exceeds std_ratio standard deviations of the global mean — confirm
    # the exact criterion.

Point Cloud Processing

Filter, downsample, and process point cloud data.

def voxel_downsample(self, voxel_size) -> 'PointCloud':
    """
    Downsample using voxel grid.

    Parameters:
    - voxel_size: float, voxel size for downsampling
    (in the same length units as the point coordinates)

    Returns:
    Downsampled PointCloud
    """
    # NOTE(review): presumably one representative point per occupied voxel
    # (centroid or nearest) — confirm which.

def uniform_downsample(self, factor) -> 'PointCloud':
    """
    Uniform downsampling by factor.

    Parameters:
    - factor: int, downsampling factor

    Returns:
    Downsampled PointCloud (every factor-th point)
    """
    # Simple stride-based selection; result size is roughly n // factor.

def filter_radius(self, radius, min_neighbors=1) -> 'PointCloud':
    """
    Filter points based on local density.

    Parameters:
    - radius: float, neighborhood radius
    - min_neighbors: int, minimum neighbors required

    Returns:
    Filtered PointCloud
    """
    # Keeps only points with at least min_neighbors others within `radius`;
    # useful after voxel downsampling to drop sparse stragglers.

def smooth(self, iterations=1, factor=0.5) -> 'PointCloud':
    """
    Smooth point positions using neighbor averaging.

    Parameters:
    - iterations: int, number of smoothing iterations
    - factor: float, smoothing strength (0-1)

    Returns:
    Smoothed PointCloud
    """
    # NOTE(review): presumably factor=0 leaves points unchanged and
    # factor=1 replaces each point with its neighbor average — confirm.

Normal Estimation

Estimate surface normals for point clouds.

def estimate_normals(self, radius=None, k_neighbors=30) -> np.ndarray:
    """
    Estimate surface normals at each point.

    Parameters:
    - radius: float, neighborhood radius (None for k-nearest)
    - k_neighbors: int, number of neighbors for estimation

    Returns:
    (n, 3) estimated normal vectors
    """
    # When radius is given it presumably takes precedence over k_neighbors
    # (docstring says None falls back to k-nearest) — confirm precedence.
    # Estimated normals have ambiguous sign; see orient_normals below.

def orient_normals(self, normals, point=None, camera_location=None) -> np.ndarray:
    """
    Orient normals consistently.

    Parameters:
    - normals: (n, 3) normal vectors to orient
    - point: (3,) reference point for orientation
    - camera_location: (3,) camera position for orientation

    Returns:
    (n, 3) consistently oriented normals
    """
    # `point` and `camera_location` are alternative references for flipping
    # normal signs. NOTE(review): behavior when both (or neither) are given
    # is unspecified — confirm.

Surface Reconstruction

Reconstruct mesh surfaces from point clouds.

def poisson_reconstruction(self, normals=None, depth=8, **kwargs) -> 'Trimesh':
    """
    Poisson surface reconstruction.

    Parameters:
    - normals: (n, 3) surface normals (estimated if None)
    - depth: int, octree depth for reconstruction
    - **kwargs: Poisson reconstruction options

    Returns:
    Reconstructed mesh surface
    """
    # Higher depth presumably yields finer (and costlier) meshes — confirm.
    # The usage example below checks the result against None, suggesting
    # reconstruction can fail without raising.

def ball_pivoting_reconstruction(self, radii=None, **kwargs) -> 'Trimesh':
    """
    Ball pivoting surface reconstruction.

    Parameters:
    - radii: array of ball radii to try
    - **kwargs: ball pivoting options

    Returns:
    Reconstructed mesh surface
    """
    # NOTE(review): default radii when None is unspecified — presumably
    # derived from average point spacing; confirm.

def alpha_shape_reconstruction(self, alpha) -> 'Trimesh':
    """
    Alpha shape surface reconstruction.

    Parameters:
    - alpha: float, alpha parameter

    Returns:
    Alpha shape mesh
    """
    # NOTE(review): alpha conventions vary between libraries (radius vs
    # 1/radius) — confirm which way smaller alpha tightens the shape here.

Registration and Alignment

Align point clouds to each other or reference frames.

def register_icp(self, target, **kwargs) -> tuple:
    """
    Iterative Closest Point registration.

    Parameters:
    - target: PointCloud or Trimesh to register to
    - **kwargs: ICP options

    Returns:
    tuple: (transform_matrix, rms_error, iterations)
    """
    # transform_matrix is a 4x4 homogeneous transform that maps *self*
    # onto *target* ("register to" the target) — apply it to self, or its
    # inverse to the target.

def register_colored_icp(self, target, **kwargs) -> tuple:
    """
    Colored ICP using both geometry and color information.

    Parameters:
    - target: PointCloud with colors
    - **kwargs: colored ICP options

    Returns:
    tuple: (transform_matrix, rms_error, iterations)
    """
    # NOTE(review): presumably both self and target need per-point colors;
    # behavior when colors are missing is unspecified — confirm.

def align_to_principal_axes(self) -> tuple:
    """
    Align point cloud to principal component axes.

    Returns:
    tuple: (aligned_points, transform_matrix)
    """
    # Returns the aligned coordinates plus the transform that produced
    # them. NOTE(review): presumably self is not modified in place — confirm.

Point Cloud Comparison

Compare point clouds and compute metrics.

def distance_to_points(self, other_points) -> np.ndarray:
    """
    Distance from each point to nearest point in other set.

    Parameters:
    - other_points: (m, 3) comparison point coordinates

    Returns:
    (n,) distances to nearest points
    """
    # One distance per point of *self* (index-aligned with self.vertices);
    # this is the directed/one-sided nearest-neighbor distance.

def hausdorff_distance(self, other) -> float:
    """
    Hausdorff distance to another point cloud.

    Parameters:
    - other: PointCloud or (m, 3) points

    Returns:
    float, Hausdorff distance
    """
    # NOTE(review): unspecified whether this is the directed or the
    # symmetric (max of both directions) Hausdorff distance — confirm.

def chamfer_distance(self, other) -> float:
    """
    Chamfer distance to another point cloud.

    Parameters:
    - other: PointCloud or (m, 3) points

    Returns:
    float, symmetric Chamfer distance
    """
    # Symmetric by contract: averages nearest-neighbor distances in both
    # directions (self -> other and other -> self).

Usage Examples

Point Cloud Creation and Sampling

import trimesh
import numpy as np
import matplotlib.pyplot as plt

# Load a mesh to sample from.
mesh = trimesh.load('model.stl')

# Uniform random sampling over the surface.
surface_points = mesh.sample_surface(5000)
print(f"Sampled {len(surface_points)} surface points")

# Wrap the samples in a PointCloud and report its basic geometry.
surface_cloud = trimesh.PointCloud(surface_points)
print(f"Point cloud bounds: {surface_cloud.bounds}")
print(f"Point cloud extents: {surface_cloud.extents}")
print(f"Point cloud centroid: {surface_cloud.centroid}")

# Evenly-distributed surface sampling for comparison.
even_cloud = trimesh.PointCloud(mesh.sample_surface_even(1000))

# Interior sampling only makes sense for a closed (watertight) mesh.
if mesh.is_watertight:
    volume_cloud = trimesh.PointCloud(mesh.sample_volume(2000))

    # Lay the three sampling results out side by side and display them:
    # random surface, even surface, then volume samples.
    offsets = ([-5, 0, 0], [0, 0, 0], [5, 0, 0])
    clouds = (surface_cloud, even_cloud, volume_cloud)
    scene = trimesh.Scene(
        [cloud.apply_translation(offset) for cloud, offset in zip(clouds, offsets)]
    )
    scene.show()

Point Cloud Analysis

# Load point cloud from file
point_cloud = trimesh.load('scan.ply')

# Principal component analysis of the point distribution
eigenvalues, eigenvectors, centroid = point_cloud.principal_components()
print(f"Principal components: {eigenvalues}")
print(f"Centroid: {centroid}")

# Compute convex hull
hull = point_cloud.convex_hull()
print(f"Convex hull volume: {hull.volume:.4f}")
# Label fixed: len(point_cloud.vertices) is the total point count — the
# original printed it as "Points inside hull", which it is not.
print(f"Total points in cloud: {len(point_cloud.vertices)}")

# Oriented bounding box
obb = point_cloud.bounding_box_oriented()
print(f"OBB volume: {obb.volume:.4f}")

# Axis-aligned bounding box
aabb = point_cloud.bounding_box()
print(f"AABB volume: {aabb.volume:.4f}")

# Compare bounding methods (OBB volume is at most the AABB volume)
print(f"OBB vs AABB volume ratio: {obb.volume / aabb.volume:.3f}")

# Visualize analysis: cloud, hull and OBB side by side
scene = trimesh.Scene([
    point_cloud,
    hull.apply_translation([10, 0, 0]),
    obb.apply_translation([20, 0, 0])
])
scene.show()

Point Cloud Clustering

# Generate test point cloud with multiple clusters
np.random.seed(42)
cluster_centers = np.array([[0, 0, 0], [5, 0, 0], [0, 5, 0], [5, 5, 0]])
n_points_per_cluster = 500

points = []
true_labels = []
for i, center in enumerate(cluster_centers):
    cluster_points = np.random.normal(center, 0.5, (n_points_per_cluster, 3))
    points.append(cluster_points)
    true_labels.extend([i] * n_points_per_cluster)

all_points = np.vstack(points)
point_cloud = trimesh.PointCloud(all_points)

# K-means clustering
k = 4
cluster_centers_est, labels_kmeans = point_cloud.k_means(k)
print(f"K-means found {len(cluster_centers_est)} clusters")

# DBSCAN clustering
labels_dbscan = point_cloud.dbscan(eps=1.0, min_samples=10)
n_clusters_dbscan = len(set(labels_dbscan)) - (1 if -1 in labels_dbscan else 0)
n_noise = list(labels_dbscan).count(-1)
print(f"DBSCAN found {n_clusters_dbscan} clusters, {n_noise} noise points")

# Visualize clustering results
fig = plt.figure(figsize=(15, 5))

# Original clusters
ax1 = fig.add_subplot(131, projection='3d')
ax1.scatter(all_points[:, 0], all_points[:, 1], all_points[:, 2], c=true_labels, cmap='tab10')
ax1.set_title('True Clusters')

# K-means results
ax2 = fig.add_subplot(132, projection='3d')
ax2.scatter(all_points[:, 0], all_points[:, 1], all_points[:, 2], c=labels_kmeans, cmap='tab10')
ax2.scatter(cluster_centers_est[:, 0], cluster_centers_est[:, 1], cluster_centers_est[:, 2], 
           c='red', marker='x', s=100, label='Centers')
ax2.set_title('K-means Clustering')

# DBSCAN results
ax3 = fig.add_subplot(133, projection='3d')
unique_labels = set(labels_dbscan)
colors = [plt.cm.Spectral(each) for each in np.linspace(0, 1, len(unique_labels))]
# Loop variable renamed from `k` to `label`: the original shadowed the
# k-means cluster count `k` defined above.  `members` also replaces the
# misleading name `xy` (these are 3D points, not 2D).
for label, col in zip(unique_labels, colors):
    if label == -1:
        col = [0, 0, 0, 1]  # Black for noise
    members = all_points[labels_dbscan == label]
    ax3.scatter(members[:, 0], members[:, 1], members[:, 2], c=[col], s=20)
ax3.set_title('DBSCAN Clustering')

plt.tight_layout()
plt.show()

Point Cloud Processing and Filtering

# Load noisy point cloud
raw_cloud = trimesh.load('noisy_scan.ply')
print(f"Original point cloud: {len(raw_cloud.vertices)} points")

# Stage 1: drop statistical outliers.
inlier_cloud = raw_cloud.remove_outliers(nb_neighbors=20, std_ratio=2.0)
print(f"After outlier removal: {len(inlier_cloud.vertices)} points")

# Stage 2: reduce density with a voxel grid of size 0.02.
voxel_cloud = inlier_cloud.voxel_downsample(0.02)
print(f"After voxel downsampling: {len(voxel_cloud.vertices)} points")

# Stage 3: keep only points with enough close neighbors.
dense_cloud = voxel_cloud.filter_radius(radius=0.05, min_neighbors=5)
print(f"After radius filtering: {len(dense_cloud.vertices)} points")

# Stage 4: neighbor-averaging smoothing.
smooth_cloud = dense_cloud.smooth(iterations=3, factor=0.3)

# Line every stage up along +x so the pipeline can be compared visually,
# reporting each stage's point count as it is added.
stages = [
    ('Original', raw_cloud),
    ('Outliers Removed', inlier_cloud),
    ('Downsampled', voxel_cloud),
    ('Radius Filtered', dense_cloud),
    ('Smoothed', smooth_cloud)
]

scene = trimesh.Scene()
x_offset = 0
for name, cloud in stages:
    shift = trimesh.transformations.translation_matrix([x_offset, 0, 0])
    scene.add_geometry(cloud, transform=shift)
    print(f"{name}: {len(cloud.vertices)} points")
    x_offset += 10

scene.show()

Normal Estimation and Surface Reconstruction

# Load point cloud without normals
point_cloud = trimesh.load('scan_no_normals.ply')

# Estimate surface normals (k-nearest-neighbor plane fitting; sign is
# ambiguous until oriented below)
normals = point_cloud.estimate_normals(k_neighbors=30)
print(f"Estimated normals for {len(normals)} points")

# Orient normals consistently (towards camera/viewpoint)
camera_location = point_cloud.centroid + [0, 0, 10]  # Above the object
oriented_normals = point_cloud.orient_normals(normals, camera_location=camera_location)

# Poisson surface reconstruction; needs consistently-oriented normals
reconstructed_mesh = point_cloud.poisson_reconstruction(
    normals=oriented_normals, 
    depth=9
)

# poisson_reconstruction may return None on failure rather than raising
if reconstructed_mesh is not None:
    print(f"Reconstructed mesh: {len(reconstructed_mesh.faces)} faces")
    print(f"Mesh volume: {reconstructed_mesh.volume:.4f}")
    
    # Compare point cloud and reconstruction
    scene = trimesh.Scene([
        point_cloud.apply_translation([-5, 0, 0]),
        reconstructed_mesh.apply_translation([5, 0, 0])
    ])
    scene.show()

# Alternative reconstruction methods; wrapped in try/except because these
# can fail on some inputs (see the except below)
try:
    # Ball pivoting reconstruction
    radii = [0.1, 0.2, 0.4]  # Multiple radii
    ball_pivot_mesh = point_cloud.ball_pivoting_reconstruction(radii=radii)
    
    # Alpha shape reconstruction
    alpha_mesh = point_cloud.alpha_shape_reconstruction(alpha=0.3)
    
    # Compare reconstruction methods; each may be None on failure
    meshes = []
    if reconstructed_mesh is not None:
        meshes.append(('Poisson', reconstructed_mesh))
    if ball_pivot_mesh is not None:
        meshes.append(('Ball Pivoting', ball_pivot_mesh))
    if alpha_mesh is not None:
        meshes.append(('Alpha Shape', alpha_mesh))
    
    # Lay the successful reconstructions out along +x
    scene = trimesh.Scene()
    for i, (name, mesh) in enumerate(meshes):
        transform = trimesh.transformations.translation_matrix([i * 8, 0, 0])
        scene.add_geometry(mesh, transform=transform)
        print(f"{name}: {len(mesh.faces)} faces, volume: {mesh.volume:.4f}")
    
    scene.show()
    
except Exception as e:
    # Best-effort demo: report which alternative method failed and move on
    print(f"Some reconstruction methods failed: {e}")

Point Cloud Registration

# Create two point clouds from the same mesh with different transforms
mesh = trimesh.load('model.stl')

# Original point cloud
points1 = mesh.sample_surface(2000)
cloud1 = trimesh.PointCloud(points1)

# Transformed point cloud (with noise)
transform_true = trimesh.transformations.compose_matrix(
    translate=[1, 0.5, 0.2],
    angles=[0.1, 0.2, 0.05]
)
points2 = trimesh.transform_points(points1, transform_true)
# Add noise
points2 += np.random.normal(0, 0.01, points2.shape)
cloud2 = trimesh.PointCloud(points2)

print("Before registration:")
print(f"Cloud 1 centroid: {cloud1.centroid}")
print(f"Cloud 2 centroid: {cloud2.centroid}")

# ICP registration: estimates the transform taking cloud1 onto cloud2
transform_est, rms_error, iterations = cloud1.register_icp(cloud2)

print(f"\nICP Registration:")
print(f"RMS error: {rms_error:.6f}")
print(f"Iterations: {iterations}")
print(f"Estimated transform:\n{transform_est}")

# Apply the estimated transform to the SOURCE cloud.  register_icp
# registers *self* (cloud1) onto *target* (cloud2), so the transform moves
# cloud1; the original example applied it to cloud2, which pushes the
# target further away instead of aligning the pair.
cloud1_aligned = cloud1.copy()
cloud1_aligned.apply_transform(transform_est)

print(f"\nAfter registration:")
print(f"Cloud 1 aligned centroid: {cloud1_aligned.centroid}")
print(f"Cloud 2 centroid: {cloud2.centroid}")

# Compute registration error: aligned cloud1 against the target cloud2
distances = cloud1_aligned.distance_to_points(cloud2.vertices)
print(f"Mean registration error: {distances.mean():.6f}")
print(f"Max registration error: {distances.max():.6f}")

# Visualize registration
scene = trimesh.Scene([
    cloud1.apply_translation([-5, 0, 0]),        # Original
    cloud2.apply_translation([0, 0, 0]),         # Target
    cloud1_aligned.apply_translation([5, 0, 0])  # Aligned
])
scene.show()

Point Cloud Comparison and Metrics

# Load two point clouds for comparison
cloud1 = trimesh.load('scan1.ply')
cloud2 = trimesh.load('scan2.ply')

# Basic comparison metrics
hausdorff_dist = cloud1.hausdorff_distance(cloud2)
chamfer_dist = cloud1.chamfer_distance(cloud2)

print(f"Hausdorff distance: {hausdorff_dist:.6f}")
print(f"Chamfer distance: {chamfer_dist:.6f}")

# Point-to-point distances (directed, in both directions)
distances_1_to_2 = cloud1.distance_to_points(cloud2.vertices)
distances_2_to_1 = cloud2.distance_to_points(cloud1.vertices)

print(f"\nDistance statistics (cloud 1 to cloud 2):")
print(f"Mean: {distances_1_to_2.mean():.6f}")
print(f"Std:  {distances_1_to_2.std():.6f}")
print(f"Max:  {distances_1_to_2.max():.6f}")
print(f"95th percentile: {np.percentile(distances_1_to_2, 95):.6f}")

# Visualize distance distribution.  Both axes are created with add_subplot
# so the right-hand one can be 3D from the start; the original used
# plt.subplots(1, 2) and then stacked a 3D axis on top of the existing 2D
# ax2, leaving a stale empty axes in the figure.
fig = plt.figure(figsize=(12, 5))
ax1 = fig.add_subplot(121)

ax1.hist(distances_1_to_2, bins=50, alpha=0.7, label='Cloud 1 to Cloud 2')
ax1.hist(distances_2_to_1, bins=50, alpha=0.7, label='Cloud 2 to Cloud 1')
ax1.set_xlabel('Distance')
ax1.set_ylabel('Count')
ax1.set_title('Distance Distribution')
ax1.legend()

# Color-coded distance visualization.  The colormap normalizes the raw
# distances itself, so the original's unused manual normalization
# (normalized_distances / plt.cm.viridis call) is dropped as dead code.
ax2 = fig.add_subplot(122, projection='3d')
ax2.scatter(cloud1.vertices[:, 0], cloud1.vertices[:, 1], cloud1.vertices[:, 2],
           c=distances_1_to_2, cmap='viridis', s=20)
ax2.set_title('Distance-Colored Points')

plt.tight_layout()
plt.show()

# Export comparison results (cast to float so the values are JSON-serializable)
comparison_data = {
    'hausdorff_distance': float(hausdorff_dist),
    'chamfer_distance': float(chamfer_dist),
    'mean_distance_1_to_2': float(distances_1_to_2.mean()),
    'mean_distance_2_to_1': float(distances_2_to_1.mean()),
    'std_distance_1_to_2': float(distances_1_to_2.std()),
    'max_distance_1_to_2': float(distances_1_to_2.max())
}

import json
with open('point_cloud_comparison.json', 'w') as f:
    json.dump(comparison_data, f, indent=2)

print("Comparison results saved to point_cloud_comparison.json")

Install with Tessl CLI

npx tessl i tessl/pypi-trimesh

docs

advanced-features.md

analysis.md

core-operations.md

file-io.md

index.md

mesh-processing.md

point-clouds.md

spatial-queries.md

visualization.md

tile.json