Library to make reading, writing and modifying both binary and ascii STL files easy.
Evaluation score: 85%
↑ 1.39x agent success when using this tile
Clean and process mesh data including duplicate removal, empty area elimination, and normal vector calculations for mesh optimization and repair.
Calculate and manage triangle normal vectors for accurate geometric operations.
def update_normals(self, update_areas: bool = True, update_centroids: bool = True) -> None:
    """
    Calculate normal vectors for all triangles.

    Parameters:
    - update_areas (bool): Whether to calculate triangle surface areas
    - update_centroids (bool): Whether to calculate triangle centroids

    Notes:
    - Normals calculated using cross product: (v1-v0) × (v2-v0)
    - Updates the normals property with computed vectors
    - Optionally updates dependent geometric properties
    - Essential for accurate volume and surface area calculations
    """
def get_unit_normals(self) -> "numpy.ndarray":
    """
    Get normalized normal vectors without modifying originals.

    Returns:
        numpy.array: Unit normal vectors (N, 3)

    Notes:
    - Returns normalized versions of current normal vectors
    - Zero-length normals (degenerate triangles) remain unchanged
    - Does not modify the mesh's normals property
    - Useful for orientation and lighting calculations
    """
def update_units(self) -> None:
    """
    Calculate unit normal vectors and store in units property.

    Notes:
    - Computes normalized normals scaled by triangle areas
    - Used internally for surface area and geometric calculations
    - Results stored in units property for later access
    """

# Remove duplicate triangles to clean mesh topology and reduce file size.
@classmethod
def remove_duplicate_polygons(cls, data: "numpy.ndarray", value=RemoveDuplicates.SINGLE) -> "numpy.ndarray":
    """
    Remove duplicate triangles from mesh data.

    Parameters:
    - data (numpy.array): Input mesh data array
    - value (RemoveDuplicates): Duplicate handling strategy
        - NONE: Keep all triangles (no removal)
        - SINGLE: Keep only one copy of each unique triangle
        - ALL: Remove all duplicates including originals (creates holes)

    Returns:
        numpy.array: Processed mesh data with duplicates handled

    Notes:
    - Identifies duplicates by summing triangle vertex coordinates
    - Uses lexicographic sorting for efficient duplicate detection
    - SINGLE mode is most commonly used for mesh cleaning
    - ALL mode may create holes in the mesh surface
    """

# Remove triangles with zero or near-zero surface area to prevent calculation errors.
@classmethod
def remove_empty_areas(cls, data: "numpy.ndarray") -> "numpy.ndarray":
    """
    Remove triangles with zero or negligible surface area.

    Parameters:
    - data (numpy.array): Input mesh data array

    Returns:
        numpy.array: Mesh data with empty triangles removed

    Notes:
    - Calculates triangle areas using cross product magnitudes
    - Removes triangles smaller than AREA_SIZE_THRESHOLD (typically 0)
    - Prevents division by zero in normal calculations
    - Essential for robust geometric computations
    """

# Control mesh processing during construction for clean initialization.
def __init__(
    self,
    data,
    calculate_normals=True,
    remove_empty_areas=False,
    remove_duplicate_polygons=RemoveDuplicates.NONE,
    name='',
    speedups=True,
    **kwargs
):
    """
    Initialize mesh with optional processing steps.

    Parameters:
    - data (numpy.array): Triangle data array
    - calculate_normals (bool): Whether to calculate normal vectors
    - remove_empty_areas (bool): Whether to remove zero-area triangles
    - remove_duplicate_polygons (RemoveDuplicates): Duplicate handling
    - name (str): Mesh name for identification
    - speedups (bool): Whether to use Cython optimizations
    - **kwargs: Additional arguments for base class

    Notes:
    - Processing steps applied in order: empty areas, duplicates, normals
    - Processing during construction is more efficient than post-processing
    - Clean meshes improve accuracy of all subsequent calculations
    """

import numpy as np
import numpy as np
from stl import mesh

# Load a mesh from disk.
model = mesh.Mesh.from_file('model.stl')

# Recalculate normals (e.g., after vertex modification).
model.update_normals()

# Unit normals are convenient for lighting calculations.
lighting_normals = model.get_unit_normals()

# Triangles whose normal length is near zero are degenerate.
lengths = np.linalg.norm(model.normals, axis=1)
degenerate_count = np.sum(lengths < 1e-6)
if degenerate_count > 0:
    print(f"Warning: {degenerate_count} degenerate triangles found")

# Refresh only the normals, skipping area and centroid recomputation.
model.update_normals(update_areas=False, update_centroids=False)
import numpy as np
from stl import mesh

# Construct a mesh and drop duplicate triangles in the same step.
raw_data = np.zeros(1000, dtype=mesh.Mesh.dtype)
# ... populate raw_data ...
clean_mesh = mesh.Mesh(raw_data, remove_duplicate_polygons=mesh.RemoveDuplicates.SINGLE)

# Alternatively, clean data that already lives in another mesh.
# NOTE(review): `my_mesh` is assumed to come from an earlier snippet.
duplicate_data = my_mesh.data.copy()
cleaned_data = mesh.Mesh.remove_duplicate_polygons(
    duplicate_data, mesh.RemoveDuplicates.SINGLE
)
clean_mesh = mesh.Mesh(cleaned_data)
print(f"Original triangles: {len(duplicate_data)}")
print(f"After cleanup: {len(cleaned_data)}")
import numpy as np
from stl import mesh

# Drop zero-area triangles while loading.
my_mesh = mesh.Mesh.from_file('model.stl', remove_empty_areas=True)

# Alternatively, clean the data of an existing mesh.
original_data = my_mesh.data.copy()
cleaned_data = mesh.Mesh.remove_empty_areas(original_data)
cleaned_mesh = mesh.Mesh(cleaned_data)
print(f"Removed {len(original_data) - len(cleaned_data)} empty triangles")
import numpy as np
from stl import mesh

# One-step load-and-clean.
clean_mesh = mesh.Mesh.from_file(
    'noisy_model.stl',
    calculate_normals=True,
    remove_empty_areas=True,
    remove_duplicate_polygons=mesh.RemoveDuplicates.SINGLE,
)

# The same pipeline, one stage at a time.
raw_mesh = mesh.Mesh.from_file('noisy_model.stl', calculate_normals=False)
print(f"Original triangle count: {len(raw_mesh)}")

# Stage 1: drop degenerate (zero-area) triangles.
step1_data = mesh.Mesh.remove_empty_areas(raw_mesh.data)
print(f"After removing empty areas: {len(step1_data)}")

# Stage 2: drop duplicated triangles.
step2_data = mesh.Mesh.remove_duplicate_polygons(step1_data, mesh.RemoveDuplicates.SINGLE)
print(f"After removing duplicates: {len(step2_data)}")

# Stage 3: build the final mesh and compute its normals.
final_mesh = mesh.Mesh(step2_data, calculate_normals=True)
print(f"Final clean mesh: {len(final_mesh)} triangles")
import numpy as np
from stl import mesh

probe_mesh = mesh.Mesh.from_file('model.stl')

# Measure the length of every (non-normalized) normal vector.
lengths = np.linalg.norm(probe_mesh.normals, axis=1)

# Flag triangles whose normals are suspiciously small.
zero_normals = lengths < 1e-10
short_normals = (lengths > 1e-10) & (lengths < 1e-3)
print(f"Zero-length normals: {np.sum(zero_normals)}")
print(f"Very short normals: {np.sum(short_normals)}")

# Unit normals should all have length ~1.
units = probe_mesh.get_unit_normals()
unit_lengths = np.linalg.norm(units, axis=1)
print(f"Unit normal length range: {np.min(unit_lengths):.6f} to {np.max(unit_lengths):.6f}")

# Heuristic orientation check for closed meshes: a normal pointing toward
# the overall centroid is likely inward-facing. This is a simplified
# check — more sophisticated methods exist.
if probe_mesh.is_closed():
    mesh_centroid = np.mean(probe_mesh.vectors.reshape(-1, 3), axis=0)
    face_centers = np.mean(probe_mesh.vectors, axis=1)
    toward_centroid = mesh_centroid - face_centers
    alignment = np.sum(units * toward_centroid, axis=1)
    inward_normals = np.sum(alignment > 0)
    print(f"Potentially inward-facing normals: {inward_normals}")
import numpy as np
from stl import mesh

# Speedups plus construction-time cleaning keep big meshes manageable.
large_mesh = mesh.Mesh.from_file(
    'large_model.stl',
    speedups=True,  # Use Cython extensions
    remove_empty_areas=True,
    remove_duplicate_polygons=mesh.RemoveDuplicates.SINGLE,
)

def batch_modify_vertices(mesh_obj, modifications):
    """Apply multiple vertex modifications efficiently."""
    # Write every new vertex position first...
    for tri, vert, position in modifications:
        mesh_obj.vectors[tri, vert] = position
    # ...then recompute normals once for the whole batch.
    mesh_obj.update_normals()

# Example usage
modifications = [
    (0, 0, [1, 2, 3]),  # Triangle 0, vertex 0
    (0, 1, [4, 5, 6]),  # Triangle 0, vertex 1
    (1, 2, [7, 8, 9]),  # Triangle 1, vertex 2
]
batch_modify_vertices(large_mesh, modifications)
import numpy as np
from stl import mesh

def validate_and_clean_mesh(filename):
    """Complete mesh validation and cleaning pipeline."""
    # Load without processing so counts reflect the raw file.
    raw_mesh = mesh.Mesh.from_file(filename, calculate_normals=False)
    print(f"Loaded mesh: {len(raw_mesh)} triangles")

    # Step 1: drop degenerate (zero-area) triangles.
    no_empty = mesh.Mesh.remove_empty_areas(raw_mesh.data)
    removed_empty = len(raw_mesh.data) - len(no_empty)
    print(f"Removed {removed_empty} empty triangles")

    # Step 2: drop duplicated triangles.
    deduped = mesh.Mesh.remove_duplicate_polygons(no_empty, mesh.RemoveDuplicates.SINGLE)
    removed_dupes = len(no_empty) - len(deduped)
    print(f"Removed {removed_dupes} duplicate triangles")

    # Step 3: rebuild the mesh with fresh normals.
    final_mesh = mesh.Mesh(deduped, calculate_normals=True)

    # Step 4: sanity-check the resulting normals.
    bad_normals = np.sum(np.linalg.norm(final_mesh.normals, axis=1) < 1e-6)
    if bad_normals > 0:
        print(f"Warning: {bad_normals} triangles still have degenerate normals")
    print(f"Final mesh: {len(final_mesh)} clean triangles")
    return final_mesh

# Usage
clean_mesh = validate_and_clean_mesh('input_model.stl')

AREA_SIZE_THRESHOLD = 0  # Minimum triangle area (typically 0)
# Install with Tessl CLI
npx tessl i tessl/pypi-numpy-stl

evals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10