"""A Python module for scientific visualization and analysis of 3D objects and point clouds.

Advanced algorithms for geometric analysis, point cloud processing, mesh
operations, and scientific computations including fitting, clustering, and
morphological operations. This module provides sophisticated tools for
analyzing and processing 3D data.

Functions for combining multiple objects and managing complex assemblies.
"""
def merge(*meshs, flag=False):
    """
    Build a new Mesh or Points formed by the fusion of the inputs.

    Similar to Assembly, but creates a single entity rather than grouped objects.

    Parameters:
    - *meshs: variable arguments
        Mesh or Points objects to merge
    - flag: bool, default False
        If True, keeps track of the original identities in the merged object

    Returns:
    Union[Mesh, Points, None]: Merged object containing all input data,
    or None if no valid input was given
    """
def procrustes_alignment(sources, rigid=False):
    """
    Return an Assembly of aligned source meshes using the Procrustes algorithm.

    The Procrustes algorithm takes N sets of points and aligns them in a
    least-squares sense to their mutual mean. The algorithm iterates until
    convergence, as the mean must be recomputed after each alignment.

    Parameters:
    - sources: list of Mesh objects
        Source meshes to align (must all have the same number of points)
    - rigid: bool, default False
        If True, scaling is disabled (rigid body transformation only)

    Returns:
    Assembly: Assembly containing the aligned meshes with normalized size
"""Advanced statistical analysis functions for point cloud data processing.
def pca_ellipse(points, pvalue=0.673, res=60):
    """
    Create a PCA ellipse from point distribution analysis.

    Performs Principal Component Analysis on 2D/3D points and creates
    an ellipse representing the data distribution at the specified
    confidence level.

    Parameters:
    - points: array-like or Points object
        Input point coordinates
    - pvalue: float, default 0.673
        Confidence level for the ellipse size (0.673 ≈ 1σ for a normal distribution)
    - res: int, default 60
        Resolution of the ellipse boundary (number of points)

    Returns:
    Union[Circle, None]: Circle object representing the PCA ellipse,
    or None if the computation fails
    """
def pca_ellipsoid(points, pvalue=0.673, res=24):
    """
    Create a PCA ellipsoid from 3D point distribution analysis.

    Extends PCA analysis to 3D, creating an ellipsoid that represents
    the 3D distribution of points at the specified confidence level.

    Parameters:
    - points: array-like or Points object
        Input 3D point coordinates
    - pvalue: float, default 0.673
        Confidence level for the ellipsoid size (0.673 ≈ 1σ)
    - res: int, default 24
        Surface resolution of the ellipsoid

    Returns:
    Union[Ellipsoid, None]: Ellipsoid object representing the PCA ellipsoid,
    or None if the computation fails
"""Utilities for monitoring long-running computations and optimization tasks.
class ProgressBar:
    """
    Display a progress bar for long-running operations.

    Parameters:
    - start: int or float
        Starting value
    - stop: int or float
        Ending value
    - step: int or float, default 1
        Step increment
    - c: str or tuple, default "red"
        Progress bar color
    - title: str, default ""
        Progress bar title
    - width: int, default 24
        Progress bar width in characters
    - char: str, default "█"
        Character used for the progress bar fill
    - bg: str, default ""
        Background character
    - logFile: str, optional
        Log file path for progress recording
    - delay: float, default -1
        Delay between updates (-1 for automatic)
    - ETA: bool, default True
        Show the estimated time of arrival
    """

    def __init__(
        self,
        start,
        stop,
        step=1,
        c="red",
        title="",
        width=24,
        char="█",
        bg="",
        logFile=None,
        delay=-1,
        ETA=True
    ): ...

    # NOTE: the method name intentionally shadows builtins.print,
    # matching the published vedo API.
    def print(self, txt=""):
        """
        Update the progress bar with the current status.

        Parameters:
        - txt: str, default ""
            Additional text to display alongside the bar
        """
def progressbar(iterable, title="", c="red", width=24, char="█"):
    """
    Wrap an iterable with a progress bar.

    Parameters:
    - iterable: iterable
        Object to iterate over
    - title: str, default ""
        Progress bar title
    - c: str or tuple, default "red"
        Progress bar color
    - width: int, default 24
        Progress bar width in characters
    - char: str, default "█"
        Fill character

    Yields:
    Items from the input iterable, while displaying progress
    """
class Minimizer:
    """
    Function minimization and optimization algorithms.

    Parameters:
    - function: callable
        Function to minimize
    - bounds: list, optional
        Parameter bounds [(min1, max1), (min2, max2), ...]
    - method: str, default "BFGS"
        Optimization method
    - options: dict, optional
        Additional optimizer options
    """

    def __init__(self, function, bounds=None, method="BFGS", options=None): ...

    def minimize(self, x0):
        """
        Perform function minimization starting from an initial guess.

        Parameters:
        - x0: array-like
            Initial parameter guess

        Returns:
        dict: Optimization results including the optimal parameters
        and the function value at the optimum
        """
def compute_hessian(function, x, h=1e-5):
    """
    Compute the Hessian matrix (second derivatives) of a function.

    Parameters:
    - function: callable
        Function to compute the Hessian for
    - x: array-like
        Point at which to evaluate the Hessian
    - h: float, default 1e-5
        Finite-difference step size

    Returns:
    numpy.ndarray: Hessian matrix
"""Core mathematical operations and utility functions for geometric computations.
def is_sequence(obj):
    """
    Check if an object is a sequence (list, tuple, array, etc.).

    Parameters:
    - obj: any
        Object to test

    Returns:
    bool: True if the object is sequence-like
    """
def lin_interpolate(x, y, n):
    """
    Linear interpolation between points.

    Parameters:
    - x: array-like
        X coordinates
    - y: array-like
        Y coordinates
    - n: int
        Number of interpolated points to produce

    Returns:
    tuple: (x_interp, y_interp) interpolated coordinates
    """
def vector(x, y=None, z=None):
    """
    Create a 3D vector from separate components or from a sequence.

    Parameters:
    - x: float, array-like, or tuple
        X component, or the complete vector if y and z are omitted
    - y: float, optional
        Y component
    - z: float, optional
        Z component

    Returns:
    numpy.ndarray: 3D vector
    """
# NOTE(review): the parameter name shadows the module-level vector() helper;
# kept as-is to preserve the published keyword-argument interface.
def mag(vector):
    """
    Calculate the vector magnitude (Euclidean norm).

    Parameters:
    - vector: array-like
        Input vector

    Returns:
    float: Vector magnitude
    """
def mag2(vector):
    """
    Calculate the squared vector magnitude.

    Faster than mag() for comparisons, since it avoids the square root.

    Parameters:
    - vector: array-like
        Input vector

    Returns:
    float: Squared vector magnitude
    """
def norm(vector):
    """
    Normalize a vector to unit length.

    Parameters:
    - vector: array-like
        Input vector to normalize

    Returns:
    numpy.ndarray: Unit vector in the same direction
"""Functions for converting between different data representations and formats.
def numpy2vtk(arr, dtype=None, deep=True, name=""):
    """
    Convert a NumPy array to a VTK data array.

    Parameters:
    - arr: numpy.ndarray
        Input NumPy array
    - dtype: numpy.dtype, optional
        Target data type for the conversion
    - deep: bool, default True
        If True, create a deep copy of the data
    - name: str, default ""
        Name assigned to the VTK array

    Returns:
    vtkDataArray: VTK data array
    """
def vtk2numpy(vtk_array):
    """
    Convert a VTK data array to a NumPy array.

    Parameters:
    - vtk_array: vtkDataArray
        Input VTK data array

    Returns:
    numpy.ndarray: NumPy array containing the data
"""Advanced computational framework for building processing pipelines.
class OperationNode:
    """
    Node-based operation system for building computational graphs.

    Allows creation of complex data processing pipelines with
    interconnected operations and data-flow management.

    Parameters:
    - operation: callable
        Function to execute at this node
    - inputs: list, optional
        Input connections from other nodes
    - name: str, default ""
        Node identifier
    """

    def __init__(self, operation, inputs=None, name=""): ...

    def connect(self, other_node):
        """
        Connect this node's output to another node's input.

        Parameters:
        - other_node: OperationNode
            Target node to connect to
        """

    def execute(self, *args, **kwargs):
        """
        Execute the node's operation with the given inputs.

        Parameters:
        - *args: variable arguments
            Positional arguments for the operation
        - **kwargs: keyword arguments
            Keyword arguments for the operation

        Returns:
        Result of the operation
"""Functions for molecular visualization and chemical structure analysis.
def append_molecules(molecules):
    """
    Combine multiple molecular structures into a single assembly.

    Parameters:
    - molecules: list
        List of Molecule objects to combine

    Returns:
    Assembly: Combined molecular assembly
    """
class PeriodicTable:
    """
    Periodic table of elements with atomic properties.

    Provides access to atomic numbers, masses, radii, colors,
    and other chemical properties for visualization purposes.
    """

    def __init__(self): ...

    def get_element(self, symbol):
        """
        Get element properties by symbol.

        Parameters:
        - symbol: str
            Chemical element symbol (e.g., 'C', 'O', 'N')

        Returns:
        dict: Element properties including mass, radius, color
        """
class Atom:
    """
    Individual atom representation for molecular visualization.

    Parameters:
    - element: str
        Chemical element symbol
    - pos: tuple, default (0, 0, 0)
        Atomic position coordinates
    - r: float, optional
        Atomic radius (uses the periodic-table default if not specified)
    - c: str or tuple, optional
        Atomic color (uses the periodic-table default if not specified)
    """

    def __init__(self, element, pos=(0, 0, 0), r=None, c=None): ...
class Molecule:
    """
    Molecular structure representation.

    Parameters:
    - atoms: list
        List of Atom objects
    - bonds: list, optional
        List of (atom1_index, atom2_index) bond connections
    """

    def __init__(self, atoms, bonds=None): ...

    def add_bond(self, atom1_idx, atom2_idx):
        """
        Add a chemical bond between two atoms.

        Parameters:
        - atom1_idx: int
            Index of the first atom
        - atom2_idx: int
            Index of the second atom
        """
class Protein:
    """
    Protein structure visualization and analysis.

    Parameters:
    - pdb_id: str, optional
        PDB identifier for downloading the structure
    - filename: str, optional
        Local PDB file path
    """

    def __init__(self, pdb_id=None, filename=None): ...

    def show_structure(self, representation="cartoon"):
        """
        Display the protein structure with the specified representation.

        Parameters:
        - representation: str, default "cartoon"
            Visualization style: "cartoon", "backbone", "ball_stick", "surface"
"""import vedo
import numpy as np
from scipy.optimize import minimize
# Object merging and assembly operations
sphere1 = vedo.Sphere(pos=(0, 0, 0), c='red')
sphere2 = vedo.Sphere(pos=(1, 0, 0), c='blue')
box = vedo.Box(pos=(0.5, 1, 0), c='green')
# Merge objects into a single entity; flag=True keeps track of the
# original identities inside the merged object
merged_object = vedo.merge(sphere1, sphere2, box, flag=True)
vedo.save(merged_object, "merged_geometry.stl")
# Procrustes alignment of similar shapes
original_bunny = vedo.load("bunny.obj")
transformed_bunnies = []
# Create several randomly rotated/scaled/translated copies of the same mesh
for i in range(5):
    bunny = original_bunny.clone()
    bunny.rotate_x(np.random.uniform(0, 45))
    bunny.rotate_y(np.random.uniform(0, 45))
    bunny.scale(np.random.uniform(0.8, 1.2))
    bunny.pos(np.random.uniform(-1, 1, 3))
    transformed_bunnies.append(bunny)
# Align all bunnies to their mutual mean (scaling allowed since rigid=False)
aligned_assembly = vedo.procrustes_alignment(transformed_bunnies, rigid=False)
mean_shape = aligned_assembly.info['mean']  # access the computed mean shape
# Point cloud statistical analysis
# Generate sample data with a known 2D covariance plus small z jitter
np.random.seed(42)
data = np.random.multivariate_normal([0, 0], [[2, 0.5], [0.5, 1]], 1000)
z_coords = 0.1 * np.random.randn(1000)
points_3d = np.column_stack([data, z_coords])
point_cloud = vedo.Points(points_3d, c='blue', r=3)
# PCA analysis at two different confidence levels
pca_ellipse = vedo.pca_ellipse(points_3d, pvalue=0.95, res=100)  # 95% confidence
pca_ellipsoid = vedo.pca_ellipsoid(points_3d, pvalue=0.68, res=50)  # 1σ ellipsoid
vedo.show(point_cloud, pca_ellipse, pca_ellipsoid,
          title="PCA Analysis", axes=True)
# Progress monitoring for long computations
def expensive_computation():
    """Simulate a computationally expensive task, reporting progress as it runs."""
    progress = vedo.ProgressBar(0, 100, title="Processing Data", c='green')
    collected = []
    for i in range(100):
        # Simulated unit of work
        collected.append(np.sum(np.random.rand(10000)))
        progress.print(f"Iteration {i+1}/100")
    return collected
# Run with progress monitoring
# results = expensive_computation()
# Using a progress bar with iterables
data_list = range(1000)
processed_data = []
for item in vedo.progressbar(data_list, title="Processing Items", c='blue'):
    # Simulate per-item processing with a little Gaussian noise
    processed_item = item ** 2 + np.random.normal(0, 0.1)
    processed_data.append(processed_item)
# Function optimization
def objective_function(params):
    """Sum of squared distances of points_3d from a sphere surface.

    params is (x0, y0, z0, r): the candidate sphere centre and radius.
    Reads the module-level points_3d array.
    """
    cx, cy, cz, radius = params
    centre = np.array([cx, cy, cz])
    # Absolute distance of every point from the sphere surface
    surface_dist = np.abs(np.linalg.norm(points_3d - centre, axis=1) - radius)
    return np.sum(surface_dist**2)
# Set up the optimizer with box bounds on every parameter
optimizer = vedo.Minimizer(
    objective_function,
    bounds=[(-2, 2), (-2, 2), (-1, 1), (0.5, 3)],  # x0, y0, z0, r bounds
    method='L-BFGS-B'
)
# Initial guess: unit sphere at the origin
initial_params = [0, 0, 0, 1]
result = optimizer.minimize(initial_params)
if result['success']:
    optimal_params = result['x']
    fitted_sphere = vedo.Sphere(
        pos=optimal_params[:3],
        r=optimal_params[3],
        c='red',
        alpha=0.3
    )
    vedo.show(point_cloud, fitted_sphere, title="Optimized Sphere Fit")
# Compute the Hessian for uncertainty analysis: near the optimum the
# inverse Hessian approximates the parameter covariance matrix
hessian = vedo.compute_hessian(objective_function, result['x'])
parameter_uncertainties = np.sqrt(np.diag(np.linalg.inv(hessian)))
print("Parameter uncertainties:", parameter_uncertainties)
# Mathematical utilities
v1 = vedo.vector(1, 2, 3)    # build from separate components
v2 = vedo.vector([4, 5, 6])  # build from a sequence
magnitude = vedo.mag(v1)
normalized = vedo.norm(v1)   # unit vector along v1
is_seq = vedo.is_sequence([1, 2, 3])
print(f"Vector: {v1}")
print(f"Magnitude: {magnitude:.2f}")
print(f"Normalized: {normalized}")
print(f"Is sequence: {is_seq}")
# Data conversion between NumPy and VTK
numpy_array = np.random.rand(100, 3)
vtk_array = vedo.numpy2vtk(numpy_array, name="random_points")
converted_back = vedo.vtk2numpy(vtk_array)
# Verify round-trip accuracy: maximum absolute element difference
conversion_error = np.max(np.abs(numpy_array - converted_back))
print(f"Conversion error: {conversion_error:.2e}")
# Linear interpolation
x_data = np.array([0, 1, 2, 3, 4])
y_data = np.array([0, 1, 4, 9, 16])  # quadratic data
x_interp, y_interp = vedo.lin_interpolate(x_data, y_data, 50)
# Visualize: red points are the samples, blue line is the interpolant
original_points = vedo.Points(np.column_stack([x_data, y_data, np.zeros(5)]),
                              c='red', r=8)
interp_line = vedo.Line(np.column_stack([x_interp, y_interp, np.zeros(50)]),
                        c='blue', lw=3)
vedo.show(original_points, interp_line, title="Linear Interpolation")
# Molecular visualization example
# Define the three atoms of a linear CO2 molecule along the x axis
carbon = vedo.Atom('C', pos=(0, 0, 0))
oxygen1 = vedo.Atom('O', pos=(1.2, 0, 0))
oxygen2 = vedo.Atom('O', pos=(-1.2, 0, 0))
# Create the CO2 molecule with two carbon-oxygen bonds
co2 = vedo.Molecule([carbon, oxygen1, oxygen2], bonds=[(0, 1), (0, 2)])
# Create a protein structure (if a PDB file is available)
# protein = vedo.Protein(pdb_id="1crn")  # Crambin
# protein.show_structure("cartoon")
# Computational graph example
def add_operation(a, b):
    """Return the sum of two values."""
    return a + b

def multiply_operation(a, b):
    """Return the product of two values."""
    return a * b

def square_operation(x):
    """Return x raised to the second power."""
    return x ** 2
# Build the computational graph
input_node = vedo.OperationNode(lambda x: x, name="input")
add_node = vedo.OperationNode(add_operation, name="add")
multiply_node = vedo.OperationNode(multiply_operation, name="multiply")
square_node = vedo.OperationNode(square_operation, name="square")
# Connect nodes: input -> add -> multiply -> square
input_node.connect(add_node)
add_node.connect(multiply_node)
multiply_node.connect(square_node)
# Execute computation: ((10 + 5) * 2) ** 2
result = square_node.execute(multiply_node.execute(add_node.execute(10, 5), 2))
print(f"Computational graph result: {result}")  # Should be 900
# Advanced progress monitoring with logging
import tempfile
import time

# Create a temporary file to receive the progress log
with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.log') as f:
    log_file = f.name
detailed_progress = vedo.ProgressBar(
    0, 50,
    title="Detailed Analysis",
    c='purple',
    width=30,
    char='▓',
    bg='░',
    logFile=log_file,
    ETA=True
)
for i in range(50):
    # Simulate a variable-time operation
    time.sleep(0.1)
    detailed_progress.print(f"Processing file {i+1}")
print(f"Progress log saved to: {log_file}")Install with Tessl CLI
npx tessl i tessl/pypi-vedo