DIPY: a comprehensive Python library for diffusion MRI analysis, including tensor imaging, tractography, and visualization.

This section documents essential utility functions for array manipulation, coordinate transformations, and mathematical operations. DIPY's core utilities provide fundamental building blocks for diffusion MRI analysis and data processing, covering array handling, data types, and memory management.
# API stubs: array manipulation utilities (dipy.utils.arrfuncs).
# NOTE(review): docstring-only stubs from scraped docs; indentation restored.
def normalize_v3(vec):
    """
    Normalize 3D vectors to unit length.

    Parameters:
        vec (array): input vectors (..., 3)

    Returns:
        array: normalized unit vectors
    """


def as_native_array(arr):
    """
    Convert array to native byte order to avoid endianness issues.

    Parameters:
        arr (array): input array

    Returns:
        array: array in native byte order
    """


def vec2vec_rotmat(u, v):
    """
    Calculate rotation matrix to align vector u with vector v.

    Parameters:
        u (array): source vector (3,)
        v (array): target vector (3,)

    Returns:
        array: rotation matrix (3, 3)
    """


def vector_norm(vec, axis=-1, keepdims=False):
    """
    Calculate L2 norm of vectors.

    Parameters:
        vec (array): input vectors
        axis (int): axis along which to compute norm
        keepdims (bool): preserve dimensions

    Returns:
        array: vector norms
    """


def nearest_pos_semi_def(matrix):
    """
    Find nearest positive semi-definite matrix.

    Parameters:
        matrix (array): input matrix (..., N, N)

    Returns:
        array: nearest PSD matrix
    """


# Functions for converting between different coordinate systems and spatial
# transformations.
# API stubs: coordinate transformations (dipy.core.geometry).
def sphere2cart(r, theta, phi, deg=True):
    """
    Convert spherical to Cartesian coordinates.

    Parameters:
        r (array): radial distance
        theta (array): polar angle
        phi (array): azimuthal angle
        deg (bool): angles in degrees (True) or radians (False)

    Returns:
        tuple: (x, y, z) Cartesian coordinates
    """


def cart2sphere(x, y, z, deg=True):
    """
    Convert Cartesian to spherical coordinates.

    Parameters:
        x (array): x coordinates
        y (array): y coordinates
        z (array): z coordinates
        deg (bool): return angles in degrees

    Returns:
        tuple: (r, theta, phi) spherical coordinates
    """


def euler_matrix(ai, aj, ak, axes='sxyz'):
    """
    Return homogeneous rotation matrix from Euler angles.

    Parameters:
        ai (float): rotation about first axis
        aj (float): rotation about second axis
        ak (float): rotation about third axis
        axes (str): axis specification

    Returns:
        array: rotation matrix (4, 4)
    """


def apply_affine(aff, pts):
    """
    Apply affine transformation to points.

    Parameters:
        aff (array): affine transformation matrix (4, 4)
        pts (array): points to transform (..., 3)

    Returns:
        array: transformed points
    """


def voxel_order(affine):
    """
    Determine voxel ordering from affine matrix.

    Parameters:
        affine (array): voxel-to-world affine (4, 4)

    Returns:
        str: voxel order code (e.g., 'RAS', 'LPS')
    """


# Tools for data interpolation and resampling operations.
# API stubs: interpolation and resampling (dipy.core.interpolation).
def trilinear_interp(data, indices):
    """
    Trilinear interpolation of 3D data.

    Parameters:
        data (array): 3D data array
        indices (array): interpolation coordinates (..., 3)

    Returns:
        array: interpolated values
    """


def trilinear_interpolate4d(data, indices):
    """
    Trilinear interpolation for 4D data.

    Parameters:
        data (array): 4D data array
        indices (array): coordinates for interpolation

    Returns:
        array: interpolated 4D values
    """


def reslice(data, affine, zooms, new_zooms, order=1):
    """
    Reslice volume to new voxel size.

    Parameters:
        data (array): input volume
        affine (array): voxel-to-world transformation
        zooms (array): current voxel sizes
        new_zooms (array): target voxel sizes
        order (int): interpolation order

    Returns:
        tuple: (resliced_data, new_affine)
    """


class Interpolator:
    """Base class for data interpolation."""

    def __init__(self, data, voxel_size):
        """
        Initialize interpolator.

        Parameters:
            data (array): data to interpolate
            voxel_size (array): voxel dimensions
        """

    def __call__(self, coordinates):
        """Interpolate at given coordinates."""


# Mathematical functions and statistical tools for diffusion analysis.
# API stubs: decorators and package helpers (dipy.utils).
def auto_attr(func):
    """
    Decorator for automatic attribute caching.

    Parameters:
        func (callable): function to decorate

    Returns:
        callable: decorated function with caching
    """


def multi_voxel_fit(single_voxel_func):
    """
    Decorator to extend single-voxel functions to multi-voxel data.

    Parameters:
        single_voxel_func (callable): single voxel fitting function

    Returns:
        callable: multi-voxel version of function
    """


def optional_package(name, trip_msg=None):
    """
    Return package or None with optional warning.

    Parameters:
        name (str): package name
        trip_msg (str): message if package not found

    Returns:
        module or None: imported package or None
    """


def warn_issue(issue_no, message):
    """
    Issue warning with GitHub issue reference.

    Parameters:
        issue_no (int): GitHub issue number
        message (str): warning message
    """


def floating_point_precision(dtype):
    """
    Get floating point precision information.

    Parameters:
        dtype: numpy data type

    Returns:
        dict: precision information
    """


# Tools for memory management and performance optimization.
# API stubs: memory management and performance monitoring.
def chunk_size(data_shape, chunk_mb=100):
    """
    Calculate optimal chunk size for processing.

    Parameters:
        data_shape (tuple): shape of data array
        chunk_mb (float): target chunk size in MB

    Returns:
        int: optimal chunk size
    """


def memory_usage():
    """
    Get current memory usage information.

    Returns:
        dict: memory usage statistics
    """


class ProgressBar:
    """Simple progress bar for long operations."""

    def __init__(self, total, desc='Processing'):
        """
        Initialize progress bar.

        Parameters:
            total (int): total number of steps
            desc (str): description text
        """

    def update(self, n=1):
        """Update progress by n steps."""

    def close(self):
        """Close progress bar."""


def time_operation(func):
    """
    Decorator to time function execution.

    Parameters:
        func (callable): function to time

    Returns:
        callable: timed function
    """


# Utilities for file handling, path manipulation, and data I/O operations.
# API stubs: file, path, and data I/O helpers.
def ensure_dir_exists(path):
    """
    Ensure directory exists, create if necessary.

    Parameters:
        path (str): directory path

    Returns:
        str: absolute directory path
    """


def get_file_extension(filename):
    """
    Get file extension handling compressed files.

    Parameters:
        filename (str): file name or path

    Returns:
        str: file extension
    """


def split_filename(filename):
    """
    Split filename into components.

    Parameters:
        filename (str): file name or path

    Returns:
        tuple: (directory, basename, extension)
    """


def check_file_exists(filename, raise_error=True):
    """
    Check if file exists with optional error raising.

    Parameters:
        filename (str): file path to check
        raise_error (bool): raise error if file doesn't exist

    Returns:
        bool: True if file exists
    """


def get_data_dims(filename):
    """
    Get dimensions of data file without loading.

    Parameters:
        filename (str): path to data file

    Returns:
        tuple: data dimensions
    """


# Utilities for reproducible random number generation in scientific computing.
# API stubs: reproducible random number generation.
class RandomState:
    """Thread-safe random number generator."""

    def __init__(self, seed=None):
        """
        Initialize random state.

        Parameters:
            seed (int): random seed for reproducibility
        """

    def uniform(self, low=0.0, high=1.0, size=None):
        """Generate uniform random numbers."""

    def normal(self, loc=0.0, scale=1.0, size=None):
        """Generate normal random numbers."""

    def randint(self, low, high, size=None):
        """Generate random integers."""

    def choice(self, a, size=None, replace=True, p=None):
        """Random sampling from array."""


def set_random_number_generator(seed):
    """
    Set global random seed for reproducibility.

    Parameters:
        seed (int): random seed value
    """


def get_random_state():
    """Get current random state."""


# Tools for data validation, testing, and quality assurance.
# API stubs: data validation, testing, and quality assurance.
def is_hemispherical(vectors):
    """
    Check if vectors are hemispherical.

    Parameters:
        vectors (array): vectors to check (..., 3)

    Returns:
        bool: True if vectors are hemispherical
    """


def validate_gtab(gtab):
    """
    Validate gradient table consistency.

    Parameters:
        gtab (GradientTable): gradient table to validate

    Returns:
        dict: validation results and warnings
    """


def check_multi_b(gtab, n_shells=None, tol=20):
    """
    Check if gradient table has multiple b-value shells.

    Parameters:
        gtab (GradientTable): gradient table
        n_shells (int): expected number of shells
        tol (float): tolerance for b-value grouping

    Returns:
        tuple: (is_multi_b, shell_info)
    """


def assert_arrays_equal(arr1, arr2, rtol=1e-5, atol=1e-8):
    """
    Assert arrays are equal within tolerance.

    Parameters:
        arr1 (array): first array
        arr2 (array): second array
        rtol (float): relative tolerance
        atol (float): absolute tolerance
    """


class TempDirectory:
    """Context manager for temporary directories."""

    def __init__(self, suffix='', prefix='dipy_'):
        """Initialize temporary directory context."""

    def __enter__(self):
        """Enter context and create directory."""

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Exit context and cleanup directory."""


# Array manipulation examples
# Usage examples for the DIPY utility APIs documented above.
# NOTE(review): these examples require the `dipy` package and its example
# data; indentation was restored from the flattened scrape — verify against
# the upstream docs before running.
from dipy.utils.arrfuncs import normalize_v3, as_native_array, vector_norm
import numpy as np

# Normalize vectors
vectors = np.random.randn(100, 3)
unit_vectors = normalize_v3(vectors)
norms = vector_norm(unit_vectors)
print(f"Unit vector norms: min={norms.min():.6f}, max={norms.max():.6f}")

# Handle byte order issues
big_endian_array = np.array([1.0, 2.0, 3.0], dtype='>f4')  # Big endian
native_array = as_native_array(big_endian_array)
print(f"Original dtype: {big_endian_array.dtype}")
print(f"Native dtype: {native_array.dtype}")

# Coordinate transformations
from dipy.core.geometry import sphere2cart, cart2sphere

# Convert spherical to Cartesian
theta = np.linspace(0, 180, 10)  # Polar angle
phi = np.linspace(0, 360, 20)  # Azimuthal angle
r = 1.0
theta_grid, phi_grid = np.meshgrid(theta, phi, indexing='ij')
x, y, z = sphere2cart(r, theta_grid, phi_grid, deg=True)

# Convert back to spherical
r_back, theta_back, phi_back = cart2sphere(x, y, z, deg=True)
print(f"Coordinate conversion accuracy:")
print(f" Theta error: {np.abs(theta_grid - theta_back).max():.10f}")
print(f" Phi error: {np.abs(phi_grid - phi_back).max():.10f}")

# Interpolation example
from dipy.core.interpolation import trilinear_interp
from dipy.data import read_stanford_hardi

# Load example data
img, gtab = read_stanford_hardi()
data = img.get_fdata()

# Interpolate at fractional coordinates
coords = np.array([[10.5, 15.7, 8.3], [20.2, 25.1, 12.8]])
interpolated = trilinear_interp(data[..., 0], coords)  # Interpolate b=0 volume
print(f"Interpolated values at fractional coordinates: {interpolated}")

# Random number generation for reproducibility
from dipy.utils.rng import RandomState

# Create reproducible random state
rng = RandomState(seed=42)

# Generate reproducible random data
random_data1 = rng.normal(0, 1, size=(5, 3))
rng = RandomState(seed=42)  # Reset with same seed
random_data2 = rng.normal(0, 1, size=(5, 3))
print(f"Reproducible random generation: {np.allclose(random_data1, random_data2)}")

# File and path utilities
from dipy.utils.pathutils import ensure_dir_exists, get_file_extension, split_filename

# Ensure output directory exists
output_dir = ensure_dir_exists('./analysis_results')
print(f"Output directory: {output_dir}")

# Handle file extensions
filename = 'data.nii.gz'
extension = get_file_extension(filename)
directory, basename, ext = split_filename(filename)
# The scraped text showed "(unknown)" here — a metadata placeholder leak;
# restored to print the filename being inspected.
print(f"File: {filename}")
print(f" Extension: {extension}")
print(f" Components: dir='{directory}', base='{basename}', ext='{ext}'")

# Progress tracking for long operations
from dipy.utils.progress import ProgressBar
import time

# Simulate long processing with progress bar
n_items = 100
pbar = ProgressBar(n_items, desc='Processing data')
for i in range(n_items):
    # Simulate work
    time.sleep(0.01)
    pbar.update()
pbar.close()
print("Processing completed")

# Validation utilities
from dipy.utils.validation import validate_gtab, check_multi_b

# Validate gradient table
validation_results = validate_gtab(gtab)
is_multi_b, shell_info = check_multi_b(gtab)
print(f"Gradient table validation: {validation_results}")
print(f"Multi-shell data: {is_multi_b}")
if is_multi_b:
    print(f"Shell information: {shell_info}")

# Memory and performance monitoring
from dipy.utils.memory import memory_usage, chunk_size

# Get memory usage
mem_info = memory_usage()
print(f"Memory usage: {mem_info}")

# Calculate optimal chunk size for processing
data_shape = data.shape
optimal_chunk = chunk_size(data_shape, chunk_mb=50)
print(f"Optimal chunk size for {data_shape}: {optimal_chunk}")

# Temporary directory context
from dipy.utils.tempdir import TempDirectory

# Use temporary directory for intermediate results
with TempDirectory(prefix='dipy_temp_') as temp_dir:
    temp_file = temp_dir / 'intermediate_result.npy'
    np.save(temp_file, random_data1)
    # File exists within context
    print(f"Temp file exists: {temp_file.exists()}")

# File is automatically cleaned up after context
print("Temporary directory cleaned up automatically")

# Mathematical utilities
from dipy.utils.decorators import auto_attr


class ExampleClass:
    """Example class using auto_attr decorator."""

    def __init__(self, data):
        self.data = data

    @auto_attr
    def expensive_computation(self):
        """Expensive computation cached automatically."""
        print("Computing expensive result...")
        return np.sum(self.data ** 2)


# Demonstrate caching
example = ExampleClass(np.random.randn(1000000))
result1 = example.expensive_computation  # Computed
result2 = example.expensive_computation  # Cached
print(f"Results equal: {result1 == result2}")

# Install with Tessl CLI
# Install with the Tessl CLI: npx tessl i tessl/pypi-dipy