Comprehensive Python library for diffusion MRI analysis including tensor imaging, tractography, and visualization
—
Pre-built workflows and command-line interfaces for common diffusion MRI analysis pipelines and batch processing, enabling automated processing of large datasets.
Base classes and utilities for building reproducible processing workflows.
class Workflow:
    """Base class for DIPY workflows.

    Subclasses implement ``run`` and are exposed on the command line
    under the name returned by ``get_short_name``.
    """

    def __init__(self):
        """Initialize workflow with default parameters."""

    def run(self, input_files, out_dir='./dipy_workflows', **kwargs):
        """Execute the workflow.

        Parameters:
            input_files (str/list): input file paths
            out_dir (str): output directory
            **kwargs: workflow-specific parameters

        Returns:
            dict: workflow execution results
        """

    @classmethod
    def get_short_name(cls):
        """Get workflow short name for CLI."""

    def get_io_iterator(self):
        """Get input/output file iterator."""
class IntrospectiveArgumentParser:
    """Argument parser with workflow introspection."""

    def __init__(self, workflow):
        """Initialize parser for specific workflow."""

    def add_workflow_args(self):
        """Add workflow-specific arguments."""

    def parse_args(self):
        """Parse command line arguments."""


# Workflows for data conversion, format handling, and file organization.
class IoInfoFlow(Workflow):
    """Get information about input files."""

    def run(self, input_files, b0_threshold=50, bvecs_tol=0.01, **kwargs):
        """Extract information from diffusion data files.

        Parameters:
            input_files (str): input data path
            b0_threshold (float): b=0 identification threshold
            bvecs_tol (float): b-vectors tolerance

        Returns:
            dict: file information and metadata
        """
class FetchFlow(Workflow):
    """Download datasets from remote sources."""

    def run(self, dataset_name, out_dir='./dipy_data', **kwargs):
        """Fetch dataset from DIPY data repository.

        Parameters:
            dataset_name (str): name of dataset to download
            out_dir (str): output directory for downloaded data
        """
class ConvertFlow(Workflow):
    """Convert between different file formats."""

    def run(self, input_files, out_format='nifti', **kwargs):
        """Convert diffusion data between formats.

        Parameters:
            input_files (str): input file paths
            out_format (str): target format ('nifti', 'dicom', 'dpy')
        """


# Complete preprocessing pipelines for diffusion MRI data preparation.
class DenoisingFlow(Workflow):
    """Denoising workflow with multiple algorithms."""

    def run(self, input_files, method='nlmeans', patch_radius=1, block_radius=5, **kwargs):
        """Apply denoising to diffusion data.

        Parameters:
            input_files (str): input diffusion data
            method (str): denoising method ('nlmeans', 'lpca', 'mppca', 'patch2self')
            patch_radius (int): patch radius for local methods
            block_radius (int): block radius for non-local methods
        """
class GibbsRingingFlow(Workflow):
    """Gibbs ringing artifact removal."""

    def run(self, input_files, slice_axis=2, n_points=3, **kwargs):
        """Remove Gibbs ringing artifacts.

        Parameters:
            input_files (str): input diffusion volumes
            slice_axis (int): axis along which to remove artifacts
            n_points (int): number of points for correction
        """
class BiasCorrectionFlow(Workflow):
    """Bias field correction workflow."""

    def run(self, input_files, method='n4', mask=None, **kwargs):
        """Apply bias field correction.

        Parameters:
            input_files (str): input data
            method (str): correction method ('n4', 'deepn4')
            mask (str): brain mask file
        """
class MotionCorrectionFlow(Workflow):
    """Motion and eddy current correction."""

    def run(self, input_files, bvals, bvecs, reference_b0=0, **kwargs):
        """Correct for motion and eddy currents.

        Parameters:
            input_files (str): DWI data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            reference_b0 (int): reference b=0 volume index
        """


# Workflows for fitting diffusion models and extracting quantitative metrics.
class ReconstDtiFlow(Workflow):
    """Diffusion tensor imaging reconstruction."""

    def run(self, input_files, bvals, bvecs, mask=None, fit_method='WLS', **kwargs):
        """Fit DTI model and compute metrics.

        Parameters:
            input_files (str): diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            mask (str): brain mask
            fit_method (str): fitting method ('WLS', 'OLS', 'NLLS')
        """
class ReconstDkiFlow(Workflow):
    """Diffusion kurtosis imaging reconstruction."""

    def run(self, input_files, bvals, bvecs, mask=None, **kwargs):
        """Fit DKI model for multi-shell data.

        Parameters:
            input_files (str): multi-shell diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            mask (str): brain mask
        """
class ReconstCsdFlow(Workflow):
    """Constrained spherical deconvolution reconstruction."""

    def run(self, input_files, bvals, bvecs, mask=None, sh_order=8, response=None, **kwargs):
        """Fit CSD model for fiber orientation estimation.

        Parameters:
            input_files (str): diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            mask (str): brain mask
            sh_order (int): spherical harmonics order
            response (str): response function file
        """
class ReconstIvimFlow(Workflow):
    """IVIM model reconstruction."""

    def run(self, input_files, bvals, bvecs, mask=None, split_b=400, **kwargs):
        """Fit IVIM model for perfusion analysis.

        Parameters:
            input_files (str): low b-value diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            mask (str): brain mask
            split_b (float): b-value threshold for bi-exponential fitting
        """


# Complete tractography pipelines from peak detection to streamline generation.
class TrackingFlow(Workflow):
    """Deterministic and probabilistic tractography."""

    # Fixed: original signature was missing the comma after ``peaks=None``.
    def run(self, input_files, peaks=None, stopping_criterion=None, seeds=None,
            step_size=0.5, max_angle=30, **kwargs):
        """Perform fiber tracking.

        Parameters:
            input_files (str): peak or ODF data
            peaks (str): pre-computed peaks file
            stopping_criterion (str): stopping criterion parameters
            seeds (str): seeding strategy or seed points
            step_size (float): integration step size
            max_angle (float): maximum turning angle
        """
class TrackPftFlow(Workflow):
    """Particle Filtering Tractography."""

    def run(self, input_files, wm_map, gm_map, csf_map, seeds=None, **kwargs):
        """PFT with anatomical priors.

        Parameters:
            input_files (str): diffusion or ODF data
            wm_map (str): white matter probability map
            gm_map (str): gray matter probability map
            csf_map (str): CSF probability map
            seeds (str): seeding points
        """
class RecoBundlesFlow(Workflow):
    """Bundle recognition using streamline atlas."""

    def run(self, streamlines, atlas_bundles, recognition_thr=5, **kwargs):
        """Recognize anatomical bundles.

        Parameters:
            streamlines (str): input streamlines file
            atlas_bundles (str): atlas bundle definitions
            recognition_thr (float): recognition threshold
        """


# Workflows for tract-based analysis and statistical comparisons.
class BundleAnalysisFlow(Workflow):
    """Bundle-specific analysis workflow."""

    def run(self, bundles, scalar_maps, affine=None, n_points=100, **kwargs):
        """Analyze tract profiles along bundles.

        Parameters:
            bundles (str): streamline bundle files
            scalar_maps (str): diffusion metric maps
            affine (str): transformation matrix
            n_points (int): profile sampling points
        """
class StatisticsFlow(Workflow):
    """Statistical analysis of tract properties."""

    def run(self, profiles_group1, profiles_group2, test='ttest', **kwargs):
        """Statistical comparison between groups.

        Parameters:
            profiles_group1 (str): tract profiles for group 1
            profiles_group2 (str): tract profiles for group 2
            test (str): statistical test ('ttest', 'permutation')
        """
class LifeFlow(Workflow):
    """Linear Fascicle Evaluation (LiFE) analysis."""

    def run(self, streamlines, data, bvals, bvecs, **kwargs):
        """Evaluate streamline evidence using LiFE.

        Parameters:
            streamlines (str): tractography streamlines
            data (str): diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
        """


# Workflows for generating visualizations and reports.
class VisualizationFlow(Workflow):
    """Create visualizations of diffusion data."""

    def run(self, input_files, output_type='png', slice_axis=2, **kwargs):
        """Generate data visualizations.

        Parameters:
            input_files (str): input data files
            output_type (str): output format ('png', 'gif', 'html')
            slice_axis (int): slicing axis for volume data
        """
class ReportFlow(Workflow):
    """Generate analysis reports."""

    def run(self, results_dir, template='standard', **kwargs):
        """Create comprehensive analysis report.

        Parameters:
            results_dir (str): directory with analysis results
            template (str): report template name
        """
class QualityAssuranceFlow(Workflow):
    """Quality assessment workflow."""

    def run(self, input_files, bvals, bvecs, **kwargs):
        """Assess data quality and generate QA metrics.

        Parameters:
            input_files (str): diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
        """


# Direct command-line access to all workflows through the unified CLI.
def run_workflow(workflow_name, args):
    """Execute workflow from command line.

    Parameters:
        workflow_name (str): name of workflow to run
        args (list): command line arguments

    Returns:
        int: exit code (0 for success)
    """
class CliRunner:
    """Command-line interface runner."""

    def __init__(self):
        """Initialize CLI runner."""

    def get_available_workflows(self):
        """List all available workflows."""

    def run_workflow(self, workflow_name, argv):
        """Run specific workflow with arguments."""

    def print_help(self):
        """Print CLI help information."""


# Python API usage
from dipy.workflows.denoise import DenoisingFlow
from dipy.workflows.reconst import ReconstDtiFlow
from dipy.workflows.tracking import TrackingFlow

# Step 1: denoise the raw DWI volume with non-local means ('nlmeans').
denoise_flow = DenoisingFlow()
denoise_flow.run(input_files='data.nii.gz', method='nlmeans',
                 patch_radius=1, block_radius=5, out_dir='./denoised')

# Step 2: weighted-least-squares ('WLS') DTI fit on the denoised data.
dti_flow = ReconstDtiFlow()
dti_flow.run(input_files='denoised/data_denoised.nii.gz',
             bvals='data.bval', bvecs='data.bvec', mask='mask.nii.gz',
             fit_method='WLS', out_dir='./dti_results')

# Step 3: fiber tracking seeded from the mask, stopping per 'fa:0.2'.
tracking_flow = TrackingFlow()
tracking_flow.run(input_files='dti_results/peaks.nii.gz',
                  stopping_criterion='fa:0.2', seeds='mask.nii.gz',
                  step_size=0.5, max_angle=30, out_dir='./tractography')

print("Processing pipeline completed successfully")
# Command-line usage examples (as shell commands):
# NOTE: the triple-quoted block below is a bare string literal -- a no-op at
# runtime, kept purely as inline documentation of the equivalent CLI calls.
"""
# Denoising
dipy_denoise_nlmeans data.nii.gz --out_dir ./denoised
# DTI fitting
dipy_fit_dti denoised/data_denoised.nii.gz data.bval data.bvec --mask mask.nii.gz
# CSD fitting
dipy_fit_csd data.nii.gz data.bval data.bvec --out_dir ./csd_results --sh_order 8
# Tractography
dipy_track ./csd_results/csd_peaks.nii.gz --stopping_criterion fa:0.2 --seeds mask.nii.gz
# Bundle recognition
dipy_recobundles tractography/streamlines.trk atlas_bundles/ --out_dir ./bundles
# Statistics
dipy_bundle_analysis ./bundles/ ./dti_results/fa.nii.gz --out_dir ./analysis
# Visualization
dipy_horizon ./tractography/streamlines.trk --stealth
# Information about data
dipy_info data.nii.gz data.bval data.bvec
# Motion correction
dipy_correct_motion data.nii.gz data.bval data.bvec --out_dir ./corrected
# Gibbs ringing removal
dipy_gibbs_ringing data.nii.gz --out_dir ./gibbs_corrected
# Bias field correction
dipy_correct_biasfield data.nii.gz --method n4 --out_dir ./bias_corrected
# IVIM fitting
dipy_fit_ivim data.nii.gz data.bval data.bvec --split_b 400
# Bundle shape profiles
dipy_buan_profiles bundles/ fa.nii.gz --out_dir ./profiles
# Bundle shape analysis
dipy_buan_shapes ./profiles/ --out_dir ./shapes_analysis
"""
# Batch processing example
import os
from pathlib import Path
def batch_process_subjects(data_dir, workflows=('denoise', 'dti', 'tracking')):
    """Process multiple subjects using DIPY workflows.

    Each immediate subdirectory of ``data_dir`` is treated as one subject
    containing ``dwi.nii.gz``, ``dwi.bval``, ``dwi.bvec`` and optionally
    ``mask.nii.gz``. Failures are logged per subject and do not stop the batch.

    Parameters:
        data_dir (str): directory containing subject data
        workflows (sequence of str): workflows to apply; any of
            'denoise', 'dti', 'tracking'
    """
    # Default is an immutable tuple (the original used a mutable list default).
    subjects = [d for d in Path(data_dir).iterdir() if d.is_dir()]
    for subject in subjects:
        print(f"Processing subject: {subject.name}")
        # Expected per-subject file layout.
        dwi_file = subject / 'dwi.nii.gz'
        bval_file = subject / 'dwi.bval'
        bvec_file = subject / 'dwi.bvec'
        mask_file = subject / 'mask.nii.gz'
        # DWI, bvals and bvecs are required; the mask is optional.
        if not all(f.exists() for f in (dwi_file, bval_file, bvec_file)):
            print(f" Missing required files for {subject.name}")
            continue
        # Per-subject output directory.
        out_dir = subject / 'dipy_results'
        out_dir.mkdir(exist_ok=True)
        try:
            # Denoising
            if 'denoise' in workflows:
                denoise_flow = DenoisingFlow()
                denoise_flow.run(
                    input_files=str(dwi_file),
                    method='nlmeans',
                    out_dir=str(out_dir / 'denoised')
                )
            # DTI reconstruction (consumes the denoised output path)
            if 'dti' in workflows:
                dti_flow = ReconstDtiFlow()
                dti_flow.run(
                    input_files=str(out_dir / 'denoised' / 'dwi_denoised.nii.gz'),
                    bvals=str(bval_file),
                    bvecs=str(bvec_file),
                    mask=str(mask_file) if mask_file.exists() else None,
                    out_dir=str(out_dir / 'dti')
                )
            # Tractography (consumes the DTI peaks output path)
            if 'tracking' in workflows:
                tracking_flow = TrackingFlow()
                tracking_flow.run(
                    input_files=str(out_dir / 'dti' / 'peaks.nii.gz'),
                    stopping_criterion='fa:0.2',
                    seeds=str(mask_file) if mask_file.exists() else None,
                    out_dir=str(out_dir / 'tractography')
                )
            print(f" Successfully processed {subject.name}")
        except Exception as e:
            # Log and continue with the remaining subjects.
            print(f" Error processing {subject.name}: {str(e)}")
# Run batch processing
# batch_process_subjects('/path/to/subjects', workflows=['denoise', 'dti', 'tracking'])
# Install with Tessl CLI:
#   npx tessl i tessl/pypi-dipy