# Workflows and Command-Line Tools

Pre-built workflows and command-line interfaces for common diffusion MRI analysis pipelines and batch processing. DIPY provides comprehensive command-line tools for automated processing of large datasets.

## Capabilities

### Core Workflow Infrastructure

Base classes and utilities for building reproducible processing workflows.

```python { .api }
class Workflow:
    """Base class for DIPY workflows.

    Concrete workflows subclass this and implement `run`; the CLI layer
    discovers them via `get_short_name`.
    """

    def __init__(self):
        """Initialize workflow with default parameters."""

    def run(self, input_files, out_dir='./dipy_workflows', **kwargs):
        """
        Execute the workflow.

        Parameters:
            input_files (str/list): input file paths
            out_dir (str): output directory
            **kwargs: workflow-specific parameters

        Returns:
            dict: workflow execution results
        """

    @classmethod
    def get_short_name(cls):
        """Get workflow short name for CLI."""

    def get_io_iterator(self):
        """Get input/output file iterator."""


class IntrospectiveArgumentParser:
    """Argument parser with workflow introspection.

    Builds a command-line interface automatically from a workflow's
    `run` signature and docstring.
    """

    def __init__(self, workflow):
        """Initialize parser for specific workflow."""

    def add_workflow_args(self):
        """Add workflow-specific arguments."""

    def parse_args(self):
        """Parse command line arguments."""
```

### I/O and Data Management Workflows

Workflows for data conversion, format handling, and file organization.

```python { .api }
class IoInfoFlow(Workflow):
    """Get information about input files."""

    def run(self, input_files, b0_threshold=50, bvecs_tol=0.01, **kwargs):
        """
        Extract information from diffusion data files.

        Parameters:
            input_files (str): input data path
            b0_threshold (float): b=0 identification threshold
            bvecs_tol (float): b-vectors tolerance

        Returns:
            dict: file information and metadata
        """


class FetchFlow(Workflow):
    """Download datasets from remote sources."""

    def run(self, dataset_name, out_dir='./dipy_data', **kwargs):
        """
        Fetch dataset from DIPY data repository.

        Parameters:
            dataset_name (str): name of dataset to download
            out_dir (str): output directory for downloaded data
        """


class ConvertFlow(Workflow):
    """Convert between different file formats."""

    def run(self, input_files, out_format='nifti', **kwargs):
        """
        Convert diffusion data between formats.

        Parameters:
            input_files (str): input file paths
            out_format (str): target format ('nifti', 'dicom', 'dpy')
        """
```

### Preprocessing Workflows

Complete preprocessing pipelines for diffusion MRI data preparation.

```python { .api }
class DenoisingFlow(Workflow):
    """Denoising workflow with multiple algorithms."""

    def run(self, input_files, method='nlmeans', patch_radius=1, block_radius=5, **kwargs):
        """
        Apply denoising to diffusion data.

        Parameters:
            input_files (str): input diffusion data
            method (str): denoising method ('nlmeans', 'lpca', 'mppca', 'patch2self')
            patch_radius (int): patch radius for local methods
            block_radius (int): block radius for non-local methods
        """


class GibbsRingingFlow(Workflow):
    """Gibbs ringing artifact removal."""

    def run(self, input_files, slice_axis=2, n_points=3, **kwargs):
        """
        Remove Gibbs ringing artifacts.

        Parameters:
            input_files (str): input diffusion volumes
            slice_axis (int): axis along which to remove artifacts
            n_points (int): number of points for correction
        """


class BiasCorrectionFlow(Workflow):
    """Bias field correction workflow."""

    def run(self, input_files, method='n4', mask=None, **kwargs):
        """
        Apply bias field correction.

        Parameters:
            input_files (str): input data
            method (str): correction method ('n4', 'deepn4')
            mask (str): brain mask file
        """


class MotionCorrectionFlow(Workflow):
    """Motion and eddy current correction."""

    def run(self, input_files, bvals, bvecs, reference_b0=0, **kwargs):
        """
        Correct for motion and eddy currents.

        Parameters:
            input_files (str): DWI data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            reference_b0 (int): reference b=0 volume index
        """
```

### Signal Reconstruction Workflows

Workflows for fitting diffusion models and extracting quantitative metrics.

```python { .api }
class ReconstDtiFlow(Workflow):
    """Diffusion tensor imaging reconstruction."""

    def run(self, input_files, bvals, bvecs, mask=None, fit_method='WLS', **kwargs):
        """
        Fit DTI model and compute metrics.

        Parameters:
            input_files (str): diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            mask (str): brain mask
            fit_method (str): fitting method ('WLS', 'OLS', 'NLLS')
        """


class ReconstDkiFlow(Workflow):
    """Diffusion kurtosis imaging reconstruction."""

    def run(self, input_files, bvals, bvecs, mask=None, **kwargs):
        """
        Fit DKI model for multi-shell data.

        Parameters:
            input_files (str): multi-shell diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            mask (str): brain mask
        """


class ReconstCsdFlow(Workflow):
    """Constrained spherical deconvolution reconstruction."""

    def run(self, input_files, bvals, bvecs, mask=None, sh_order=8, response=None, **kwargs):
        """
        Fit CSD model for fiber orientation estimation.

        Parameters:
            input_files (str): diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            mask (str): brain mask
            sh_order (int): spherical harmonics order
            response (str): response function file
        """


class ReconstIvimFlow(Workflow):
    """IVIM model reconstruction."""

    def run(self, input_files, bvals, bvecs, mask=None, split_b=400, **kwargs):
        """
        Fit IVIM model for perfusion analysis.

        Parameters:
            input_files (str): low b-value diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
            mask (str): brain mask
            split_b (float): b-value threshold for bi-exponential fitting
        """
```

### Tractography Workflows

Complete tractography pipelines from peak detection to streamline generation.

```python { .api }
class TrackingFlow(Workflow):
    """Deterministic and probabilistic tractography."""

    # NOTE: original signature was missing the comma after `peaks=None`,
    # which made it a syntax error; fixed here.
    def run(self, input_files, peaks=None, stopping_criterion=None, seeds=None,
            step_size=0.5, max_angle=30, **kwargs):
        """
        Perform fiber tracking.

        Parameters:
            input_files (str): peak or ODF data
            peaks (str): pre-computed peaks file
            stopping_criterion (str): stopping criterion parameters
            seeds (str): seeding strategy or seed points
            step_size (float): integration step size
            max_angle (float): maximum turning angle
        """


class TrackPftFlow(Workflow):
    """Particle Filtering Tractography."""

    def run(self, input_files, wm_map, gm_map, csf_map, seeds=None, **kwargs):
        """
        PFT with anatomical priors.

        Parameters:
            input_files (str): diffusion or ODF data
            wm_map (str): white matter probability map
            gm_map (str): gray matter probability map
            csf_map (str): CSF probability map
            seeds (str): seeding points
        """


class RecoBundlesFlow(Workflow):
    """Bundle recognition using streamline atlas."""

    def run(self, streamlines, atlas_bundles, recognition_thr=5, **kwargs):
        """
        Recognize anatomical bundles.

        Parameters:
            streamlines (str): input streamlines file
            atlas_bundles (str): atlas bundle definitions
            recognition_thr (float): recognition threshold
        """
```

### Analysis and Statistics Workflows

Workflows for tract-based analysis and statistical comparisons.

```python { .api }
class BundleAnalysisFlow(Workflow):
    """Bundle-specific analysis workflow."""

    def run(self, bundles, scalar_maps, affine=None, n_points=100, **kwargs):
        """
        Analyze tract profiles along bundles.

        Parameters:
            bundles (str): streamline bundle files
            scalar_maps (str): diffusion metric maps
            affine (str): transformation matrix
            n_points (int): profile sampling points
        """


class StatisticsFlow(Workflow):
    """Statistical analysis of tract properties."""

    def run(self, profiles_group1, profiles_group2, test='ttest', **kwargs):
        """
        Statistical comparison between groups.

        Parameters:
            profiles_group1 (str): tract profiles for group 1
            profiles_group2 (str): tract profiles for group 2
            test (str): statistical test ('ttest', 'permutation')
        """


class LifeFlow(Workflow):
    """Linear Fascicle Evaluation (LiFE) analysis."""

    def run(self, streamlines, data, bvals, bvecs, **kwargs):
        """
        Evaluate streamline evidence using LiFE.

        Parameters:
            streamlines (str): tractography streamlines
            data (str): diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
        """
```

### Visualization Workflows

Workflows for generating visualizations and reports.

```python { .api }
class VisualizationFlow(Workflow):
    """Create visualizations of diffusion data."""

    def run(self, input_files, output_type='png', slice_axis=2, **kwargs):
        """
        Generate data visualizations.

        Parameters:
            input_files (str): input data files
            output_type (str): output format ('png', 'gif', 'html')
            slice_axis (int): slicing axis for volume data
        """


class ReportFlow(Workflow):
    """Generate analysis reports."""

    def run(self, results_dir, template='standard', **kwargs):
        """
        Create comprehensive analysis report.

        Parameters:
            results_dir (str): directory with analysis results
            template (str): report template name
        """


class QualityAssuranceFlow(Workflow):
    """Quality assessment workflow."""

    def run(self, input_files, bvals, bvecs, **kwargs):
        """
        Assess data quality and generate QA metrics.

        Parameters:
            input_files (str): diffusion data
            bvals (str): b-values file
            bvecs (str): b-vectors file
        """
```

### Command-Line Interface

Direct command-line access to all workflows through the unified CLI.

```python { .api }
def run_workflow(workflow_name, args):
    """
    Execute workflow from command line.

    Parameters:
        workflow_name (str): name of workflow to run
        args (list): command line arguments

    Returns:
        int: exit code (0 for success)
    """


class CliRunner:
    """Command-line interface runner.

    Dispatches CLI invocations to the matching registered workflow.
    """

    def __init__(self):
        """Initialize CLI runner."""

    def get_available_workflows(self):
        """List all available workflows."""

    def run_workflow(self, workflow_name, argv):
        """Run specific workflow with arguments."""

    def print_help(self):
        """Print CLI help information."""
```

### Usage Examples

```python
# Python API usage
from dipy.workflows.denoise import DenoisingFlow
from dipy.workflows.reconst import ReconstDtiFlow
from dipy.workflows.tracking import TrackingFlow

# Denoising workflow
denoise_flow = DenoisingFlow()
denoise_flow.run(
    input_files='data.nii.gz',
    method='nlmeans',
    patch_radius=1,
    block_radius=5,
    out_dir='./denoised'
)

# DTI reconstruction workflow
dti_flow = ReconstDtiFlow()
dti_flow.run(
    input_files='denoised/data_denoised.nii.gz',
    bvals='data.bval',
    bvecs='data.bvec',
    mask='mask.nii.gz',
    fit_method='WLS',
    out_dir='./dti_results'
)

# Tractography workflow
tracking_flow = TrackingFlow()
tracking_flow.run(
    input_files='dti_results/peaks.nii.gz',
    stopping_criterion='fa:0.2',
    seeds='mask.nii.gz',
    step_size=0.5,
    max_angle=30,
    out_dir='./tractography'
)

print("Processing pipeline completed successfully")

# Command-line usage examples (as shell commands):
"""
# Denoising
dipy_denoise_nlmeans data.nii.gz --out_dir ./denoised

# DTI fitting
dipy_fit_dti denoised/data_denoised.nii.gz data.bval data.bvec --mask mask.nii.gz

# CSD fitting
dipy_fit_csd data.nii.gz data.bval data.bvec --out_dir ./csd_results --sh_order 8

# Tractography
dipy_track ./csd_results/csd_peaks.nii.gz --stopping_criterion fa:0.2 --seeds mask.nii.gz

# Bundle recognition
dipy_recobundles tractography/streamlines.trk atlas_bundles/ --out_dir ./bundles

# Statistics
dipy_bundle_analysis ./bundles/ ./dti_results/fa.nii.gz --out_dir ./analysis

# Visualization
dipy_horizon ./tractography/streamlines.trk --stealth

# Information about data
dipy_info data.nii.gz data.bval data.bvec

# Motion correction
dipy_correct_motion data.nii.gz data.bval data.bvec --out_dir ./corrected

# Gibbs ringing removal
dipy_gibbs_ringing data.nii.gz --out_dir ./gibbs_corrected

# Bias field correction
dipy_correct_biasfield data.nii.gz --method n4 --out_dir ./bias_corrected

# IVIM fitting
dipy_fit_ivim data.nii.gz data.bval data.bvec --split_b 400

# Bundle shape profiles
dipy_buan_profiles bundles/ fa.nii.gz --out_dir ./profiles

# Bundle shape analysis
dipy_buan_shapes ./profiles/ --out_dir ./shapes_analysis
"""

# Batch processing example
import os
from pathlib import Path

# NOTE: default is an immutable tuple instead of the original mutable list
# (mutable default arguments are shared across calls); membership tests are
# unchanged, so callers passing a list still work.
def batch_process_subjects(data_dir, workflows=('denoise', 'dti', 'tracking')):
    """
    Process multiple subjects using DIPY workflows.

    Parameters:
        data_dir (str): directory containing subject data
        workflows (sequence): list of workflows to apply
    """
    subjects = [d for d in Path(data_dir).iterdir() if d.is_dir()]

    for subject in subjects:
        print(f"Processing subject: {subject.name}")

        # Define file paths
        dwi_file = subject / 'dwi.nii.gz'
        bval_file = subject / 'dwi.bval'
        bvec_file = subject / 'dwi.bvec'
        mask_file = subject / 'mask.nii.gz'

        # Generator instead of a throwaway list inside all()
        if not all(f.exists() for f in (dwi_file, bval_file, bvec_file)):
            print(f" Missing required files for {subject.name}")
            continue

        # Create output directory
        out_dir = subject / 'dipy_results'
        out_dir.mkdir(exist_ok=True)

        try:
            # Denoising
            if 'denoise' in workflows:
                denoise_flow = DenoisingFlow()
                denoise_flow.run(
                    input_files=str(dwi_file),
                    method='nlmeans',
                    out_dir=str(out_dir / 'denoised')
                )

            # DTI reconstruction
            if 'dti' in workflows:
                dti_flow = ReconstDtiFlow()
                dti_flow.run(
                    input_files=str(out_dir / 'denoised' / 'dwi_denoised.nii.gz'),
                    bvals=str(bval_file),
                    bvecs=str(bvec_file),
                    mask=str(mask_file) if mask_file.exists() else None,
                    out_dir=str(out_dir / 'dti')
                )

            # Tractography
            if 'tracking' in workflows:
                tracking_flow = TrackingFlow()
                tracking_flow.run(
                    input_files=str(out_dir / 'dti' / 'peaks.nii.gz'),
                    stopping_criterion='fa:0.2',
                    seeds=str(mask_file) if mask_file.exists() else None,
                    out_dir=str(out_dir / 'tractography')
                )

            print(f" Successfully processed {subject.name}")

        except Exception as e:
            print(f" Error processing {subject.name}: {str(e)}")

# Run batch processing
# batch_process_subjects('/path/to/subjects', workflows=['denoise', 'dti', 'tracking'])
```