Skip to content

Commit f9e794a

Browse files
committed
updating dmri preprocess example
1 parent 1c3d77f commit f9e794a

File tree

1 file changed

+58
-18
lines changed

1 file changed

+58
-18
lines changed

examples/dmri_preprocessing.py

Lines changed: 58 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
# @Author: oesteban
44
# @Date: 2014-08-31 20:32:22
55
# @Last Modified by: oesteban
6-
# @Last Modified time: 2014-09-01 18:12:14
6+
# @Last Modified time: 2014-09-02 09:41:46
77
"""
88
===================
99
dMRI: Preprocessing
@@ -23,13 +23,16 @@
2323
2424
Import necessary modules from nipype.
2525
"""
26-
26+
import os # system functions
2727
import nipype.interfaces.io as nio # Data i/o
2828
import nipype.interfaces.utility as util # utility
29-
import nipype.pipeline.engine as pe # pypeline engine
30-
import nipype.interfaces.fsl as fsl
3129
import nipype.algorithms.misc as misc
32-
import os # system functions
30+
31+
import nipype.pipeline.engine as pe # pypeline engine
32+
33+
from nipype.interfaces import fsl
34+
from nipype.interfaces import ants
35+
3336

3437

3538
"""
@@ -48,9 +51,9 @@
4851
"""
4952

5053
info = dict(dwi=[['subject_id', 'dwidata']],
51-
bvecs=[['subject_id','bvecs']],
52-
bvals=[['subject_id','bvals']],
53-
dwi_rev=[['subject_id','nodif_PA']])
54+
bvecs=[['subject_id', 'bvecs']],
55+
bvals=[['subject_id', 'bvals']],
56+
dwi_rev=[['subject_id', 'nodif_PA']])
5457

5558
infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
5659
name="infosource")
@@ -81,20 +84,22 @@
8184
"""
8285

8386
datasource = pe.Node(nio.DataGrabber(infields=['subject_id'],
84-
outfields=info.keys()), name = 'datasource')
87+
outfields=info.keys()), name='datasource')
8588

8689
datasource.inputs.template = "%s/%s"
8790

8891
# This needs to point to the fdt folder you can find after extracting
8992
# http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz
9093
datasource.inputs.base_directory = os.path.abspath('fdt1')
91-
datasource.inputs.field_template = dict(dwi='%s/%s.nii.gz', dwi_rev='%s/%s.nii.gz')
94+
datasource.inputs.field_template = dict(dwi='%s/%s.nii.gz',
95+
dwi_rev='%s/%s.nii.gz')
9296
datasource.inputs.template_args = info
9397
datasource.inputs.sort_filelist = True
9498

9599

96100
"""
97-
An inputnode is used to pass the data obtained by the data grabber to the actual processing functions
101+
An inputnode is used to pass the data obtained by the data grabber to the
102+
actual processing functions
98103
"""
99104

100105
inputnode = pe.Node(util.IdentityInterface(fields=["dwi", "bvecs", "bvals",
@@ -106,10 +111,20 @@
106111
Setup for dMRI preprocessing
107112
============================
108113
109-
In this section we initialize the appropriate workflow for preprocessing of diffusion images.
110-
Particularly, we look into the ``acqparams.txt`` file of the selected subject to gather the
111-
encoding direction, acceleration factor (in parallel sequences it is > 1), and readout time or
112-
echospacing.
114+
In this section we initialize the appropriate workflow for preprocessing of
115+
diffusion images.
116+
117+
Artifacts correction
118+
--------------------
119+
120+
We will use the combination of ``topup`` and ``eddy`` as suggested by FSL.
121+
122+
In order to configure the susceptibility distortion correction (SDC), we first
123+
write the specific parameters of our echo-planar imaging (EPI) images.
124+
125+
Particularly, we look into the ``acqparams.txt`` file of the selected subject
126+
to gather the encoding direction, acceleration factor (in parallel sequences
127+
it is > 1), and readout time or echospacing.
113128
114129
"""
115130

@@ -118,13 +133,28 @@
118133
prep = all_peb_pipeline(epi_params=epi_AP, altepi_params=epi_PA)
119134

120135

136+
"""
137+
138+
Bias field correction
139+
---------------------
140+
141+
Finally, we set up a node to estimate a single multiplicative bias field from
142+
the *b0* image, as suggested in [Jeurissen2014]_.
143+
144+
"""
145+
146+
n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias_b0')
147+
split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
148+
merge = pe.Node(util.Function(input_names=['in_dwi', 'in_bval', 'in_corrected'],
149+
output_names=['out_file'], function=recompose_dwi), name='MergeDWIs')
150+
121151
"""
122152
Connect nodes in workflow
123153
=========================
124154
125-
We create a higher level workflow to connect the nodes. Please excuse the author for
126-
writing the arguments of the ``connect`` function in a not-standard fashion with readability
127-
aims.
155+
We create a higher level workflow to connect the nodes. Please excuse the
156+
author for writing the arguments of the ``connect`` function in a non-standard
157+
style for the sake of readability.
128158
"""
129159

130160
wf = pe.Workflow(name="dMRI_Preprocessing")
@@ -145,3 +175,13 @@
145175
if __name__ == '__main__':
146176
wf.run()
147177
wf.write_graph()
178+
179+
180+
"""
181+
182+
.. admonition:: References
183+
184+
.. [Jeurissen2014] Jeurissen B. et al., `Multi-tissue constrained spherical deconvolution
185+
for improved analysis of multi-shell diffusion MRI data
186+
<http://dx.doi.org/10.1016/j.neuroimage.2014.07.061>`_. NeuroImage (2014).
187+
doi: 10.1016/j.neuroimage.2014.07.061

0 commit comments

Comments
 (0)