
Commit f177e3f

new parallel bedpostx finished :)
1 parent 6c01665

File tree: 2 files changed (+107, -1 lines)


nipype/workflows/dmri/fsl/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-from dti import create_bedpostx_pipeline
+from dti import create_bedpostx_pipeline, bedpostx_parallel
 
 from epi import (fieldmap_correction, topup_correction,
                  create_eddy_correct_pipeline,
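With this re-export, the new workflow factory becomes importable from the package namespace alongside the existing pipeline. A minimal import sketch (assuming a nipype checkout that includes this commit):

    # the package-level import now exposes both bedpostx workflow factories
    from nipype.workflows.dmri.fsl import create_bedpostx_pipeline, bedpostx_parallel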

nipype/workflows/dmri/fsl/dti.py

Lines changed: 106 additions & 0 deletions
@@ -3,6 +3,7 @@
 import nipype.pipeline.engine as pe
 from nipype.interfaces import utility as niu
 from nipype.interfaces import fsl
+from nipype.algorithms import misc
 import os
 
 #backwards compatibility
@@ -121,3 +122,108 @@ def merge_and_mean(name='mm'):
         (mean, outputnode, [('out_file', 'mean')])
     ])
     return wf
+
+
+def bedpostx_parallel(name='bedpostx_parallel', params={}):
+    """
+    Does the same as :func:`.create_bedpostx_pipeline` by splitting
+    the input dMRI into small ROIs that are better suited for parallel
+    processing.
+
+    Example
+    -------
+
+    >>> from nipype.workflows.dmri.fsl.dti import bedpostx_parallel
+    >>> params = dict(n_fibres = 2, fudge = 1, burn_in = 1000,
+    ...               n_jumps = 1250, sample_every = 25)
+    >>> bpwf = bedpostx_parallel('nipype_bedpostx_parallel', params)
+    >>> bpwf.inputs.inputnode.dwi = 'diffusion.nii'
+    >>> bpwf.inputs.inputnode.mask = 'mask.nii'
+    >>> bpwf.inputs.inputnode.bvecs = 'bvecs'
+    >>> bpwf.inputs.inputnode.bvals = 'bvals'
+    >>> bpwf.run(plugin='CondorDAGMan') # doctest: +SKIP
+
+    Inputs::
+
+        inputnode.dwi
+        inputnode.mask
+        inputnode.bvecs
+        inputnode.bvals
+
+    Outputs::
+
+        outputnode wraps all XFibres outputs
+
+    """
+
+    inputnode = pe.Node(niu.IdentityInterface(fields=['dwi', 'mask',
+                        'bvecs', 'bvals']), name='inputnode')
+    slice_dwi = pe.Node(misc.SplitROIs(roi_size=(5, 5, 1)), name='slice_dwi')
+    xfib_if = fsl.XFibres(**params)
+    xfibres = pe.MapNode(xfib_if, name='xfibres',
+                         iterfield=['dwi', 'mask'])
+
+    make_dyads = pe.MapNode(fsl.MakeDyadicVectors(), name="make_dyads",
+                            iterfield=['theta_vol', 'phi_vol'])
+    out_fields = ['dyads', 'dyads_disp',
+                  'thsamples', 'phsamples', 'fsamples',
+                  'mean_thsamples', 'mean_phsamples', 'mean_fsamples']
+
+    outputnode = pe.Node(niu.IdentityInterface(fields=out_fields),
+                         name='outputnode')
+
+    wf = pe.Workflow(name=name)
+    wf.connect([
+        (inputnode, slice_dwi, [('dwi', 'in_file'),
+                                ('mask', 'in_mask')]),
+        (slice_dwi, xfibres, [('out_files', 'dwi'),
+                              ('out_masks', 'mask')]),
+        (inputnode, xfibres, [('bvecs', 'bvecs'),
+                              ('bvals', 'bvals')]),
+        (inputnode, make_dyads, [('mask', 'mask')])
+    ])
+
+    mms = {}
+    for k in ['thsamples', 'phsamples', 'fsamples']:
+        mms[k] = merge_and_mean_parallel(k)
+        wf.connect([
+            (slice_dwi, mms[k], [('out_index', 'inputnode.in_index')]),
+            (inputnode, mms[k], [('mask', 'inputnode.in_reference')]),
+            (xfibres, mms[k], [(k, 'inputnode.in_files')]),
+            (mms[k], outputnode, [('outputnode.merged', k),
+                                  ('outputnode.mean', 'mean_%s' % k)])
+
+        ])
+
+    # m_mdsamples = pe.Node(fsl.Merge(dimension="z"),
+    #                       name="merge_mean_dsamples")
+    wf.connect([
+        (mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]),
+        (mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]),
+        #(xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]),
+        (make_dyads, outputnode, [('dyads', 'dyads'),
+                                  ('dispersion', 'dyads_disp')])
+    ])
+    return wf
+
+
+def merge_and_mean_parallel(name='mm'):
+    inputnode = pe.Node(niu.IdentityInterface(fields=['in_files',
+                        'in_reference', 'in_index']), name='inputnode')
+    outputnode = pe.Node(niu.IdentityInterface(fields=['merged', 'mean']),
+                         name='outputnode')
+    merge = pe.MapNode(misc.MergeROIs(), name='Merge',
+                       iterfield=['in_files'])
+    mean = pe.MapNode(fsl.ImageMaths(op_string='-Tmean'), name='Mean',
+                      iterfield=['in_file'])
+
+    wf = pe.Workflow(name=name)
+    wf.connect([
+        (inputnode, merge, [(('in_files', transpose), 'in_files'),
+                            ('in_reference', 'in_reference'),
+                            ('in_index', 'in_index')]),
+        (merge, mean, [('merged_file', 'in_file')]),
+        (merge, outputnode, [('merged_file', 'merged')]),
+        (mean, outputnode, [('out_file', 'mean')])
+    ])
+    return wf
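For reference, a usage sketch assembled from the docstring above, run with nipype's MultiProc plugin instead of CondorDAGMan; the input file names are placeholders and n_procs is an illustrative setting, not part of the commit:

    from nipype.workflows.dmri.fsl.dti import bedpostx_parallel

    # XFibres parameters forwarded to every per-ROI node
    # (values taken from the docstring example)
    params = dict(n_fibres=2, fudge=1, burn_in=1000,
                  n_jumps=1250, sample_every=25)
    wf = bedpostx_parallel('nipype_bedpostx_parallel', params)

    # placeholder inputs: a 4D dMRI volume, a brain mask and the b-table
    wf.inputs.inputnode.dwi = 'diffusion.nii'
    wf.inputs.inputnode.mask = 'mask.nii'
    wf.inputs.inputnode.bvecs = 'bvecs'
    wf.inputs.inputnode.bvals = 'bvals'

    # any nipype execution plugin works; MultiProc runs the per-ROI XFibres
    # MapNode iterations concurrently on a single machine
    wf.run(plugin='MultiProc', plugin_args={'n_procs': 8})

The parallelism comes from slice_dwi: SplitROIs cuts the masked dMRI into 5x5x1 ROIs, XFibres runs as a MapNode over those pieces, and merge_and_mean_parallel reassembles the per-ROI samples with MergeROIs before the dyadic vectors are computed.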
