Commit 1f00894 (parent 6e8c5ae)

bedpostx_parallel: make computation of the dyads uncertainty optional

4 files changed: +53 additions, −41 deletions

nipype/interfaces/afni/preprocess.py

Lines changed: 5 additions & 5 deletions (whitespace-only cleanup: each removed/added pair renders identically)

@@ -1935,7 +1935,7 @@ def _overload_extension(self, value):

     def _gen_filename(self, name):
         return os.path.abspath(super(AFNItoNIFTI, self)._gen_filename(name))
-
+
 class EvalInputSpec(AFNICommandInputSpec):
     in_file_a = File(desc='input file to 1deval',
                      argstr='-a %s', position=0, mandatory=True, exists=True)
@@ -1955,15 +1955,15 @@ class EvalInputSpec(AFNICommandInputSpec):
                           requires=['start_idx'])
     single_idx = traits.Int(desc='volume index for in_file_a')
     other = File(desc='other options', argstr='')
-
+
 class Eval(AFNICommand):
     """Evaluates an expression that may include columns of data from one or more text files

     see AFNI Documentation: <http://afni.nimh.nih.gov/pub/dist/doc/program_help/1deval.html>
-
+
     Examples
     ========
-
+
     >>> from nipype.interfaces import afni as afni
     >>> eval = afni.Eval()
     >>> eval.inputs.in_file_a = 'seed.1D'
@@ -1996,7 +1996,7 @@ def _parse_inputs(self, skip=None):
         """
         return super(Eval, self)._parse_inputs(
             skip=('start_idx', 'stop_idx', 'out1D', 'other'))
-
+
 class MeansInputSpec(AFNICommandInputSpec):
     in_file_a = File(desc='input file to 3dMean',
                      argstr='%s',
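The hunks above only touch whitespace around the Eval interface (the AFNI 1deval wrapper). For orientation, a minimal usage sketch follows; 'seed.1D' comes from the docstring example in the hunk, while in_file_b, expr and out_file are assumed traits of the interface and the file names are placeholders, not part of this commit.

    from nipype.interfaces import afni

    # Hedged sketch of the Eval (1deval) interface whose spec is shown above.
    ev = afni.Eval()
    ev.inputs.in_file_a = 'seed.1D'   # -a, mandatory (see hunk)
    ev.inputs.in_file_b = 'resp.1D'   # -b, assumed trait; placeholder file name
    ev.inputs.expr = 'a*b'            # expression passed to 1deval (assumed trait)
    ev.inputs.out_file = 'out.1D'     # assumed output-name trait
    print(ev.cmdline)                 # inspect the 1deval call this would build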

nipype/interfaces/afni/svm.py

Lines changed: 11 additions & 11 deletions (whitespace-only cleanup: each removed/added pair renders identically)

@@ -24,28 +24,28 @@

 warn = warnings.warn
 warnings.filterwarnings('always', category=UserWarning)
-
+
 class SVMTrainInputSpec(AFNICommandInputSpec):
     #training options
-    ttype = traits.Str(desc='tname: classification or regression',
+    ttype = traits.Str(desc='tname: classification or regression',
                        argstr='-type %s',
                        mandatory=True)
     in_file = File(desc='A 3D+t AFNI brik dataset to be used for training.',
                    argstr='-trainvol %s',
                    mandatory=True,
                    exists=True,
                    copyfile=False)
-    out_file = File(name_template="%s_vectors",
+    out_file = File(name_template="%s_vectors",
                     desc='output sum of weighted linear support vectors file name',
                     argstr='-bucket %s',
-                    suffix='_bucket',
+                    suffix='_bucket',
                     name_source="in_file")
     model = File(name_template="%s_model",
                  desc='basename for the brik containing the SVM model',
                  argstr='-model %s',
-                 suffix='_model',
-                 name_source="in_file")
-    alphas = File(name_template="%s_alphas",
+                 suffix='_model',
+                 name_source="in_file")
+    alphas = File(name_template="%s_alphas",
                   desc='output alphas file name',
                   argstr='-alpha %s',
                   suffix='_alphas',
@@ -62,20 +62,20 @@ class SVMTrainInputSpec(AFNICommandInputSpec):
                  exists=True)
     censor = File(desc='.1D censor file that allows the user to ignore certain samples in the training data.',
                   argstr='-censor %s',
-                  exists=True)
+                  exists=True)
     kernel = traits.Str(desc='string specifying type of kernel function:linear, polynomial, rbf, sigmoid',
                         argstr='-kernel %s')
     max_iterations = traits.Int(desc='Specify the maximum number of iterations for the optimization.',
                                 argstr='-max_iterations %d')
     w_out = traits.Bool(desc='output sum of weighted linear support vectors',
                         argstr='-wout')
     options = traits.Str(desc='additional options for SVM-light', argstr='%s')
-
+
 class SVMTrainOutputSpec(TraitedSpec):
     out_file = File(desc='sum of weighted linear support vectors file name')
     model = File(desc='brik containing the SVM model file name')
     alphas = File(desc='output alphas file name')
-
+
 class SVMTrain(AFNICommand):
     """Temporally predictive modeling with the support vector machine
     SVM Train Only
@@ -130,7 +130,7 @@ class SVMTestInputSpec(AFNICommandInputSpec):
     multiclass = traits.Bool(desc='Specifies multiclass algorithm for classification',
                              argstr='-multiclass %s')
     options = traits.Str(desc='additional options for SVM-light', argstr='%s')
-
+
 class SVMTest(AFNICommand):
     """Temporally predictive modeling with the support vector machine
     SVM Test Only
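Since most of the SVMTrain input spec (the 3dsvm wrapper) is visible in the hunks above, a hedged usage sketch follows; only ttype, in_file, kernel and max_iterations appear in the diff, and the dataset name is a placeholder rather than anything from this commit.

    from nipype.interfaces import afni

    # Hedged sketch of the SVMTrain (3dsvm) interface whose spec is edited above.
    svm = afni.SVMTrain()
    svm.inputs.ttype = 'regression'    # -type, mandatory (see hunk)
    svm.inputs.in_file = 'run1+orig'   # -trainvol, mandatory; placeholder dataset name
    svm.inputs.kernel = 'linear'       # -kernel
    svm.inputs.max_iterations = 100    # -max_iterations
    # model, alphas and out_file names are auto-generated from in_file via the
    # name_template/name_source pairs shown in the diff.
    print(svm.cmdline)  # inspect the 3dsvm call; real runs usually need more inputs (e.g. training labels)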

nipype/interfaces/camino/connectivity.py

Lines changed: 1 addition & 1 deletion (whitespace-only cleanup: the removed/added pair renders identically)

@@ -37,7 +37,7 @@ class ConmatInputSpec(CommandLineInputSpec):
                             desc=("Tract statistic to use. See TractStats for other options."),
                             requires=['scalar_file'],xor=['tract_prop'])

-    tract_prop = traits.Enum("length", "endpointsep", argstr='-tractstat %s',
+    tract_prop = traits.Enum("length", "endpointsep", argstr='-tractstat %s',
                              units='NA', xor=['tract_stat'],
                              desc=('Tract property average to compute in the connectivity matrix. '
                                    'See TractStats for details.'))
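The single changed line belongs to Camino's Conmat input spec. A brief sketch of how tract_prop is typically set is given below; in_file and target_file are assumed from the Conmat interface and the file names are placeholders.

    from nipype.interfaces import camino

    # Hedged sketch around the tract_prop trait shown above.
    conmat = camino.Conmat()
    conmat.inputs.in_file = 'tracts.Bfloat'         # assumed streamline input; placeholder name
    conmat.inputs.target_file = 'parcellation.nii'  # assumed target ROI image; placeholder name
    conmat.inputs.tract_prop = 'length'             # -tractstat length; mutually exclusive with tract_stat (see hunk)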

nipype/workflows/dmri/fsl/dti.py

Lines changed: 36 additions & 24 deletions (adds a compute_all_outputs flag so the dyads dispersion and per-sample outputs can be skipped)

@@ -124,7 +124,9 @@ def merge_and_mean(name='mm'):
     return wf


-def bedpostx_parallel(name='bedpostx_parallel', params={}):
+def bedpostx_parallel(name='bedpostx_parallel',
+                      compute_all_outputs=True,
+                      params={}):
     """
     Does the same as :func:`.create_bedpostx_pipeline` by splitting
     the input dMRI in small ROIs that are better suited for parallel
@@ -163,11 +165,16 @@ def bedpostx_parallel(name='bedpostx_parallel', params={}):
     xfibres = pe.MapNode(xfib_if, name='xfibres',
                          iterfield=['dwi', 'mask'])

-    make_dyads = pe.MapNode(fsl.MakeDyadicVectors(), name="make_dyads",
+    mrg_dyads = pe.MapNode(misc.MergeROIs(), name='Merge_dyads',
+                           iterfield=['in_files'])
+    make_dyads = pe.MapNode(fsl.MakeDyadicVectors(), name="Make_dyads",
                             iterfield=['theta_vol', 'phi_vol'])
-    out_fields = ['dyads', 'dyads_disp',
-                  'thsamples', 'phsamples', 'fsamples',
-                  'mean_thsamples', 'mean_phsamples', 'mean_fsamples']
+    out_fields = ['dyads']
+
+    if compute_all_outputs:
+        out_fields += ['dyads_disp', 'thsamples', 'phsamples',
+                       'fsamples', 'mean_thsamples', 'mean_phsamples',
+                       'mean_fsamples']

     outputnode = pe.Node(niu.IdentityInterface(fields=out_fields),
                          name='outputnode')
@@ -180,30 +187,35 @@ def bedpostx_parallel(name='bedpostx_parallel', params={}):
                                  ('out_masks', 'mask')]),
         (inputnode, xfibres, [('bvecs', 'bvecs'),
                               ('bvals', 'bvals')]),
-        (inputnode, make_dyads, [('mask', 'mask')])
+        (inputnode, make_dyads, [('mask', 'mask')]),
+        (inputnode, mrg_dyads, [('mask', 'in_reference')]),
+        (xfibres, mrg_dyads, [(('dyads', transpose), 'in_files')]),
+        (slice_dwi, mrg_dyads, [('out_index', 'in_index')]),
+        (mrg_dyads, outputnode, [('merged_file', 'dyads')])
     ])

-    mms = {}
-    for k in ['thsamples', 'phsamples', 'fsamples']:
-        mms[k] = merge_and_mean_parallel(k)
+    if compute_all_outputs:
+        mms = {}
+        for k in ['thsamples', 'phsamples', 'fsamples']:
+            mms[k] = merge_and_mean_parallel(k)
+            wf.connect([
+                (slice_dwi, mms[k], [('out_index', 'inputnode.in_index')]),
+                (inputnode, mms[k], [('mask', 'inputnode.in_reference')]),
+                (xfibres, mms[k], [(k, 'inputnode.in_files')]),
+                (mms[k], outputnode, [('outputnode.merged', k),
+                                      ('outputnode.mean', 'mean_%s' % k)])
+
+            ])
+
+        # m_mdsamples = pe.Node(fsl.Merge(dimension="z"),
+        #                       name="merge_mean_dsamples")
         wf.connect([
-            (slice_dwi, mms[k], [('out_index', 'inputnode.in_index')]),
-            (inputnode, mms[k], [('mask', 'inputnode.in_reference')]),
-            (xfibres, mms[k], [(k, 'inputnode.in_files')]),
-            (mms[k], outputnode, [('outputnode.merged', k),
-                                  ('outputnode.mean', 'mean_%s' % k)])
-
+            (mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]),
+            (mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]),
+            # (xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]),
+            (make_dyads, outputnode, [('dispersion', 'dyads_disp')])
         ])
-
-    # m_mdsamples = pe.Node(fsl.Merge(dimension="z"),
-    #                       name="merge_mean_dsamples")
-    wf.connect([
-        (mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]),
-        (mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]),
-        #(xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]),
-        (make_dyads, outputnode, [('dyads', 'dyads'),
-                                  ('dispersion', 'dyads_disp')])
-    ])
     return wf
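The functional change above adds a compute_all_outputs switch so that only the merged dyads are wired to the output node when the per-sample merging and dyads dispersion are not needed. A minimal usage sketch follows; the params key and the input file names are placeholders and assumptions, not taken from this commit, while the inputnode field names follow the connections shown in the diff.

    from nipype.workflows.dmri.fsl.dti import bedpostx_parallel

    # Build the parallelized bedpostx workflow without the extra outputs:
    # with compute_all_outputs=False only 'dyads' is exposed on outputnode.
    wf = bedpostx_parallel(name='bedpostx_fast',
                           compute_all_outputs=False,
                           params={'n_fibres': 2})   # assumed XFibres parameter

    # Placeholder input file names.
    wf.inputs.inputnode.dwi = 'dwi.nii.gz'
    wf.inputs.inputnode.mask = 'mask.nii.gz'
    wf.inputs.inputnode.bvecs = 'bvecs'
    wf.inputs.inputnode.bvals = 'bvals'

    wf.run(plugin='MultiProc')  # one xfibres job per ROI runs in parallel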