@@ -22,7 +22,7 @@
 import nipype.interfaces.spm as spm          # spm
 import nipype.workflows.fmri.spm as spm_wf   # spm
 import nipype.interfaces.fsl as fsl          # fsl
-import nipype.interfaces.utility as util     # utility
+from nipype.interfaces import utility as niu # Utilities
 import nipype.pipeline.engine as pe          # pypeline engine
 import nipype.algorithms.rapidart as ra      # artifact detection
 import nipype.algorithms.modelgen as model   # model specification
@@ -68,15 +68,15 @@
 and register all images to the mean image.
 """
 
-realign = pe.Node(interface=spm.Realign(), name="realign")
+realign = pe.Node(spm.Realign(), name="realign")
 realign.inputs.register_to_mean = True
 
 """Use :class:`nipype.algorithms.rapidart` to determine which of the
 images in the functional series are outliers based on deviations in
 intensity or movement.
 """
 
-art = pe.Node(interface=ra.ArtifactDetect(), name="art")
+art = pe.Node(ra.ArtifactDetect(), name="art")
 art.inputs.use_differences = [True, False]
 art.inputs.use_norm = True
 art.inputs.norm_threshold = 1
@@ -88,14 +88,14 @@
 :class:`nipype.interfaces.fsl.BET`.
 """
 
-skullstrip = pe.Node(interface=fsl.BET(), name="skullstrip")
+skullstrip = pe.Node(fsl.BET(), name="skullstrip")
 skullstrip.inputs.mask = True
 
 """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid
 body registration of the functional data to the structural data.
 """
 
-coregister = pe.Node(interface=spm.Coregister(), name="coregister")
+coregister = pe.Node(spm.Coregister(), name="coregister")
 coregister.inputs.jobtype = 'estimate'
 
 
@@ -134,40 +134,40 @@
 :class:`nipype.interfaces.spm.SpecifyModel`.
 """
 
-modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec")
+modelspec = pe.Node(model.SpecifySPMModel(), name="modelspec")
 modelspec.inputs.concatenate_runs = True
 
 """Generate a first level SPM.mat file for analysis
 :class:`nipype.interfaces.spm.Level1Design`.
 """
 
-level1design = pe.Node(interface=spm.Level1Design(), name="level1design")
+level1design = pe.Node(spm.Level1Design(), name="level1design")
 level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
 
 """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the
 parameters of the model.
 """
 
-level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate")
+level1estimate = pe.Node(spm.EstimateModel(), name="level1estimate")
 level1estimate.inputs.estimation_method = {'Classical': 1}
 
 """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the
 first level contrasts specified in a few steps above.
 """
 
-contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate")
+contrastestimate = pe.Node(spm.EstimateContrast(), name="contrastestimate")
 
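The contrasts fed to this node are defined earlier in the script, outside this hunk. For reference, nipype SPM contrasts are tuples of name, statistic type, condition names, and weights; a hypothetical example (the condition names here are illustrative, not from this commit):

cont1 = ('Task>Baseline', 'T', ['Task'], [1])
contrastestimate.inputs.contrasts = [cont1]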
 """Use :class:`nipype.interfaces.utility.Select` to select each contrast for
 reporting.
 """
 
-selectcontrast = pe.Node(interface=util.Select(), name="selectcontrast")
+selectcontrast = pe.Node(niu.Select(), name="selectcontrast")
 
 """Use :class:`nipype.interfaces.fsl.Overlay` to combine the statistical output of
 the contrast estimate and a background image into one volume.
 """
 
-overlaystats = pe.Node(interface=fsl.Overlay(), name="overlaystats")
+overlaystats = pe.Node(fsl.Overlay(), name="overlaystats")
 overlaystats.inputs.stat_thresh = (3, 10)
 overlaystats.inputs.show_negative_stats = True
 overlaystats.inputs.auto_thresh_bg = True
@@ -176,7 +176,7 @@
 statistical volumes for a report of the first-level results.
 """
 
-slicestats = pe.Node(interface=fsl.Slicer(), name="slicestats")
+slicestats = pe.Node(fsl.Slicer(), name="slicestats")
 slicestats.inputs.all_axial = True
 slicestats.inputs.image_width = 750
 
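Downstream, the thresholded overlay is rendered into an axial montage by the slicer node. A minimal wiring sketch, assuming the connection is made in the workflow connect lists later in the script:

level1.connect(overlaystats, 'out_file', slicestats, 'in_file')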
@@ -232,14 +232,14 @@
 """
 
 # Specify the location of the data.
-data_dir = os.path.abspath('data')
+# data_dir = os.path.abspath('data')
 # Specify the subject directories
 subject_list = ['s1', 's3']
 # Map field names to individual subject runs.
 info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]],
             struct=[['subject_id', 'struct']])
 
-infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource")
+infosource = pe.Node(niu.IdentityInterface(fields=['subject_id']), name="infosource")
 
 """Here we set up iteration over all the subjects. The following line
 is a particular example of the flexibility of the system. The
@@ -260,22 +260,21 @@
 functionality.
 """
 
-datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
+inputnode = pe.Node(niu.IdentityInterface(fields=['in_data']), name='inputnode')
+datasource = pe.Node(nio.DataGrabber(infields=['subject_id'],
                                      outfields=['func', 'struct']),
                      name='datasource')
-datasource.inputs.base_directory = data_dir
 datasource.inputs.template = '%s/%s.nii'
 datasource.inputs.template_args = info
 datasource.inputs.sort_filelist = True
 
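For orientation, DataGrabber joins base_directory with the template filled in from template_args, so for subject 's1' the fields above resolve to paths like the following (relative to base_directory):

#   func   -> s1/f3.nii, s1/f5.nii, s1/f7.nii, s1/f10.nii
#   struct -> s1/struct.nii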
 """We need to create a separate workflow to make the DARTEL template
 """
 
-datasource_dartel = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'],
+datasource_dartel = pe.MapNode(nio.DataGrabber(infields=['subject_id'],
                                                outfields=['struct']),
                                name='datasource_dartel',
                                iterfield=['subject_id'])
-datasource_dartel.inputs.base_directory = data_dir
 datasource_dartel.inputs.template = '%s/%s.nii'
 datasource_dartel.inputs.template_args = dict(struct=[['subject_id', 'struct']])
 datasource_dartel.inputs.sort_filelist = True
@@ -285,7 +284,7 @@
 This way we will be able to pick the right field flows later.
 """
 
-rename_dartel = pe.MapNode(util.Rename(format_string="subject_id_%(subject_id)s_struct"),
+rename_dartel = pe.MapNode(niu.Rename(format_string="subject_id_%(subject_id)s_struct"),
                            iterfield=['in_file', 'subject_id'],
                            name='rename_dartel')
 rename_dartel.inputs.subject_id = subject_list
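As a quick illustration of what the Rename format string produces per subject (the subject value is hypothetical):

"subject_id_%(subject_id)s_struct" % {'subject_id': 's1'}  # -> 'subject_id_s1_struct'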
@@ -307,7 +306,7 @@ def pickFieldFlow(dartel_flow_fields, subject_id):
 
     raise Exception
 
-pick_flow = pe.Node(util.Function(input_names=['dartel_flow_fields',
+pick_flow = pe.Node(niu.Function(input_names=['dartel_flow_fields',
                                                'subject_id'],
                                  output_names=['dartel_flow_field'],
                                  function=pickFieldFlow),
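A Function node runs pickFieldFlow in isolation: it only sees the declared input_names, any imports must live inside the function body, and the single output_names entry becomes an ordinary node output. A hypothetical downstream connection (target node and field names are assumptions, not taken from this commit):

# level1.connect(pick_flow, 'dartel_flow_field', normalize_node, 'flowfield_files')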
@@ -399,7 +398,9 @@ def subjectinfo(subject_id):
 level1 = pe.Workflow(name="level1")
 level1.base_dir = os.path.abspath('spm_dartel_tutorial/workingdir')
 
-level1.connect([(datasource_dartel, rename_dartel, [('struct', 'in_file')]),
+level1.connect([(inputnode, datasource, [('in_data', 'base_directory')]),
+                (inputnode, datasource_dartel, [('in_data', 'base_directory')]),
+                (datasource_dartel, rename_dartel, [('struct', 'in_file')]),
                 (rename_dartel, dartel_workflow, [('out_file', 'inputspec.structural_files')]),
 
                 (infosource, datasource, [('subject_id', 'subject_id')]),
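With base_directory now fed from inputnode rather than the hard-coded data_dir, the data location is supplied at run time. A minimal sketch, assuming the data still lives in ./data as before:

inputnode.inputs.in_data = os.path.abspath('data')  # point the grabbers at the data
level1.run()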
@@ -437,9 +438,9 @@ def subjectinfo(subject_id):
 the mean image would be copied to that directory.
 """
 
-datasink = pe.Node(interface=nio.DataSink(), name="datasink")
+datasink = pe.Node(nio.DataSink(), name="datasink")
 datasink.inputs.base_directory = os.path.abspath('spm_dartel_tutorial/l1output')
-report = pe.Node(interface=nio.DataSink(), name='report')
+report = pe.Node(nio.DataSink(), name='report')
 report.inputs.base_directory = os.path.abspath('spm_dartel_tutorial/report')
 report.inputs.parameterization = False
 
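DataSink files whatever is connected to it into subfolders of base_directory, using the dotted connection name as the folder path; an '@' element adds files to the parent folder without creating an extra level. A hypothetical example of the syntax (the actual connections appear later in the script):

# level1.connect(contrastestimate, 'spmT_images', datasink, 'contrasts.@T')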
@@ -501,10 +502,10 @@ def getstripdir(subject_id):
 """
 
 # setup a 1-sample t-test node
-onesamplettestdes = pe.Node(interface=spm.OneSampleTTestDesign(), name="onesampttestdes")
-l2estimate = pe.Node(interface=spm.EstimateModel(), name="level2estimate")
+onesamplettestdes = pe.Node(spm.OneSampleTTestDesign(), name="onesampttestdes")
+l2estimate = pe.Node(spm.EstimateModel(), name="level2estimate")
 l2estimate.inputs.estimation_method = {'Classical': 1}
-l2conestimate = pe.Node(interface=spm.EstimateContrast(), name="level2conestimate")
+l2conestimate = pe.Node(spm.EstimateContrast(), name="level2conestimate")
 cont1 = ('Group', 'T', ['mean'], [1])
 l2conestimate.inputs.contrasts = [cont1]
 l2conestimate.inputs.group_contrast = True
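These three nodes form the usual SPM second-level chain (design, then model estimation, then contrast estimation). A sketch of how they are typically wired together, assuming a second-level workflow named l2pipeline:

l2pipeline = pe.Workflow(name="level2")
l2pipeline.connect([
    (onesamplettestdes, l2estimate, [('spm_mat_file', 'spm_mat_file')]),
    (l2estimate, l2conestimate, [('spm_mat_file', 'spm_mat_file'),
                                 ('beta_images', 'beta_images'),
                                 ('residual_image', 'residual_image')]),
])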