@@ -70,11 +70,11 @@ def create_reg_workflow(name='registration'):
    register = pe.Workflow(name=name)

    inputnode = pe.Node(interface=niu.IdentityInterface(fields=['source_files',
-                                                                 'mean_image',
-                                                                 'anatomical_image',
-                                                                 'target_image',
-                                                                 'target_image_brain',
-                                                                 'config_file']),
+                                                                'mean_image',
+                                                                'anatomical_image',
+                                                                'target_image',
+                                                                'target_image_brain',
+                                                                'config_file']),
                        name='inputspec')
    outputnode = pe.Node(interface=niu.IdentityInterface(fields=['func2anat_transform',
                                                                 'anat2target_transform',
@@ -136,7 +136,7 @@ def create_reg_workflow(name='registration'):
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    register.connect(mean2anatbbr, 'out_matrix_file', convert2itk, 'transform_file')
-    register.connect(inputnode, 'mean_image',convert2itk, 'source_file')
+    register.connect(inputnode, 'mean_image', convert2itk, 'source_file')
    register.connect(stripper, 'out_file', convert2itk, 'reference_file')

    """
@@ -177,8 +177,7 @@ def create_reg_workflow(name='registration'):
    reg.inputs.num_threads = 2
    reg.plugin_args = {'qsub_args': '-pe orte 2'}
    register.connect(stripper, 'out_file', reg, 'moving_image')
-    register.connect(inputnode,'target_image_brain', reg,'fixed_image')
-
+    register.connect(inputnode, 'target_image_brain', reg, 'fixed_image')

    """
    Concatenate the affine and ants transforms into a list
@@ -190,7 +189,6 @@ def create_reg_workflow(name='registration'):
    register.connect(convert2itk, 'itk_transform', merge, 'in2')
    register.connect(reg, ('composite_transform', pickfirst), merge, 'in1')

-
    """
    Transform the mean image. First to anatomical and then to target
    """
@@ -202,7 +200,7 @@ def create_reg_workflow(name='registration'):
    warpmean.inputs.invert_transform_flags = [False, False]
    warpmean.inputs.terminal_output = 'file'

-    register.connect(inputnode,'target_image_brain', warpmean,'reference_image')
+    register.connect(inputnode, 'target_image_brain', warpmean, 'reference_image')
    register.connect(inputnode, 'mean_image', warpmean, 'input_image')
    register.connect(merge, 'out', warpmean, 'transforms')

@@ -218,11 +216,10 @@ def create_reg_workflow(name='registration'):
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = 'file'

-    register.connect(inputnode,'target_image_brain',warpall,'reference_image')
-    register.connect(inputnode,'source_files', warpall, 'input_image')
+    register.connect(inputnode, 'target_image_brain', warpall, 'reference_image')
+    register.connect(inputnode, 'source_files', warpall, 'input_image')
    register.connect(merge, 'out', warpall, 'transforms')

-
    """
    Assign all the output files
    """
@@ -240,6 +237,7 @@ def create_reg_workflow(name='registration'):
Get info for a given subject
"""

+
def get_subjectinfo(subject_id, base_dir, task_id, model_id):
    """Get info for a given subject

@@ -284,7 +282,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id):
    for idx in range(n_tasks):
        taskidx = np.where(taskinfo[:, 0] == 'task%03d' % (idx + 1))
        conds.append([condition.replace(' ', '_') for condition
-                      in taskinfo[taskidx[0], 2]]) # if 'junk' not in condition])
+                      in taskinfo[taskidx[0], 2]])  # if 'junk' not in condition])
    files = sorted(glob(os.path.join(base_dir,
                                     subject_id,
                                     'BOLD',
@@ -298,6 +296,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id):
Analyzes an open fmri dataset
"""

+
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None, subj_prefix='*',
                             hpcutoff=120., use_derivatives=True,
@@ -368,15 +367,15 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
-                                                        'task_id', 'model_id'],
-                                              outfields=['anat', 'bold', 'behav',
-                                                         'contrasts']),
-                              name='datasource')
+                                                       'task_id', 'model_id'],
+                                             outfields=['anat', 'bold', 'behav',
+                                                        'contrasts']),
+                             name='datasource')
    else:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
-                                                        'task_id', 'model_id'],
-                                              outfields=['anat', 'bold', 'behav']),
-                              name='datasource')
+                                                       'task_id', 'model_id'],
+                                             outfields=['anat', 'bold', 'behav']),
+                             name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'

@@ -388,19 +387,19 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                                            'contrasts': ('models/model%03d/'
                                                          'task_contrasts.txt')}
        datasource.inputs.template_args = {'anat': [['subject_id']],
-                                            'bold': [['subject_id', 'task_id']],
-                                            'behav': [['subject_id', 'model_id',
-                                                       'task_id', 'run_id']],
-                                            'contrasts': [['model_id']]}
+                                           'bold': [['subject_id', 'task_id']],
+                                           'behav': [['subject_id', 'model_id',
+                                                      'task_id', 'run_id']],
+                                           'contrasts': [['model_id']]}
    else:
        datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
                                            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                            'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                      'run%03d/cond*.txt')}
        datasource.inputs.template_args = {'anat': [['subject_id']],
-                                            'bold': [['subject_id', 'task_id']],
-                                            'behav': [['subject_id', 'model_id',
-                                                       'task_id', 'run_id']]}
+                                           'bold': [['subject_id', 'task_id']],
+                                           'behav': [['subject_id', 'model_id',
+                                                      'task_id', 'run_id']]}

    datasource.inputs.sort_filelist = True

@@ -443,7 +442,7 @@ def get_contrasts(contrast_file, task_id, conds):
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
-             con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
+            con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
@@ -469,7 +468,7 @@ def get_contrasts(contrast_file, task_id, conds):
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
-                         name="modelspec")
+                        name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
@@ -483,9 +482,9 @@ def check_behav_list(behav, run_id, conds):
        return behav_array.reshape(num_elements / num_conds, num_conds).tolist()

    reshape_behav = pe.Node(niu.Function(input_names=['behav', 'run_id', 'conds'],
-                                          output_names=['behav'],
-                                          function=check_behav_list),
-                              name='reshape_behav')
+                                         output_names=['behav'],
+                                         function=check_behav_list),
+                            name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
@@ -553,7 +552,7 @@ def sort_copes(copes, varcopes, contrasts):
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [('outputspec.dof_file',
-                                        'inputspec.dof_files'),
+                                       'inputspec.dof_files'),
                                      ])
                ])

@@ -576,9 +575,9 @@ def merge_files(copes, varcopes, zstats):

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                  'zstats'],
-                                      output_names=['out_files', 'splits'],
-                                      function=merge_files),
-                          name='merge_files')
+                                     output_names=['out_files', 'splits'],
+                                     function=merge_files),
+                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                 [('copes', 'copes'),
                  ('varcopes', 'varcopes'),
@@ -596,24 +595,23 @@ def split_files(in_files, splits):
                                     output_names=['copes', 'varcopes',
                                                   'zstats'],
                                     function=split_files),
-                         name='split_files')
+                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')

-
    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, model_id, task_id):
        subs = [('_subject_id_%s_' % subject_id, '')]
-         subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
+        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
-                      'mean'))
+                     'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
-                      'affine'))
+                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
@@ -699,7 +697,7 @@ def get_subs(subject_id, conds, model_id, task_id):
                        help="Model index" + defstr)
    parser.add_argument('-x', '--subjectprefix', default='sub*',
                        help="Subject prefix" + defstr)
-    parser.add_argument('-t', '--task', default=1, #nargs='+',
+    parser.add_argument('-t', '--task', default=1,
                        type=int, help="Task index" + defstr)
    parser.add_argument('--hpfilter', default=120.,
                        type=float, help="High pass filter cutoff (in secs)" + defstr)
@@ -729,16 +727,16 @@ def get_subs(subject_id, conds, model_id, task_id):
                          'task%03d' % int(args.task))
    derivatives = args.derivatives
    if derivatives is None:
-       derivatives = False
+        derivatives = False
    wf = analyze_openfmri_dataset(data_dir=os.path.abspath(args.datasetdir),
-                                   subject=args.subject,
-                                   model_id=int(args.model),
-                                   task_id=[int(args.task)],
-                                   subj_prefix=args.subjectprefix,
-                                   output_dir=outdir,
-                                   hpcutoff=args.hpfilter,
-                                   use_derivatives=derivatives,
-                                   fwhm=args.fwhm)
+                                  subject=args.subject,
+                                  model_id=int(args.model),
+                                  task_id=[int(args.task)],
+                                  subj_prefix=args.subjectprefix,
+                                  output_dir=outdir,
+                                  hpcutoff=args.hpfilter,
+                                  use_derivatives=derivatives,
+                                  fwhm=args.fwhm)
    wf.base_dir = work_dir
    if args.plugin_args:
        wf.run(args.plugin, plugin_args=eval(args.plugin_args))