 from nipype.interfaces.fsl.epi import EPIDeWarp
 from nipype.interfaces.io import FreeSurferSource
 from nipype.interfaces.c3 import C3dAffineTool
-from nipype.interfaces.utility import Merge, IdentityInterface
+from nipype.interfaces.utility import Merge, IdentityInterface, Rename
 from nipype.utils.filemanip import filename_to_list

 import numpy as np
 import scipy as sp
 import nibabel as nb
-from dcmstack.extract import default_extractor
-from dicom import read_file

 imports = ['import os',
            'import nibabel as nb',
@@ -89,6 +87,8 @@ def get_info(dicom_files):
     Slice Acquisition Times
     Spacing between slices
     """
+    from dcmstack.extract import default_extractor
+    from dicom import read_file
     meta = default_extractor(read_file(filename_to_list(dicom_files)[0],
                                        stop_before_pixels=True,
                                        force=True))
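
Moving the dcmstack/dicom imports inside get_info (hunk above) matters because nipype Function nodes execute only the function's own source on the worker, where module-level imports are not visible; it also means the script no longer needs dcmstack/dicom installed unless DICOM metadata is actually extracted. A minimal sketch of the pattern, assuming nipype and the legacy `dicom` (pydicom) module; `get_tr` and its node are hypothetical:

    from nipype import Node
    from nipype.interfaces.utility import Function

    def get_tr(dicom_file):
        # Deferred import: resolved in the worker process when the node runs,
        # so the pipeline module itself does not require dicom at import time.
        from dicom import read_file
        return float(read_file(dicom_file, stop_before_pixels=True).RepetitionTime)

    tr_node = Node(Function(input_names=['dicom_file'],
                            output_names=['tr'],
                            function=get_tr),
                   name='get_tr')
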
@@ -217,14 +217,17 @@ def extract_noise_components(realigned_file, mask_file, num_components=5,
     imgseries = nb.load(realigned_file)
     components = None
     for filename in filename_to_list(mask_file):
-        mask = nb.load(filename)
-        voxel_timecourses = imgseries.get_data()[np.nonzero(mask)]
-        voxel_timecourses = voxel_timecourses.byteswap().newbyteorder()
+        mask = nb.load(filename).get_data()
+        voxel_timecourses = imgseries.get_data()[mask > 0]
         voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0
         # remove mean and normalize by variance
         # voxel_timecourses.shape == [nvoxels, time]
         X = voxel_timecourses.T
-        X = (X - np.mean(X, axis=0))/np.std(X, axis=0)
+        stdX = np.std(X, axis=0)
+        stdX[stdX == 0] = 1.
+        stdX[np.isnan(stdX)] = 1.
+        stdX[np.isinf(stdX)] = 1.
+        X = (X - np.mean(X, axis=0))/stdX
         u, _, _ = sp.linalg.svd(X, full_matrices=False)
         if components is None:
             components = u[:, :num_components]
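
Two fixes land in this hunk: the mask is thresholded explicitly (`mask > 0`) on the loaded data array rather than applying np.nonzero to a Nifti1Image, and the variance normalization guards against zero, NaN, and infinite standard deviations so constant voxels no longer inject NaNs into the SVD. A standalone sketch of the guarded z-scoring, using only NumPy/SciPy; shapes and the random data are illustrative:

    import numpy as np
    from scipy import linalg

    def zscore_safe(X):
        # Demean and variance-normalize columns; degenerate stds map to 1 so
        # constant (e.g., masked-out) voxels stay finite instead of becoming NaN.
        stdX = np.std(X, axis=0)
        stdX[stdX == 0] = 1.
        stdX[np.isnan(stdX)] = 1.
        stdX[np.isinf(stdX)] = 1.
        return (X - np.mean(X, axis=0)) / stdX

    X = zscore_safe(np.random.randn(120, 500))     # time x voxels
    u, _, _ = linalg.svd(X, full_matrices=False)
    components = u[:, :5]                          # first num_components columns
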
@@ -323,6 +326,18 @@ def combine_hemi(left, right):
                fmt=','.join(['%d'] + ['%.10f'] * (all_data.shape[1] - 1)))
     return os.path.abspath(filename)

+def rename(in_files, suffix=None):
+    from nipype.utils.filemanip import (filename_to_list, split_filename,
+                                        list_to_filename)
+    out_files = []
+    for idx, filename in enumerate(filename_to_list(in_files)):
+        _, name, ext = split_filename(filename)
+        if suffix is None:
+            out_files.append(name + ('_%03d' % idx) + ext)
+        else:
+            out_files.append(name + suffix + ext)
+    return list_to_filename(out_files)
+

 """
 Creates the main preprocessing workflow
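
The new rename() helper drives the out_res_name inputs added to the filter nodes below: without a suffix it appends a zero-padded index, with a suffix it appends the suffix, and list_to_filename unwraps a single-element list. Illustrative behavior, with hypothetical paths:

    rename(['/data/sub01/rest.nii.gz', '/data/sub02/rest.nii.gz'])
    # -> ['rest_000.nii.gz', 'rest_001.nii.gz']

    rename('/data/sub01/rest.nii.gz', suffix='_filtermotart')
    # -> 'rest_filtermotart.nii.gz'
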
@@ -354,29 +369,41 @@ def create_workflow(files,

     wf = Workflow(name=name)

+    # Rename files in case they are named identically
+    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
+                          iterfield=['in_file', 'run'],
+                          name='rename')
+    name_unique.inputs.keep_ext = True
+    name_unique.inputs.run = range(1, len(files) + 1)
+    name_unique.inputs.in_file = files
+
     # Skip starting volumes
     remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1),
                          iterfield=['in_file'],
                          name="remove_volumes")
-    remove_vol.inputs.in_file = files
+    wf.connect(name_unique, 'out_file', remove_vol, 'in_file')

     # Run AFNI's despike. This is always run, however, whether this is fed to
     # realign depends on the input configuration
     despiker = MapNode(afni.Despike(outputtype='NIFTI_GZ'),
                        iterfield=['in_file'],
                        name='despike')
-    despiker.plugin_args = {'qsub_args':
-                            '-l nodes=1:ppn=%s' % os.environ['OMP_NUM_THREADS']}
+    num_threads = 4
+    despiker.inputs.environ = {'OMP_NUM_THREADS': '%d' % num_threads}
+    despiker.plugin_args = {'sbatch_args': '-c %d' % num_threads}
+    #despiker.plugin_args = {'qsub_args':
+    #                        '-l nodes=1:ppn=%s' % os.environ['OMP_NUM_THREADS']}

     wf.connect(remove_vol, 'roi_file', despiker, 'in_file')

     # Run Nipy joint slice timing and realignment algorithm
     realign = Node(nipy.SpaceTimeRealigner(), name='realign')
-    realign.inputs.tr = TR
-    realign.inputs.slice_times = slice_times
-    realign.inputs.slice_info = 2
-    realign.plugin_args = {'qsub_args':
-                           '-l nodes=1:ppn=%s' % os.environ['MKL_NUM_THREADS']}
+    if slice_times is not None:
+        realign.inputs.tr = TR
+        realign.inputs.slice_times = slice_times
+        realign.inputs.slice_info = 2
+    #realign.plugin_args = {'qsub_args': '-l nodes=1:ppn=%s' % os.environ['MKL_NUM_THREADS']}
+    realign.plugin_args = {'sbatch_args': '-c %s' % os.environ['MKL_NUM_THREADS']}

     if despike:
         wf.connect(despiker, 'out_file', realign, 'in_file')
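
Two changes in this hunk: a Rename MapNode gives each functional run a unique basename (rest_01, rest_02, ...) before any processing, so identically named inputs no longer collide downstream, and the scheduler directives move from Torque/PBS qsub_args to SLURM sbatch_args, with the despiker's OpenMP thread count pinned both in its runtime environment and in its core request. A sketch of that per-node resource pattern, assuming a SLURM-based nipype execution plugin; the demo node is a stand-in:

    from nipype import Node
    from nipype.interfaces import afni

    # Pin threads two ways: `environ` reaches the interface's runtime
    # environment, while plugin_args reaches the batch submission for this
    # node only (other nodes keep the plugin's defaults).
    num_threads = 4
    despike_demo = Node(afni.Despike(outputtype='NIFTI_GZ'), name='despike_demo')
    despike_demo.inputs.environ = {'OMP_NUM_THREADS': '%d' % num_threads}
    despike_demo.plugin_args = {'sbatch_args': '-c %d' % num_threads}
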
@@ -508,17 +535,19 @@ def create_workflow(files,
     wf.connect(art, 'outlier_files', createfilter1, 'outliers')

     # Filter the motion and art confounds and detrend
-    filter1 = MapNode(fsl.GLM(out_res_name='timeseries.nii.gz',
-                              out_f_name='F_mcart.nii.gz',
+    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii.gz',
                               out_pf_name='pF_mcart.nii.gz',
                               demean=True),
-                      iterfield=['in_file', 'design'],
+                      iterfield=['in_file', 'design', 'out_res_name'],
                       name='filtermotion')
     if fieldmap_images:
         wf.connect(dewarper, 'unwarped_file', filter1, 'in_file')
+        wf.connect(dewarper, ('unwarped_file', rename, '_filtermotart'),
+                   filter1, 'out_res_name')
     else:
         wf.connect(realign, 'out_file', filter1, 'in_file')
-
+        wf.connect(realign, ('out_file', rename, '_filtermotart'),
+                   filter1, 'out_res_name')
     wf.connect(createfilter1, 'out_files', filter1, 'design')
     wf.connect(masktransform, 'transformed_file', filter1, 'mask')

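Making out_res_name an iterfield fed through rename relies on nipype's function-modifier form of connect: a (source_output, function, *args) tuple applies the function to the upstream value before it reaches the destination input, so each run's residual file inherits its now-unique input name instead of a fixed timeseries.nii.gz that would collide across runs. A minimal self-contained sketch of that form; the demo nodes and add_suffix helper are hypothetical:

    from nipype import Workflow, Node
    from nipype.interfaces.utility import IdentityInterface

    def add_suffix(fname, suffix):
        # Same shape as rename() above: derive an output name from the input.
        from nipype.utils.filemanip import split_filename
        _, name, ext = split_filename(fname)
        return name + suffix + ext

    demo = Workflow(name='connect_modifier_demo')
    src = Node(IdentityInterface(fields=['out_file']), name='src')
    dst = Node(IdentityInterface(fields=['out_res_name']), name='dst')
    # Tuple form: take src.out_file, call add_suffix(out_file, '_filtermotart'),
    # and feed the result into dst.out_res_name.
    demo.connect(src, ('out_file', add_suffix, '_filtermotart'),
                 dst, 'out_res_name')
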
@@ -537,17 +566,19 @@
     wf.connect(wmcsftransform, 'transformed_file', createfilter2, 'mask_file')

     # Filter noise components from unsmoothed data
-    filter2 = MapNode(fsl.GLM(out_res_name='timeseries_unsmooth_cleaned.nii.gz',
-                              out_f_name='F.nii.gz',
+    filter2 = MapNode(fsl.GLM(out_f_name='F.nii.gz',
                               out_pf_name='pF.nii.gz',
                               demean=True),
-                      iterfield=['in_file', 'design'],
+                      iterfield=['in_file', 'design', 'out_res_name'],
                       name='filter_noise_nosmooth')
-
     if fieldmap_images:
         wf.connect(dewarper, 'unwarped_file', filter2, 'in_file')
+        wf.connect(dewarper, ('unwarped_file', rename, '_unsmooth_cleaned'),
+                   filter2, 'out_res_name')
     else:
         wf.connect(realign, 'out_file', filter2, 'in_file')
+        wf.connect(realign, ('out_file', rename, '_unsmooth_cleaned'),
+                   filter2, 'out_res_name')
     wf.connect(createfilter2, 'out_files', filter2, 'design')
     wf.connect(masktransform, 'transformed_file', filter2, 'mask')

@@ -588,13 +619,13 @@ def smooth_passthrough(in_file):
         wf.connect(realign, 'out_file', smooth, 'in_file')

     # Filter noise components from smoothed data
-    filter3 = MapNode(fsl.GLM(out_res_name='timeseries_smooth_cleaned.nii.gz',
-                              out_f_name='F.nii.gz',
+    filter3 = MapNode(fsl.GLM(out_f_name='F.nii.gz',
                               out_pf_name='pF.nii.gz',
                               demean=True),
-                      iterfield=['in_file', 'design'],
+                      iterfield=['in_file', 'design', 'out_res_name'],
                       name='filter_noise_smooth')
-
+    wf.connect(smooth, ('out_file', rename, '_cleaned'),
+               filter3, 'out_res_name')
     wf.connect(smooth, 'out_file', filter3, 'in_file')
     wf.connect(createfilter2, 'out_files', filter3, 'design')
     wf.connect(masktransform, 'transformed_file', filter3, 'mask')
@@ -746,9 +777,10 @@ def get_names(files, suffix):
     reg.inputs.fixed_image = \
         os.path.abspath('OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz')
     reg.inputs.num_threads = 4
-    reg.plugin_args = {'qsub_args':
-                       '-q max10 -l nodes=1:ppn=%d' % reg.inputs.num_threads,
-                       'overwrite': True}
+    #reg.plugin_args = {'qsub_args':
+    #                   '-q max10 -l nodes=1:ppn=%d' % reg.inputs.num_threads,
+    #                   'overwrite': True}
+    reg.plugin_args = {'sbatch_args': '-c %d' % reg.inputs.num_threads}

     # Convert T1.mgz to nifti for using with ANTS
     convert = Node(freesurfer.MRIConvert(out_type='niigz'), name='convert2nii')
@@ -788,9 +820,9 @@ def get_names(files, suffix):
         os.path.abspath('OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz')
     sample2mni.inputs.terminal_output = 'file'
     sample2mni.inputs.num_threads = 4
-    sample2mni.plugin_args = {'qsub_args':
-                              '-q max10 -l nodes=1:ppn=%d' % sample2mni.inputs.num_threads,
-                              'overwrite': True}
+    sample2mni.inputs.args = '--float'
+    sample2mni.plugin_args = {'sbatch_args': '-c %d' % sample2mni.inputs.num_threads}
+
     wf.connect(bandpass, 'out_file', sample2mni, 'input_image')
     wf.connect(merge, 'out', sample2mni, 'transforms')

@@ -813,7 +845,7 @@ def get_names(files, suffix):
     datasink = Node(interface=DataSink(), name="datasink")
     datasink.inputs.base_directory = sink_directory
     datasink.inputs.container = subject_id
-    datasink.inputs.substitutions = [('_target_subject_', '')]
+    # datasink.inputs.substitutions = [('_target_subject_', '')]
     #datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
     wf.connect(despiker, 'out_file', datasink, 'resting.qa.despike')
     wf.connect(realign, 'par_file', datasink, 'resting.qa.motion')
@@ -858,7 +890,7 @@ def get_names(files, suffix):
     datasink2 = Node(interface=DataSink(), name="datasink2")
     datasink2.inputs.base_directory = sink_directory
     datasink2.inputs.container = subject_id
-    datasink2.inputs.substitutions = [('_target_subject_', '')]
+    # datasink2.inputs.substitutions = [('_target_subject_', '')]
    #datasink2.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
     wf.connect(combiner, 'out_file',
                datasink2, 'resting.parcellations.grayo.@surface')
@@ -955,4 +987,4 @@ def create_resting_workflow(args, name='resting'):
     if args.plugin_args:
         wf.run(args.plugin, plugin_args=eval(args.plugin_args))
     else:
-        wf.run(args.plugin)
+        wf.run(args.plugin)