31
31
from nipype .utils .filemanip import filename_to_list
32
32
33
33
import numpy as np
34
+ import scipy as sp
35
+ import nibabel as nb
34
36
37
+ imports = ['import os' ,
38
+ 'import nibabel as nb' ,
39
+ 'import numpy as np' ,
40
+ 'import scipy as sp' ,
41
+ 'from nipype.utils.filemanip import filename_to_list'
42
+ ]
35
43
36
44
def median (in_files ):
37
45
"""Computes an average of the median of each realigned timeseries
@@ -47,10 +55,6 @@ def median(in_files):
47
55
out_file: a 3D Nifti file
48
56
"""
49
57
50
- import os
51
- import nibabel as nb
52
- import numpy as np
53
- from nipype .utils .filemanip import filename_to_list
54
58
average = None
55
59
for idx , filename in enumerate (filename_to_list (in_files )):
56
60
img = nb .load (filename )
@@ -98,9 +102,6 @@ def motion_regressors(motion_params, order=2, derivatives=2):
98
102
99
103
motion + d(motion)/dt + d2(motion)/dt2 (linear + quadratic)
100
104
"""
101
- from nipype .utils .filemanip import filename_to_list
102
- import numpy as np
103
- import os
104
105
out_files = []
105
106
for idx , filename in enumerate (filename_to_list (motion_params )):
106
107
params = np .genfromtxt (filename )
@@ -137,9 +138,6 @@ def build_filter1(motion_params, comp_norm, outliers):
137
138
components_file: a text file containing all the regressors
138
139
139
140
"""
140
- from nipype .utils .filemanip import filename_to_list
141
- import numpy as np
142
- import os
143
141
out_files = []
144
142
for idx , filename in enumerate (filename_to_list (motion_params )):
145
143
params = np .genfromtxt (filename )
@@ -172,14 +170,8 @@ def extract_noise_components(realigned_file, mask_file, num_components=6):
172
170
-------
173
171
components_file: a text file containing the noise components
174
172
"""
175
-
176
- import os
177
- from nibabel import load
178
- import numpy as np
179
- import scipy as sp
180
-
181
- imgseries = load (realigned_file )
182
- noise_mask = load (mask_file )
173
+ imgseries = nb .load (realigned_file )
174
+ noise_mask = nb .load (mask_file )
183
175
voxel_timecourses = imgseries .get_data ()[np .nonzero (noise_mask .get_data ())]
184
176
voxel_timecourses = voxel_timecourses .byteswap ().newbyteorder ()
185
177
voxel_timecourses [np .isnan (np .sum (voxel_timecourses , axis = 1 )), :] = 0
@@ -206,9 +198,6 @@ def extract_subrois(timeseries_file, label_file, indices):
206
198
The first four columns are: freesurfer index, i, j, k positions in the
207
199
label file
208
200
"""
209
- import os
210
- import nibabel as nb
211
- import numpy as np
212
201
img = nb .load (timeseries_file )
213
202
data = img .get_data ()
214
203
roiimg = nb .load (label_file )
@@ -228,11 +217,8 @@ def extract_subrois(timeseries_file, label_file, indices):
228
217
def combine_hemi (left , right ):
229
218
"""Combine left and right hemisphere time series into a single text file
230
219
"""
231
- import os
232
- from nibabel import load
233
- import numpy as np
234
- lh_data = load (left ).get_data ()
235
- rh_data = load (right ).get_data ()
220
+ lh_data = nb .load (left ).get_data ()
221
+ rh_data = nb .load (right ).get_data ()
236
222
237
223
indices = np .vstack ((1000000 + np .arange (0 , lh_data .shape [0 ])[:, None ],
238
224
2000000 + np .arange (0 , rh_data .shape [0 ])[:, None ]))
@@ -675,6 +661,42 @@ def create_workflow(files,
675
661
datasink2 , 'resting.parcellations.grayo.@surface' )
676
662
return wf
677
663
664
+
665
+ """
666
+ Creates the full workflow including getting information from dicom files
667
+ """
668
+
669
def create_resting_workflow(args):
    """Build the full resting-state workflow from parsed command-line args.

    Acquisition parameters (TR, slice times, slice thickness) are taken from
    ``args`` unless a DICOM file is supplied, in which case they are read from
    it via ``get_info``. If slice thickness is still unknown, it falls back to
    the largest voxel dimension of the first functional run.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed arguments; must provide TR, slice_times, dicom_file, files,
        subject_id, n_vol, despike, lowpass_freq, highpass_freq, sink, and
        optionally field-map settings (field_maps, TE_diff, echo_spacing,
        sigma).

    Returns
    -------
    wf : nipype workflow
        The workflow produced by ``create_workflow``.
    """
    TR = args.TR
    slice_times = args.slice_times
    slice_thickness = None
    if args.dicom_file:
        # DICOM metadata overrides any values given on the command line.
        TR, slice_times, slice_thickness = get_info(args.dicom_file)

    if slice_thickness is None:
        # Fall back to the largest voxel dimension of the first run.
        # Use the module-level ``nb`` import, consistent with the rest
        # of this file (the local ``from nibabel import load`` was
        # redundant after imports were hoisted to module scope).
        img = nb.load(args.files[0])
        slice_thickness = max(img.get_header().get_zooms()[:3])

    kwargs = dict(files=[os.path.abspath(filename)
                         for filename in args.files],
                  subject_id=args.subject_id,
                  n_vol=args.n_vol,
                  despike=args.despike,
                  TR=TR,
                  slice_times=slice_times,
                  slice_thickness=slice_thickness,
                  lowpass_freq=args.lowpass_freq,
                  highpass_freq=args.highpass_freq,
                  sink_directory=os.path.abspath(args.sink))
    if args.field_maps:
        # Field-map correction is optional; add its settings only when
        # field-map images were supplied.
        kwargs.update(fieldmap_images=args.field_maps,
                      FM_TEdiff=args.TE_diff,
                      FM_echo_spacing=args.echo_spacing,
                      FM_sigma=args.sigma)
    wf = create_workflow(**kwargs)
    return wf
699
+
678
700
if __name__ == "__main__" :
679
701
from argparse import ArgumentParser
680
702
parser = ArgumentParser (description = __doc__ )
@@ -714,52 +736,13 @@ def create_workflow(files,
714
736
help = "field map sigma value" )
715
737
args = parser .parse_args ()
716
738
717
- TR = args .TR
718
- slice_times = args .slice_times
719
- slice_thickness = None
720
- if args .dicom_file :
721
- TR , slice_times , slice_thickness = get_info (args .dicom_file )
722
-
723
- if slice_thickness is None :
724
- from nibabel import load
725
- img = load (args .files [0 ])
726
- slice_thickness = max (img .get_header ().get_zooms ()[:3 ])
727
-
728
- if args .field_maps :
729
- wf = create_workflow ([os .path .abspath (filename ) for
730
- filename in args .files ],
731
- subject_id = args .subject_id ,
732
- n_vol = args .n_vol ,
733
- despike = args .despike ,
734
- TR = TR ,
735
- slice_times = slice_times ,
736
- slice_thickness = slice_thickness ,
737
- lowpass_freq = args .lowpass_freq ,
738
- highpass_freq = args .highpass_freq ,
739
- sink_directory = os .path .abspath (args .sink ),
740
- fieldmap_images = args .field_maps ,
741
- FM_TEdiff = args .TE_diff ,
742
- FM_echo_spacing = args .echo_spacing ,
743
- FM_sigma = args .sigma )
744
- else :
745
- wf = create_workflow ([os .path .abspath (filename ) for
746
- filename in args .files ],
747
- subject_id = args .subject_id ,
748
- n_vol = args .n_vol ,
749
- despike = args .despike ,
750
- TR = TR ,
751
- slice_times = slice_times ,
752
- slice_thickness = slice_thickness ,
753
- lowpass_freq = args .lowpass_freq ,
754
- highpass_freq = args .highpass_freq ,
755
- sink_directory = os .path .abspath (args .sink ))
739
+ wf = create_resting_workflow (args )
756
740
757
741
if args .work_dir :
758
742
work_dir = os .path .abspath (args .work_dir )
759
743
else :
760
744
work_dir = os .getcwd ()
761
745
762
- wf .config ['execution' ].update (** {'remove_unnecessary_outputs' : False })
763
746
wf .base_dir = work_dir
764
747
exec args .plugin_args
765
748
wf .run (** plugin_args )
0 commit comments