Commit 7603c3b

fix: updated the resting script to map to cortical surfaces
1 parent 44c0fdd commit 7603c3b

File tree

1 file changed: +186 -7 lines changed


examples/rsfmri_conn_ants_fs_preprocessing.py

Lines changed: 186 additions & 7 deletions
@@ -269,6 +269,63 @@ def rename(in_files, suffix=None):
             out_files.append(name + suffix + ext)
     return list_to_filename(out_files)
 
+
+def get_aparc_aseg(files):
+    """Return the aparc+aseg.mgz file"""
+    for name in files:
+        if 'aparc+aseg.mgz' in name:
+            return name
+    raise ValueError('aparc+aseg.mgz not found')
+
+
+def extract_subrois(timeseries_file, label_file, indices):
+    """Extract voxel time courses for each subcortical roi index
+
+    Parameters
+    ----------
+
+    timeseries_file: a 4D Nifti file
+    label_file: a 3D file containing rois in the same space/size of the 4D file
+    indices: a list of indices for ROIs to extract.
+
+    Returns
+    -------
+    out_file: a text file containing time courses for each voxel of each roi
+        The first four columns are: freesurfer index, i, j, k positions in the
+        label file
+    """
+    img = nb.load(timeseries_file)
+    data = img.get_data()
+    roiimg = nb.load(label_file)
+    rois = roiimg.get_data()
+    out_ts_file = os.path.join(os.getcwd(), 'subcortical_timeseries.txt')
+    with open(out_ts_file, 'wt') as fp:
+        for fsindex in indices:
+            ijk = np.nonzero(rois == fsindex)
+            ts = data[ijk]
+            for i0, row in enumerate(ts):
+                fp.write('%d,%d,%d,%d,' % (fsindex, ijk[0][i0],
+                                           ijk[1][i0], ijk[2][i0]) +
+                         ','.join(['%.10f' % val for val in row]) + '\n')
+    return out_ts_file
+
+
+def combine_hemi(left, right):
+    """Combine left and right hemisphere time series into a single text file
+    """
+    lh_data = nb.load(left).get_data()
+    rh_data = nb.load(right).get_data()
+
+    indices = np.vstack((1000000 + np.arange(0, lh_data.shape[0])[:, None],
+                         2000000 + np.arange(0, rh_data.shape[0])[:, None]))
+    all_data = np.hstack((indices, np.vstack((lh_data.squeeze(),
+                                              rh_data.squeeze()))))
+    filename = 'combined_surf.txt'
+    np.savetxt(filename, all_data,
+               fmt=','.join(['%d'] + ['%.10f'] * (all_data.shape[1] - 1)))
+    return os.path.abspath(filename)
+
+
 def create_reg_workflow(name='registration'):
     """Create a FEAT preprocessing workflow together with freesurfer
 
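As a quick check on the text format these new helpers write, here is a minimal sketch (not part of the commit) that runs combine_hemi on two tiny synthetic images. It assumes combine_hemi and the script's module-level imports (nb, np, os) are in scope, and the file names are throwaway:

import numpy as np
import nibabel as nb

# two fake "surface-sampled" images: 3 vertices x 1 x 1 x 4 time points each
nb.save(nb.Nifti1Image(np.random.rand(3, 1, 1, 4), np.eye(4)), 'lh_demo.nii.gz')
nb.save(nb.Nifti1Image(np.random.rand(3, 1, 1, 4), np.eye(4)), 'rh_demo.nii.gz')

out = combine_hemi('lh_demo.nii.gz', 'rh_demo.nii.gz')
# first column encodes hemisphere + vertex: 1000000 + i for lh, 2000000 + i for rh
print(np.loadtxt(out, delimiter=',')[:, 0].astype(int))
# -> [1000000 1000001 1000002 2000000 2000001 2000002]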
@@ -308,11 +365,13 @@ def create_reg_workflow(name='registration'):
                      name='inputspec')
 
     outputnode = Node(interface=IdentityInterface(fields=['func2anat_transform',
+                                                          'out_reg_file',
                                                           'anat2target_transform',
                                                           'transforms',
                                                           'transformed_mean',
                                                           'segmentation_files',
-                                                          'anat2target'
+                                                          'anat2target',
+                                                          'aparc'
                                                           ]),
                      name='outputspec')
 
@@ -369,6 +428,18 @@ def create_reg_workflow(name='registration'):
     register.connect(binarize, 'out_file', applyxfm, 'target_file')
     register.connect(inputnode, 'mean_image', applyxfm, 'source_file')
 
+    """
+    Apply inverse transform to aparc file
+    """
+    aparcxfm = Node(freesurfer.ApplyVolTransform(inverse=True,
+                                                 interp='nearest'),
+                    name='aparc_inverse_transform')
+    register.connect(inputnode, 'subjects_dir', aparcxfm, 'subjects_dir')
+    register.connect(bbregister, 'out_reg_file', aparcxfm, 'reg_file')
+    register.connect(fssource, ('aparc_aseg', get_aparc_aseg),
+                     aparcxfm, 'target_file')
+    register.connect(inputnode, 'mean_image', aparcxfm, 'source_file')
+
     """
     Convert the BBRegister transformation to ANTS ITK format
     """
@@ -453,9 +524,14 @@ def create_reg_workflow(name='registration'):
 
     register.connect(reg, 'warped_image', outputnode, 'anat2target')
     register.connect(warpmean, 'output_image', outputnode, 'transformed_mean')
-    register.connect(applyxfm, 'transformed_file', outputnode, 'segmentation_files')
+    register.connect(applyxfm, 'transformed_file',
+                     outputnode, 'segmentation_files')
+    register.connect(aparcxfm, 'transformed_file',
+                     outputnode, 'aparc')
     register.connect(bbregister, 'out_fsl_file',
                      outputnode, 'func2anat_transform')
+    register.connect(bbregister, 'out_reg_file',
+                     outputnode, 'out_reg_file')
     register.connect(reg, 'composite_transform',
                      outputnode, 'anat2target_transform')
     register.connect(merge, 'out', outputnode, 'transforms')
@@ -477,10 +553,12 @@ def create_workflow(files,
                     norm_threshold=1,
                     num_components=5,
                     vol_fwhm=None,
+                    surf_fwhm=None,
                     lowpass_freq=-1,
                     highpass_freq=-1,
                     subjects_dir=None,
                     sink_directory=os.getcwd(),
+                    target_subject=['fsaverage3', 'fsaverage4'],
                     name='resting'):
 
     wf = Workflow(name=name)
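Here surf_fwhm is later passed to SampleToSurface as smooth_surf, and target_subject feeds the target-subject iterable. A call using the new arguments would look like the sketch below (placeholder inputs; values mirror the updated __main__ block at the bottom of this diff):

wf = create_workflow(files, anat_file, target_file, subject_id,
                     2.0, 33, vol_fwhm=6.0, surf_fwhm=15.0,
                     lowpass_freq=0.1, highpass_freq=0.01,
                     subjects_dir=fsdir,
                     target_subject=['fsaverage4'],
                     sink_directory=os.getcwd(),
                     name='resting_' + subject_id)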
@@ -647,7 +725,6 @@ def merge_files(in1, in2):
 
     wf.connect(bandpass, 'out_files', smooth, 'in_files')
 
-
     collector = Node(Merge(2), name='collect_streams')
     wf.connect(smooth, 'smoothed_files', collector, 'in1')
     wf.connect(bandpass, 'out_files', collector, 'in2')
@@ -684,6 +761,93 @@ def merge_files(in1, in2):
     # extract target space ROIs
     # combine subcortical and cortical rois into a single cifti file
 
+    #######
+    # Convert aparc to subject functional space
+
+    # Sample the average time series in aparc ROIs
+    sampleaparc = MapNode(freesurfer.SegStats(default_color_table=True),
+                          iterfield=['in_file', 'summary_file',
+                                     'avgwf_txt_file'],
+                          name='aparc_ts')
+    sampleaparc.inputs.segment_id = ([8] + range(10, 14) + [17, 18, 26, 47] +
+                                     range(49, 55) + [58] + range(1001, 1036) +
+                                     range(2001, 2036))
+
+    wf.connect(registration, 'outputspec.aparc',
+               sampleaparc, 'segmentation_file')
+    wf.connect(collector, 'out', sampleaparc, 'in_file')
+
+    def get_names(files, suffix):
+        """Generate appropriate names for output files
+        """
+        from nipype.utils.filemanip import (split_filename, filename_to_list,
+                                            list_to_filename)
+        out_names = []
+        for filename in files:
+            _, name, _ = split_filename(filename)
+            out_names.append(name + suffix)
+        return list_to_filename(out_names)
+
+    wf.connect(collector, ('out', get_names, '_avgwf.txt'),
+               sampleaparc, 'avgwf_txt_file')
+    wf.connect(collector, ('out', get_names, '_summary.stats'),
+               sampleaparc, 'summary_file')
+
+    # Sample the time series onto the surface of the target surface. Performs
+    # sampling into left and right hemisphere
+    target = Node(IdentityInterface(fields=['target_subject']), name='target')
+    target.iterables = ('target_subject', filename_to_list(target_subject))
+
+    samplerlh = MapNode(freesurfer.SampleToSurface(),
+                        iterfield=['source_file'],
+                        name='sampler_lh')
+    samplerlh.inputs.sampling_method = "average"
+    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
+    samplerlh.inputs.sampling_units = "frac"
+    samplerlh.inputs.interp_method = "trilinear"
+    samplerlh.inputs.smooth_surf = surf_fwhm
+    #samplerlh.inputs.cortex_mask = True
+    samplerlh.inputs.out_type = 'niigz'
+    samplerlh.inputs.subjects_dir = subjects_dir
+
+    samplerrh = samplerlh.clone('sampler_rh')
+
+    samplerlh.inputs.hemi = 'lh'
+    wf.connect(collector, 'out', samplerlh, 'source_file')
+    wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file')
+    wf.connect(target, 'target_subject', samplerlh, 'target_subject')
+
+    samplerrh.set_input('hemi', 'rh')
+    wf.connect(collector, 'out', samplerrh, 'source_file')
+    wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file')
+    wf.connect(target, 'target_subject', samplerrh, 'target_subject')
+
+    # Combine left and right hemisphere to text file
+    combiner = MapNode(Function(input_names=['left', 'right'],
+                                output_names=['out_file'],
+                                function=combine_hemi,
+                                imports=imports),
+                       iterfield=['left', 'right'],
+                       name="combiner")
+    wf.connect(samplerlh, 'out_file', combiner, 'left')
+    wf.connect(samplerrh, 'out_file', combiner, 'right')
+
+    # Sample the time series file for each subcortical roi
+    ts2txt = MapNode(Function(input_names=['timeseries_file', 'label_file',
+                                           'indices'],
+                              output_names=['out_file'],
+                              function=extract_subrois,
+                              imports=imports),
+                     iterfield=['timeseries_file'],
+                     name='getsubcortts')
+    ts2txt.inputs.indices = [8] + range(10, 14) + [17, 18, 26, 47] +\
+        range(49, 55) + [58]
+    ts2txt.inputs.label_file = \
+        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
+                         '2mm_v2.nii.gz'))
+    wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file')
+
+    ######
 
     # Save the relevant data into an output directory
     datasink = Node(interface=DataSink(), name="datasink")
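The combiner output is plain text in the layout defined by combine_hemi above, so downstream tools can split it back into hemispheres using the index column. A small sketch (not part of the commit, assuming a combined_surf.txt produced by this workflow):

import numpy as np

data = np.loadtxt('combined_surf.txt', delimiter=',')
keys = data[:, 0].astype(int)   # 1000000 + vertex (lh) or 2000000 + vertex (rh)
ts = data[:, 1:]                # one row of time points per vertex
lh_ts = ts[keys < 2000000]
rh_ts = ts[keys >= 2000000]
print(lh_ts.shape, rh_ts.shape)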
@@ -710,6 +874,20 @@ def merge_files(in1, in2):
     wf.connect(createfilter2, 'out_files',
                datasink, 'resting.regress.@compcorr')
     wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
+    wf.connect(sampleaparc, 'summary_file',
+               datasink, 'resting.parcellations.aparc')
+    wf.connect(sampleaparc, 'avgwf_txt_file',
+               datasink, 'resting.parcellations.aparc.@avgwf')
+    wf.connect(ts2txt, 'out_file',
+               datasink, 'resting.parcellations.grayo.@subcortical')
+
+    datasink2 = Node(interface=DataSink(), name="datasink2")
+    datasink2.inputs.base_directory = sink_directory
+    datasink2.inputs.container = subject_id
+    #datasink2.inputs.substitutions = [('_target_subject_', '')]
+    #datasink2.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
+    wf.connect(combiner, 'out_file',
+               datasink2, 'resting.parcellations.grayo.@surface')
     return wf
 
 
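On the DataSink destination strings used above: each '.' adds a folder level, while an '@token' only labels the connection and does not create a further subfolder, so the '@avgwf' and '@subcortical' files land next to their siblings. A toy, self-contained sketch (not part of the commit, hypothetical names) illustrating this behaviour:

import os
from nipype.pipeline.engine import Node, Workflow
from nipype.interfaces.utility import Function
from nipype.interfaces.io import DataSink

def make_file():
    import os
    out = os.path.join(os.getcwd(), 'demo_avgwf.txt')
    with open(out, 'w') as fp:
        fp.write('0.0\n')
    return out

maker = Node(Function(input_names=[], output_names=['out_file'],
                      function=make_file), name='maker')
sink = Node(DataSink(base_directory=os.getcwd(), container='sub01'),
            name='sink')

demo = Workflow(name='datasink_demo', base_dir=os.getcwd())
demo.connect(maker, 'out_file', sink, 'resting.parcellations.aparc.@avgwf')
demo.run()  # demo_avgwf.txt ends up under ./sub01/resting/parcellations/aparc/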
@@ -722,13 +900,14 @@ def merge_files(in1, in2):
     anat_file = glob(os.path.abspath('%s/EO/anat/anat.nii' % subj_id))[0]
     target_file = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
     wf = create_workflow(files, anat_file, target_file, subj_id,
-                         2.0, 33, vol_fwhm=6.0,
+                         2.0, 33, vol_fwhm=6.0, surf_fwhm=15.0,
                          lowpass_freq=0.1, highpass_freq=0.01,
                          subjects_dir=fsdir,
+                         target_subject=['fsaverage4'],
                          sink_directory=os.getcwd(),
                          name='resting_' + subj_id)
     wf.base_dir = os.getcwd()
-    #wf.run(plugin='MultiProc', plugin_args={'nprocs': 4})
-    wf.write_graph(graph2use='colored')
-    wf.run()
+    wf.run(plugin='MultiProc', plugin_args={'nprocs': 4})
+    #wf.write_graph(graph2use='colored')
+    #wf.run()
 