Commit 1625bdb

update per @oesteban 's review
1 parent fe430f5 commit 1625bdb

File tree

1 file changed (+46, -55 lines)


nipype/algorithms/confounds.py

Lines changed: 46 additions & 55 deletions
@@ -8,13 +8,9 @@
                         absolute_import)
 from builtins import range
 
-# Py2 compat: http://python-future.org/compatible_idioms.html#collections-counter-and-ordereddict
-from future import standard_library
-standard_library.install_aliases()
-from collections import OrderedDict
-
 import os
 import os.path as op
+from collections import OrderedDict
 
 import nibabel as nb
 import numpy as np
@@ -615,52 +611,48 @@ def _run_interface(self, runtime):
 
         save_pre_filter = self.inputs.save_pre_filter
         if save_pre_filter:
-            if isinstance(save_pre_filter, bool):
+            self._results['pre_filter_file'] = save_pre_filter
+            if save_pre_filter is True:
                 pre_filter_file = os.path.abspath('pre_filter.tsv')
-            else:
-                pre_filter_file = save_pre_filter
-            self._results['pre_filter_file'] = pre_filter_file
-        if self.inputs.pre_filter and save_pre_filter:
-            ftype = {
-                'polynomial': 'Legendre',
-                'cosine': 'Cosine'
-            }[self.inputs.pre_filter]
-            ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0
-            header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)]
-            if skip_vols:
-                old_basis = filter_basis
-                # nrows defined above
-                filter_basis = np.zeros(
-                    (nrows, ncols + skip_vols), dtype=filter_basis.dtype)
-                if old_basis.size > 0:
-                    filter_basis[skip_vols:, :ncols] = old_basis
-                filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols)
-                header.extend([
-                    'NonSteadyStateOutlier{:02d}'.format(i)
-                    for i in range(skip_vols)
-                ])
-            np.savetxt(
-                pre_filter_file,
-                filter_basis,
-                fmt=b'%.10f',
-                delimiter='\t',
-                header='\t'.join(header),
-                comments='')
-
-        save_metadata = self.inputs.save_metadata
-        if save_metadata:
-            if isinstance(save_metadata, bool):
+            if self.inputs.pre_filter:
+                ftype = {
+                    'polynomial': 'Legendre',
+                    'cosine': 'Cosine'
+                }[self.inputs.pre_filter]
+                ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0
+                header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)]
+                if skip_vols:
+                    old_basis = filter_basis
+                    # nrows defined above
+                    filter_basis = np.zeros(
+                        (nrows, ncols + skip_vols), dtype=filter_basis.dtype)
+                    if old_basis.size > 0:
+                        filter_basis[skip_vols:, :ncols] = old_basis
+                    filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols)
+                    header.extend([
+                        'NonSteadyStateOutlier{:02d}'.format(i)
+                        for i in range(skip_vols)
+                    ])
+                np.savetxt(
+                    self._results['pre_filter_file'],
+                    filter_basis,
+                    fmt=b'%.10f',
+                    delimiter='\t',
+                    header='\t'.join(header),
+                    comments='')
+
+        metadata_file = self.inputs.save_metadata
+        if metadata_file:
+            self._results['metadata_file'] = metadata_file
+            if metadata_file is True:
                 metadata_file = os.path.abspath('component_metadata.tsv')
-            else:
-                metadata_file = save_metadata
             components_names = np.empty(len(metadata['mask']),
                                         dtype='object_')
             retained = np.where(metadata['retained'])
             not_retained = np.where(np.logical_not(metadata['retained']))
             components_names[retained] = components_header
             components_names[not_retained] = ([
                 'dropped{}'.format(i) for i in range(len(not_retained[0]))])
-            self._results['metadata_file'] = metadata_file
             with open(metadata_file, 'w') as f:
                 f.write('{}\t{}\t{}\t{}\t{}\n'.format('component',
                                                       *list(metadata.keys())))
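A note on the padding logic that this hunk folds under if self.inputs.pre_filter: when the first skip_vols volumes are non-steady-state, the filter basis computed on the remaining volumes is zero-padded back to the full series length and one indicator column is added per skipped volume. The standalone sketch below (separate from the patch) reproduces only that pattern; the shapes and the 'Cosine' prefix are invented for illustration and are not taken from the interface.

import numpy as np

# Invented dimensions: total volumes, basis columns, initial non-steady-state volumes.
nrows, ncols, skip_vols = 10, 2, 3

# Basis as computed on the steady-state volumes only (nrows - skip_vols rows).
old_basis = np.random.rand(nrows - skip_vols, ncols)

# Zero-pad to the full length and append one one-hot column per skipped volume.
filter_basis = np.zeros((nrows, ncols + skip_vols), dtype=old_basis.dtype)
filter_basis[skip_vols:, :ncols] = old_basis
filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols)

header = ['Cosine{:02d}'.format(i) for i in range(ncols)]
header.extend(['NonSteadyStateOutlier{:02d}'.format(i) for i in range(skip_vols)])

# Same TSV layout as the interface writes: one column per regressor, named header row.
np.savetxt('pre_filter.tsv', filter_basis, fmt='%.10f',
           delimiter='\t', header='\t'.join(header), comments='')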
@@ -1200,7 +1192,7 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None):
 def compute_noise_components(imgseries, mask_images, components_criterion=0.5,
                              filter_type=False, degree=0, period_cut=128,
                              repetition_time=None, failure_mode='error',
-                             mask_names=''):
+                             mask_names=None):
     """Compute the noise components from the imgseries for each mask
 
     Parameters
@@ -1245,9 +1237,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5,
     basis = np.array([])
     if components_criterion == 'all':
         components_criterion = -1
-    if not mask_names:
-        mask_names = range(len(mask_images))
-    for i, img in zip(mask_names, mask_images):
+    mask_names = mask_names or range(len(mask_images))
+    for name, img in zip(mask_names, mask_images):
         mask = img.get_data().astype(np.bool).squeeze()
         if imgseries.shape[:3] != mask.shape:
             raise ValueError(
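The switch from mask_names='' to mask_names=None together with the or-fallback keeps the old behavior in one line: any falsy value (None, empty string, empty list) falls back to positional indices. A minimal sketch of the idiom, with hypothetical file names:

mask_images = ['csf_mask.nii.gz', 'wm_mask.nii.gz']  # hypothetical inputs
mask_names = None

# Falls back to 0, 1, ... whenever mask_names is None (or otherwise falsy).
mask_names = mask_names or range(len(mask_images))

for name, img in zip(mask_names, mask_images):
    print(name, img)  # -> 0 csf_mask.nii.gz, then 1 wm_mask.nii.gz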
@@ -1267,20 +1258,20 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5,
                 voxel_timecourses, repetition_time, period_cut)
         elif filter_type in ('polynomial', False):
             # from paper:
-            # "The constant and linear trends of the columns in the matrix M
-            # were removed [prior to ...]"
+            # "The constant and linear trends of the columns in the matrix M were
+            # removed [prior to ...]"
             voxel_timecourses, basis = regress_poly(degree, voxel_timecourses)
 
-        # "Voxel time series from the noise ROI (either anatomical or tSTD)
-        # were placed in a matrix M of size Nxm, with time along the row
-        # dimension and voxels along the column dimension."
+        # "Voxel time series from the noise ROI (either anatomical or tSTD) were
+        # placed in a matrix M of size Nxm, with time along the row dimension
+        # and voxels along the column dimension."
         M = voxel_timecourses.T
 
         # "[... were removed] prior to column-wise variance normalization."
         M = M / _compute_tSTD(M, 1.)
 
-        # "The covariance matrix C = MMT was constructed and decomposed into
-        # its principal components using a singular value decomposition."
+        # "The covariance matrix C = MMT was constructed and decomposed into its
+        # principal components using a singular value decomposition."
         try:
             u, s, _ = fallback_svd(M, full_matrices=False)
         except np.linalg.LinAlgError:
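The quoted comments summarize the decomposition step: build a time-by-voxel matrix M, variance-normalize its columns, and take the left singular vectors of M as the principal components of C = MM^T. Below is a minimal NumPy sketch of that step on random data (separate from the patch); it uses np.linalg.svd directly rather than the module's fallback_svd and _compute_tSTD helpers, and the 50% variance criterion is only an example.

import numpy as np

# Invented data: N timepoints x m voxels, assumed already detrended.
N, m = 100, 500
M = np.random.randn(N, m)

# Column-wise variance normalization (stand-in for _compute_tSTD).
M = M / M.std(axis=0, ddof=1)

# Left singular vectors of M are the principal components of C = M @ M.T.
u, s, _ = np.linalg.svd(M, full_matrices=False)

variance_explained = s ** 2 / np.sum(s ** 2)
cumulative_variance_explained = np.cumsum(variance_explained)

# Example criterion: keep enough components to explain 50% of the variance.
num_components = int(np.searchsorted(cumulative_variance_explained, 0.5) + 1)
components = u[:, :num_components]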
@@ -1308,15 +1299,15 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5,
         if components is None:
             components = u[:, :num_components]
             metadata = OrderedDict()
-            metadata['mask'] = [i] * len(s)
+            metadata['mask'] = [name] * len(s)
             metadata['singular_value'] = s
             metadata['variance_explained'] = variance_explained
             metadata['cumulative_variance_explained'] = (
                 cumulative_variance_explained)
             metadata['retained'] = [i < num_components for i in range(len(s))]
         else:
             components = np.hstack((components, u[:, :num_components]))
-            metadata['mask'] = metadata['mask'] + [i] * len(s)
+            metadata['mask'] = metadata['mask'] + [name] * len(s)
             metadata['singular_value'] = (
                 np.hstack((metadata['singular_value'], s)))
             metadata['variance_explained'] = (
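For context on the bookkeeping being renamed here: metadata is column-oriented, with one entry per singular value across all masks, which is what lets _run_interface dump it straight to a TSV. A toy sketch of that accumulation pattern with invented values (only the keys shown in this hunk):

from collections import OrderedDict
import numpy as np

metadata = None
# Invented per-mask singular values; in the real code these come from the SVD above.
for name, s in [('CSF', np.array([3.0, 1.0])), ('WM', np.array([2.0, 0.5]))]:
    num_components = 1
    if metadata is None:
        metadata = OrderedDict()
        metadata['mask'] = [name] * len(s)
        metadata['singular_value'] = s
        metadata['retained'] = [i < num_components for i in range(len(s))]
    else:
        metadata['mask'] = metadata['mask'] + [name] * len(s)
        metadata['singular_value'] = np.hstack((metadata['singular_value'], s))
        metadata['retained'] = (
            metadata['retained'] + [i < num_components for i in range(len(s))])

# metadata['mask']           -> ['CSF', 'CSF', 'WM', 'WM']
# metadata['singular_value'] -> [3.  1.  2.  0.5]
# metadata['retained']       -> [True, False, True, False]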
