Commit cb365f4

Merge remote-tracking branch 'upstream/master' into fix/bunchmapnode
* upstream/master: (28 commits)
  less brittle test
  don't calculate var/stddev twice
  don't let divide by zero errors pass by
  specify tab delimiter
  try longer timeout
  revert identity transform
  use from io import open
  fix up test (e.g. pep8)
  chdir back to original dir before deleting tempdir
  revert 4d validation, fix input spec desc
  add headers to outputs of compcor, framewise displacement + test
  fix pull from nipy/nipype master
  merge with master
  make specs
  don't load the whole thing into memory
  check and error if input to fsl ApplyTopUp is not 4 dimensional
  better test
  less mysterious error messages
  Add more informative error msg
  test specs auto
  ...
2 parents 78afc2a + f00ab33 commit cb365f4

File tree

10 files changed: +181 additions, -84 deletions

circle.yml

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ test:
     - docker run -v /etc/localtime:/etc/localtime:ro -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /root/src/nipype nipype/nipype_test:py35 /usr/bin/run_nosetests.sh py35 :
         timeout: 2600
     - docker run -v /etc/localtime:/etc/localtime:ro -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /root/src/nipype nipype/nipype_test:py27 /usr/bin/run_nosetests.sh py27 :
-        timeout: 2600
+        timeout: 5200
     - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow3d :
         timeout: 1600
     - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow4d :

nipype/algorithms/confounds.py

Lines changed: 63 additions & 25 deletions
@@ -32,7 +32,7 @@
 class ComputeDVARSInputSpec(BaseInterfaceInputSpec):
     in_file = File(exists=True, mandatory=True, desc='functional data, after HMC')
     in_mask = File(exists=True, mandatory=True, desc='a brain mask')
-    remove_zerovariance = traits.Bool(False, usedefault=True,
+    remove_zerovariance = traits.Bool(True, usedefault=True,
                                       desc='remove voxels with zero variance')
     save_std = traits.Bool(True, usedefault=True,
                            desc='save standardized DVARS')
@@ -255,7 +255,7 @@ def _run_interface(self, runtime):
             'out_file': op.abspath(self.inputs.out_file),
             'fd_average': float(fd_res.mean())
         }
-        np.savetxt(self.inputs.out_file, fd_res)
+        np.savetxt(self.inputs.out_file, fd_res, header='framewise_displacement')

         if self.inputs.save_plot:
             tr = None
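A minimal sketch (not part of the commit) of why the new header is safe for existing readers: np.savetxt prefixes the header with its comment character, and np.loadtxt skips such lines by default, so numeric consumers of the framewise-displacement file keep working.

import numpy as np

fd = np.array([0.0, 0.12, 0.07])
np.savetxt('fd.txt', fd, header='framewise_displacement')

with open('fd.txt') as f:
    assert f.readline().startswith('# framewise_displacement')

assert np.allclose(np.loadtxt('fd.txt'), fd)  # the '#' header line is ignored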
@@ -291,6 +291,8 @@ class CompCorInputSpec(BaseInterfaceInputSpec):
                                        'pre-component extraction')
     regress_poly_degree = traits.Range(low=1, default=1, usedefault=True,
                                        desc='the degree polynomial to use')
+    header = traits.Str(desc='the desired header for the output tsv file (one column).'
+                             'If undefined, will default to "CompCor"')

 class CompCorOutputSpec(TraitedSpec):
     components_file = File(exists=True,
@@ -329,6 +331,13 @@ class CompCor(BaseInterface):
     def _run_interface(self, runtime):
         imgseries = nb.load(self.inputs.realigned_file).get_data()
         mask = nb.load(self.inputs.mask_file).get_data()
+
+        if imgseries.shape[:3] != mask.shape:
+            raise ValueError('Inputs for CompCor, func {} and mask {}, do not have matching '
+                             'spatial dimensions ({} and {}, respectively)'
+                             .format(self.inputs.realigned_file, self.inputs.mask_file,
+                                     imgseries.shape[:3], mask.shape))
+
         voxel_timecourses = imgseries[mask > 0]
         # Zero-out any bad values
         voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0
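An illustrative sketch (toy shapes, not from the commit) of why the new shape check matters: indexing a 4-D series with a 3-D boolean mask keeps the time axis, so the mask must match the first three dimensions of the functional image.

import numpy as np

imgseries = np.random.rand(2, 3, 4, 5)      # x, y, z, t
mask = np.zeros((2, 3, 4), dtype=np.uint8)
mask[1, 1, :] = 1                           # select 4 voxels

voxel_timecourses = imgseries[mask > 0]
print(voxel_timecourses.shape)              # (4, 5): voxels x timepoints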
@@ -352,7 +361,10 @@ def _run_interface(self, runtime):
         u, _, _ = linalg.svd(M, full_matrices=False)
         components = u[:, :self.inputs.num_components]
         components_file = os.path.join(os.getcwd(), self.inputs.components_file)
-        np.savetxt(components_file, components, fmt=b"%.10f")
+
+        self._set_header()
+        np.savetxt(components_file, components, fmt=b"%.10f", delimiter='\t',
+                   header=self._make_headers(components.shape[1]))
         return runtime

     def _list_outputs(self):
@@ -367,6 +379,26 @@ def _compute_tSTD(self, M, x):
         stdM[np.isnan(stdM)] = x
         return stdM

+    def _set_header(self, header='CompCor'):
+        self.inputs.header = self.inputs.header if isdefined(self.inputs.header) else header
+
+    def _make_headers(self, num_col):
+        headers = []
+        for i in range(num_col):
+            headers.append(self.inputs.header + str(i))
+        return '\t'.join(headers)
+
+
+class ACompCor(CompCor):
+    ''' Anatomical compcor; for input/output, see CompCor.
+    If the mask provided is an anatomical mask, CompCor == ACompCor '''
+
+    def __init__(self, *args, **kwargs):
+        ''' exactly the same as compcor except the header '''
+        super(ACompCor, self).__init__(*args, **kwargs)
+        self._set_header('aCompCor')
+
+
 class TCompCorInputSpec(CompCorInputSpec):
     # and all the fields in CompCorInputSpec
     percentile_threshold = traits.Range(low=0., high=1., value=.02,
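A usage sketch under assumptions (the file paths are hypothetical): ACompCor now subclasses CompCor and only changes the default column prefix, so the components file is tab-delimited with headers aCompCor0, aCompCor1, ... unless the new `header` input is set explicitly.

from nipype.algorithms.confounds import ACompCor

acc = ACompCor()
acc.inputs.realigned_file = 'func_mc.nii.gz'    # assumed 4-D functional image
acc.inputs.mask_file = 'noise_mask.nii.gz'      # assumed anatomical noise mask
acc.inputs.components_file = 'acc_components.tsv'
# acc.inputs.header = 'NoiseComp'               # optional override of the column prefix
res = acc.run()
print(res.outputs.components_file)              # first line: '# aCompCor0<tab>aCompCor1...'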
@@ -401,6 +433,11 @@ class TCompCor(CompCor):
     def _run_interface(self, runtime):
         imgseries = nb.load(self.inputs.realigned_file).get_data()

+        if imgseries.ndim != 4:
+            raise ValueError('tCompCor expected a 4-D nifti file. Input {} has {} dimensions '
+                             '(shape {})'
+                             .format(self.inputs.realigned_file, imgseries.ndim, imgseries.shape))
+
         # From the paper:
         # "For each voxel time series, the temporal standard deviation is
         # defined as the standard deviation of the time series after the removal
@@ -419,18 +456,19 @@ def _run_interface(self, runtime):
         threshold_index = int(num_voxels * (1. - self.inputs.percentile_threshold))
         threshold_std = sortSTD[threshold_index]
         mask = tSTD >= threshold_std
-        mask = mask.astype(int)
+        mask = mask.astype(int).T

         # save mask
-        mask_file = 'mask.nii'
+        mask_file = os.path.abspath('mask.nii')
         nb.nifti1.save(nb.Nifti1Image(mask, np.eye(4)), mask_file)
+        IFLOG.debug('tCompcor computed and saved mask of shape {} to mask_file {}'
+                    .format(mask.shape, mask_file))
         self.inputs.mask_file = mask_file
+        self._set_header('tCompCor')

         super(TCompCor, self)._run_interface(runtime)
         return runtime

-ACompCor = CompCor
-
 class TSNRInputSpec(BaseInterfaceInputSpec):
     in_file = InputMultiPath(File(exists=True), mandatory=True,
                              desc='realigned 4D file or a list of 3D files')
@@ -512,6 +550,8 @@ def regress_poly(degree, data, remove_mean=True, axis=-1):
     If remove_mean is True (default), the data is demeaned (i.e. degree 0).
     If remove_mean is false, the data is not.
     '''
+    IFLOG.debug('Performing polynomial regression on data of shape ' + str(data.shape))
+
     datashape = data.shape
     timepoints = datashape[axis]

@@ -570,6 +610,7 @@ def compute_dvars(in_file, in_mask, remove_zerovariance=False):
     import numpy as np
     import nibabel as nb
     from nitime.algorithms import AR_est_YW
+    import warnings

     func = nb.load(in_file).get_data().astype(np.float32)
     mask = nb.load(in_mask).get_data().astype(np.uint8)
@@ -585,7 +626,7 @@ def compute_dvars(in_file, in_mask, remove_zerovariance=False):

     if remove_zerovariance:
         # Remove zero-variance voxels across time axis
-        mask = zero_variance(func, mask)
+        mask = zero_remove(func_sd, mask)

     idx = np.where(mask > 0)
     mfunc = func[idx[0], idx[1], idx[2], :]
@@ -609,31 +650,28 @@ def compute_dvars(in_file, in_mask, remove_zerovariance=False):
     # standardization
     dvars_stdz = dvars_nstd / diff_sd_mean

-    # voxelwise standardization
-    diff_vx_stdz = func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T
-    dvars_vx_stdz = diff_vx_stdz.std(axis=0, ddof=1)
+    with warnings.catch_warnings():  # catch, e.g., divide by zero errors
+        warnings.filterwarnings('error')
+
+        # voxelwise standardization
+        diff_vx_stdz = func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T
+        dvars_vx_stdz = diff_vx_stdz.std(axis=0, ddof=1)

     return (dvars_stdz, dvars_nstd, dvars_vx_stdz)

-def zero_variance(func, mask):
+def zero_remove(data, mask):
     """
-    Mask out voxels with zero variance across t-axis
+    Modify inputted mask to also mask out zero values

-    :param numpy.ndarray func: input fMRI dataset, after motion correction
-    :param numpy.ndarray mask: 3D brain mask
-    :return: the 3D mask of voxels with nonzero variance across :math:`t`.
+    :param numpy.ndarray data: e.g. voxelwise stddev of fMRI dataset, after motion correction
+    :param numpy.ndarray mask: brain mask (same dimensions as data)
+    :return: the mask with any additional zero voxels removed (same dimensions as inputs)
     :rtype: numpy.ndarray

     """
-    idx = np.where(mask > 0)
-    func = func[idx[0], idx[1], idx[2], :]
-    tvariance = func.var(axis=1)
-    tv_mask = np.zeros_like(tvariance, dtype=np.uint8)
-    tv_mask[tvariance > 0] = 1
-
-    newmask = np.zeros_like(mask, dtype=np.uint8)
-    newmask[idx] = tv_mask
-    return newmask
+    new_mask = mask.copy()
+    new_mask[data == 0] = 0
+    return new_mask

 def plot_confound(tseries, figsize, name, units=None,
                   series_tr=None, normalize=False):
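A minimal sketch with toy numbers (not from the commit): the simplified zero_remove() drops voxels whose temporal standard deviation is exactly zero from the mask, so the voxelwise standardization above never divides by zero for masked voxels.

import numpy as np

func_sd = np.array([[0.9, 0.0],
                    [0.4, 1.1]])
mask = np.ones((2, 2), dtype=np.uint8)

new_mask = mask.copy()
new_mask[func_sd == 0] = 0      # same logic as zero_remove(func_sd, mask)
print(new_mask)                 # [[1 0]
                                #  [1 1]]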

nipype/algorithms/tests/test_compcor.py

Lines changed: 45 additions & 19 deletions
@@ -8,7 +8,7 @@
 import nibabel as nb
 import numpy as np

-from ...testing import assert_equal, assert_true, utils
+from ...testing import assert_equal, assert_true, utils, assert_in
 from ..confounds import CompCor, TCompCor, ACompCor

 class TestCompCor(unittest.TestCase):
@@ -38,25 +38,20 @@ def test_compcor(self):
                                ['0.4206466244', '-0.3361270124'],
                                ['-0.1246655485', '-0.1235705610']]

-        ccresult = self.run_cc(CompCor(realigned_file=self.realigned_file,
-                                       mask_file=self.mask_file),
-                               expected_components)
+        self.run_cc(CompCor(realigned_file=self.realigned_file, mask_file=self.mask_file),
+                    expected_components)

-        accresult = self.run_cc(ACompCor(realigned_file=self.realigned_file,
-                                         mask_file=self.mask_file,
-                                         components_file='acc_components_file'),
-                                expected_components)
-
-        assert_equal(os.path.getsize(ccresult.outputs.components_file),
-                     os.path.getsize(accresult.outputs.components_file))
+        self.run_cc(ACompCor(realigned_file=self.realigned_file, mask_file=self.mask_file,
+                             components_file='acc_components_file'),
+                    expected_components, 'aCompCor')

     def test_tcompcor(self):
         ccinterface = TCompCor(realigned_file=self.realigned_file, percentile_threshold=0.75)
         self.run_cc(ccinterface, [['-0.1114536190', '-0.4632908609'],
                                   ['0.4566907310', '0.6983205193'],
                                   ['-0.7132557407', '0.1340170559'],
                                   ['0.5022537643', '-0.5098322262'],
-                                  ['-0.1342351356', '0.1407855119']])
+                                  ['-0.1342351356', '0.1407855119']], 'tCompCor')

     def test_tcompcor_no_percentile(self):
         ccinterface = TCompCor(realigned_file=self.realigned_file)
@@ -74,7 +69,29 @@ def test_compcor_no_regress_poly(self):
                                   ['-0.5367548139', '0.0059943226'],
                                   ['-0.0520809054', '0.2940637551']])

-    def run_cc(self, ccinterface, expected_components):
+    def test_tcompcor_asymmetric_dim(self):
+        asymmetric_shape = (2, 3, 4, 5)
+        asymmetric_data = utils.save_toy_nii(np.zeros(asymmetric_shape), 'asymmetric.nii')
+
+        TCompCor(realigned_file=asymmetric_data).run()
+        self.assertEqual(nb.load('mask.nii').get_data().shape, asymmetric_shape[:3])
+
+    def test_compcor_bad_input_shapes(self):
+        shape_less_than = (1, 2, 2, 5)  # dim 0 is < dim 0 of self.mask_file (2)
+        shape_more_than = (3, 3, 3, 5)  # dim 0 is > dim 0 of self.mask_file (2)
+
+        for data_shape in (shape_less_than, shape_more_than):
+            data_file = utils.save_toy_nii(np.zeros(data_shape), 'temp.nii')
+            interface = CompCor(realigned_file=data_file, mask_file=self.mask_file)
+            self.assertRaisesRegexp(ValueError, "dimensions", interface.run)
+
+    def test_tcompcor_bad_input_dim(self):
+        bad_dims = (2, 2, 2)
+        data_file = utils.save_toy_nii(np.zeros(bad_dims), 'temp.nii')
+        interface = TCompCor(realigned_file=data_file)
+        self.assertRaisesRegexp(ValueError, '4-D', interface.run)
+
+    def run_cc(self, ccinterface, expected_components, expected_header='CompCor'):
         # run
         ccresult = ccinterface.run()

@@ -86,12 +103,21 @@ def run_cc(self, ccinterface, expected_components):
         assert_equal(ccinterface.inputs.num_components, 6)

         with open(ccresult.outputs.components_file, 'r') as components_file:
-            components_data = [line.split() for line in components_file]
-            num_got_components = len(components_data)
-            assert_true(num_got_components == ccinterface.inputs.num_components
-                        or num_got_components == self.fake_data.shape[3])
-            first_two = [row[:2] for row in components_data]
-            assert_equal(first_two, expected_components)
+            expected_n_components = min(ccinterface.inputs.num_components, self.fake_data.shape[3])
+
+            components_data = [line.split('\t') for line in components_file]
+
+            header = components_data.pop(0)  # the first item will be '#', we can throw it out
+            expected_header = [expected_header + str(i) for i in range(expected_n_components)]
+            for i, heading in enumerate(header):
+                assert_in(expected_header[i], heading)
+
+            num_got_timepoints = len(components_data)
+            assert_equal(num_got_timepoints, self.fake_data.shape[3])
+            for index, timepoint in enumerate(components_data):
+                assert_true(len(timepoint) == ccinterface.inputs.num_components
+                            or len(timepoint) == self.fake_data.shape[3])
+                assert_equal(timepoint[:2], expected_components[index])
         return ccresult

     def tearDown(self):
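The new tests lean on utils.save_toy_nii from nipype.testing; a stand-in with the same presumed behaviour (write an array to disk as a NIfTI and return its path) could look like the sketch below. The identity affine mirrors how TCompCor saves its mask.

import os
import nibabel as nb
import numpy as np

def save_toy_nii(ndarray, filename):
    # write the array with an identity affine and return an absolute path
    toy = nb.Nifti1Image(ndarray, np.eye(4))
    nb.nifti1.save(toy, filename)
    return os.path.abspath(filename)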

nipype/algorithms/tests/test_confounds.py

Lines changed: 15 additions & 1 deletion
@@ -4,7 +4,9 @@
 from tempfile import mkdtemp
 from shutil import rmtree

-from nipype.testing import (assert_equal, example_data, skipif, assert_true)
+from io import open
+
+from nipype.testing import (assert_equal, example_data, skipif, assert_true, assert_in)
 from nipype.algorithms.confounds import FramewiseDisplacement, ComputeDVARS
 import numpy as np

@@ -24,8 +26,14 @@ def test_fd():
                                     out_file=tempdir + '/fd.txt')
     res = fdisplacement.run()

+    with open(res.outputs.out_file) as all_lines:
+        for line in all_lines:
+            yield assert_in, 'framewise_displacement', line
+            break
+
     yield assert_true, np.allclose(ground_truth, np.loadtxt(res.outputs.out_file), atol=.16)
     yield assert_true, np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-2
+
     rmtree(tempdir)

 @skipif(nonitime)
@@ -35,8 +43,14 @@ def test_dvars():
     dvars = ComputeDVARS(in_file=example_data('ds003_sub-01_mc.nii.gz'),
                          in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'),
                          save_all=True)
+
+    origdir = os.getcwd()
     os.chdir(tempdir)
+
     res = dvars.run()

     dv1 = np.loadtxt(res.outputs.out_std)
     yield assert_equal, (np.abs(dv1 - ground_truth).sum()/ len(dv1)) < 0.05, True
+
+    os.chdir(origdir)
+    rmtree(tempdir)
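A sketch of the pattern the test now follows (not part of the commit): record the original working directory, work inside the tempdir, and chdir back before deleting it, so later tests never run from a removed directory.

import os
from contextlib import contextmanager
from shutil import rmtree
from tempfile import mkdtemp

@contextmanager
def inside_tempdir():
    origdir = os.getcwd()
    tempdir = mkdtemp()
    os.chdir(tempdir)
    try:
        yield tempdir
    finally:
        os.chdir(origdir)   # leave the directory before deleting it
        rmtree(tempdir)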

nipype/algorithms/tests/test_tsnr.py

Lines changed: 3 additions & 3 deletions
@@ -1,8 +1,7 @@
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:

-from ...testing import (assert_equal, assert_true, assert_almost_equal,
-                        skipif, utils)
+from ...testing import (assert_equal, assert_almost_equal, assert_in, utils)
 from ..confounds import TSNR
 from .. import misc

@@ -88,11 +87,12 @@ def test_tsnr_withpoly3(self):

     @mock.patch('warnings.warn')
     def test_warning(self, mock_warn):
+        ''' test that usage of misc.TSNR trips a warning to use confounds.TSNR instead '''
         # run
         misc.TSNR(in_file=self.in_filenames['in_file'])

         # assert
-        mock_warn.assert_called_once_with(mock.ANY, UserWarning)
+        assert_in(True, [args[0].count('confounds') > 0 for _, args, _ in mock_warn.mock_calls])

     def assert_expected_outputs_poly(self, tsnrresult, expected_ranges):
         assert_equal(os.path.basename(tsnrresult.outputs.detrended_file),
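A toy demonstration of the new assertion (illustrative message only): each entry of Mock.mock_calls unpacks to (name, args, kwargs), so the check above simply asks whether any recorded warning message mentions 'confounds'.

from unittest import mock
import warnings

with mock.patch('warnings.warn') as mock_warn:
    warnings.warn('misc.TSNR is deprecated, use confounds.TSNR', UserWarning)

print([args[0].count('confounds') > 0 for _, args, _ in mock_warn.mock_calls])
# [True]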

nipype/interfaces/ants/resampling.py

Lines changed: 4 additions & 5 deletions
@@ -246,9 +246,10 @@ class ApplyTransformsInputSpec(ANTSCommandInputSpec):
     interpolation_parameters = traits.Either(traits.Tuple(traits.Int()),  # BSpline (order)
                                              traits.Tuple(traits.Float(),  # Gaussian/MultiLabel (sigma, alpha)
                                                           traits.Float())
-                                             )
-    transforms = InputMultiPath(
-        File(exists=True), argstr='%s', mandatory=True, desc='transform files: will be applied in reverse order. For example, the last specified transform will be applied first')
+                                             )
+    transforms = InputMultiPath(File(exists=True), argstr='%s', mandatory=True,
+                                desc='transform files: will be applied in reverse order. For '
+                                     'example, the last specified transform will be applied first.')
     invert_transform_flags = InputMultiPath(traits.Bool())
     default_value = traits.Float(0.0, argstr='--default-value %g', usedefault=True)
     print_out_composite_warp_file = traits.Bool(False, requires=["output_image"],
@@ -296,8 +297,6 @@ class ApplyTransforms(ANTSCommand):
     'antsApplyTransforms --default-value 0 --dimensionality 3 --input moving1.nii --interpolation BSpline[ 5 ] \
 --output deformed_moving1.nii --reference-image fixed1.nii --transform [ ants_Warp.nii.gz, 0 ] \
 --transform [ trans.mat, 0 ]'
-
-
     """
     _cmd = 'antsApplyTransforms'
     input_spec = ApplyTransformsInputSpec
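A usage sketch based on the docstring example visible in this hunk (file names are the doctest's sample files): the transforms list is applied in reverse order, i.e. trans.mat is applied first and ants_Warp.nii.gz last.

from nipype.interfaces.ants import ApplyTransforms

at = ApplyTransforms()
at.inputs.dimension = 3
at.inputs.input_image = 'moving1.nii'
at.inputs.reference_image = 'fixed1.nii'
at.inputs.output_image = 'deformed_moving1.nii'
at.inputs.interpolation = 'BSpline'
at.inputs.interpolation_parameters = (5,)
at.inputs.default_value = 0
at.inputs.transforms = ['ants_Warp.nii.gz', 'trans.mat']
at.inputs.invert_transform_flags = [False, False]
print(at.cmdline)  # should match the command line quoted in the class docstring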

nipype/interfaces/fsl/epi.py

Lines changed: 1 addition & 1 deletion
@@ -307,7 +307,7 @@ def _overload_extension(self, value, name=None):

 class ApplyTOPUPInputSpec(FSLCommandInputSpec):
     in_files = InputMultiPath(File(exists=True), mandatory=True,
-                              desc='name of 4D file with images',
+                              desc='name of file with images',
                               argstr='--imain=%s', sep=',')
     encoding_file = File(exists=True, mandatory=True,
                          desc='name of text file with PE directions/times',
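A minimal sketch using only the two inputs visible in this hunk (file names are hypothetical); --imain takes a comma-separated list of images, hence sep=','.

from nipype.interfaces.fsl import ApplyTOPUP

applytopup = ApplyTOPUP()
applytopup.inputs.in_files = ['blip_up.nii', 'blip_down.nii']
applytopup.inputs.encoding_file = 'topup_encoding.txt'
# applytopup.run()  # a real run would also need the field estimate produced by TOPUP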
