Skip to content

Commit 8bacfb6

Browse files
authored
Merge branch 'master' into enh/nstdstate
2 parents d9fa4f6 + 0ac8143 commit 8bacfb6

File tree

708 files changed

+1956
-886
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

708 files changed

+1956
-886
lines changed

CHANGES

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@ Upcoming release 0.13
22
=====================
33

44
* ENH: Added non-steady state detector for EPI data (https://github.com/nipy/nipype/pull/1839)
5+
* ENH: DVARS includes intensity normalization feature - turned on by default (https://github.com/nipy/nipype/pull/1827)
6+
* FIX: DVARS is correctly using sum of squares instead of standard deviation (https://github.com/nipy/nipype/pull/1827)
57
* ENH: Refactoring of nipype.interfaces.utility (https://github.com/nipy/nipype/pull/1828)
68
* FIX: CircleCI were failing silently. Some fixes to tests (https://github.com/nipy/nipype/pull/1833)
79
* FIX: Issues in Docker image permissions, and docker documentation (https://github.com/nipy/nipype/pull/1825)

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ html:
7171

7272
specs:
7373
@echo "Checking specs and autogenerating spec tests"
74-
python tools/checkspecs.py
74+
env PYTHONPATH=".:$(PYTHONPATH)" python tools/checkspecs.py
7575

7676
check: check-before-commit # just a shortcut
7777
check-before-commit: specs trailing-spaces html test

nipype/algorithms/confounds.py

Lines changed: 34 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,16 @@ class ComputeDVARSInputSpec(BaseInterfaceInputSpec):
5252
desc='output figure size')
5353
figformat = traits.Enum('png', 'pdf', 'svg', usedefault=True,
5454
desc='output format for figures')
55+
intensity_normalization = traits.Float(1000.0, usedefault=True,
56+
desc='Divide value in each voxel at each timepoint '
57+
'by the median calculated across all voxels'
58+
'and timepoints within the mask (if specified)'
59+
'and then multiply by the value specified by'
60+
'this parameter. By using the default (1000)' \
61+
'output DVARS will be expressed in ' \
62+
'x10 % BOLD units compatible with Power et al.' \
63+
'2012. Set this to 0 to disable intensity' \
64+
'normalization altogether.')
5565

5666

5767

@@ -128,7 +138,8 @@ def _gen_fname(self, suffix, ext=None):
128138

129139
def _run_interface(self, runtime):
130140
dvars = compute_dvars(self.inputs.in_file, self.inputs.in_mask,
131-
remove_zerovariance=self.inputs.remove_zerovariance)
141+
remove_zerovariance=self.inputs.remove_zerovariance,
142+
intensity_normalization=self.inputs.intensity_normalization)
132143

133144
(self._results['avg_std'],
134145
self._results['avg_nstd'],
@@ -668,7 +679,8 @@ def regress_poly(degree, data, remove_mean=True, axis=-1):
668679
# Back to original shape
669680
return regressed_data.reshape(datashape)
670681

671-
def compute_dvars(in_file, in_mask, remove_zerovariance=False):
682+
def compute_dvars(in_file, in_mask, remove_zerovariance=False,
683+
intensity_normalization=1000):
672684
"""
673685
Compute the :abbr:`DVARS (D referring to temporal
674686
derivative of timecourses, VARS referring to RMS variance over voxels)`
@@ -709,59 +721,49 @@ def compute_dvars(in_file, in_mask, remove_zerovariance=False):
709721
raise RuntimeError(
710722
"Input fMRI dataset should be 4-dimensional")
711723

712-
# Robust standard deviation
713-
func_sd = (np.percentile(func, 75, axis=3) -
714-
np.percentile(func, 25, axis=3)) / 1.349
715-
func_sd[mask <= 0] = 0
716-
717-
if remove_zerovariance:
718-
# Remove zero-variance voxels across time axis
719-
mask = zero_remove(func_sd, mask)
720-
721724
idx = np.where(mask > 0)
722725
mfunc = func[idx[0], idx[1], idx[2], :]
723726

724-
# Demean
725-
mfunc = regress_poly(0, mfunc, remove_mean=True).astype(np.float32)
727+
if intensity_normalization != 0:
728+
mfunc = (mfunc / np.median(mfunc)) * intensity_normalization
729+
730+
# Robust standard deviation (we are using "lower" interpolation
731+
# because this is what FSL is doing
732+
func_sd = (np.percentile(mfunc, 75, axis=1, interpolation="lower") -
733+
np.percentile(mfunc, 25, axis=1, interpolation="lower")) / 1.349
734+
735+
if remove_zerovariance:
736+
mfunc = mfunc[func_sd != 0, :]
737+
func_sd = func_sd[func_sd != 0]
726738

727739
# Compute (non-robust) estimate of lag-1 autocorrelation
728-
ar1 = np.apply_along_axis(AR_est_YW, 1, mfunc, 1)[:, 0]
740+
ar1 = np.apply_along_axis(AR_est_YW, 1,
741+
regress_poly(0, mfunc, remove_mean=True).astype(
742+
np.float32), 1)[:, 0]
729743

730744
# Compute (predicted) standard deviation of temporal difference time series
731-
diff_sdhat = np.squeeze(np.sqrt(((1 - ar1) * 2).tolist())) * func_sd[mask > 0].reshape(-1)
745+
diff_sdhat = np.squeeze(np.sqrt(((1 - ar1) * 2).tolist())) * func_sd
732746
diff_sd_mean = diff_sdhat.mean()
733747

734748
# Compute temporal difference time series
735749
func_diff = np.diff(mfunc, axis=1)
736750

737751
# DVARS (no standardization)
738-
dvars_nstd = func_diff.std(axis=0)
752+
dvars_nstd = np.sqrt(np.square(func_diff).mean(axis=0))
739753

740754
# standardization
741755
dvars_stdz = dvars_nstd / diff_sd_mean
742756

743-
with warnings.catch_warnings(): # catch, e.g., divide by zero errors
757+
with warnings.catch_warnings(): # catch, e.g., divide by zero errors
744758
warnings.filterwarnings('error')
745759

746760
# voxelwise standardization
747-
diff_vx_stdz = func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T
748-
dvars_vx_stdz = diff_vx_stdz.std(axis=0, ddof=1)
761+
diff_vx_stdz = np.square(
762+
func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T)
763+
dvars_vx_stdz = np.sqrt(diff_vx_stdz.mean(axis=0))
749764

750765
return (dvars_stdz, dvars_nstd, dvars_vx_stdz)
751766

752-
def zero_remove(data, mask):
753-
"""
754-
Modify inputted mask to also mask out zero values
755-
756-
:param numpy.ndarray data: e.g. voxelwise stddev of fMRI dataset, after motion correction
757-
:param numpy.ndarray mask: brain mask (same dimensions as data)
758-
:return: the mask with any additional zero voxels removed (same dimensions as inputs)
759-
:rtype: numpy.ndarray
760-
761-
"""
762-
new_mask = mask.copy()
763-
new_mask[data == 0] = 0
764-
return new_mask
765767

766768
def plot_confound(tseries, figsize, name, units=None,
767769
series_tr=None, normalize=False):

nipype/algorithms/tests/test_auto_ACompCor.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
2+
from __future__ import unicode_literals
23
from ..confounds import ACompCor
34

45

nipype/algorithms/tests/test_auto_AddCSVColumn.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
2+
from __future__ import unicode_literals
23
from ..misc import AddCSVColumn
34

45

nipype/algorithms/tests/test_auto_AddCSVRow.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
2+
from __future__ import unicode_literals
23
from ..misc import AddCSVRow
34

45

nipype/algorithms/tests/test_auto_AddNoise.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
2+
from __future__ import unicode_literals
23
from ..misc import AddNoise
34

45

nipype/algorithms/tests/test_auto_ArtifactDetect.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
2+
from __future__ import unicode_literals
23
from ..rapidart import ArtifactDetect
34

45

@@ -16,7 +17,7 @@ def test_ArtifactDetect_inputs():
1617
mask_type=dict(mandatory=True,
1718
),
1819
norm_threshold=dict(mandatory=True,
19-
xor=[u'rotation_threshold', u'translation_threshold'],
20+
xor=['rotation_threshold', 'translation_threshold'],
2021
),
2122
parameter_source=dict(mandatory=True,
2223
),
@@ -27,18 +28,18 @@ def test_ArtifactDetect_inputs():
2728
realignment_parameters=dict(mandatory=True,
2829
),
2930
rotation_threshold=dict(mandatory=True,
30-
xor=[u'norm_threshold'],
31+
xor=['norm_threshold'],
3132
),
3233
save_plot=dict(usedefault=True,
3334
),
3435
translation_threshold=dict(mandatory=True,
35-
xor=[u'norm_threshold'],
36+
xor=['norm_threshold'],
3637
),
3738
use_differences=dict(maxlen=2,
3839
minlen=2,
3940
usedefault=True,
4041
),
41-
use_norm=dict(requires=[u'norm_threshold'],
42+
use_norm=dict(requires=['norm_threshold'],
4243
usedefault=True,
4344
),
4445
zintensity_threshold=dict(mandatory=True,

nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
2+
from __future__ import unicode_literals
23
from ..misc import CalculateNormalizedMoments
34

45

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
2+
from __future__ import unicode_literals
3+
from ..confounds import CompCor
4+
5+
6+
def test_CompCor_inputs():
7+
input_map = dict(components_file=dict(usedefault=True,
8+
),
9+
header=dict(),
10+
ignore_exception=dict(nohash=True,
11+
usedefault=True,
12+
),
13+
mask_file=dict(),
14+
num_components=dict(usedefault=True,
15+
),
16+
realigned_file=dict(mandatory=True,
17+
),
18+
regress_poly_degree=dict(usedefault=True,
19+
),
20+
use_regress_poly=dict(usedefault=True,
21+
),
22+
)
23+
inputs = CompCor.input_spec()
24+
25+
for key, metadata in list(input_map.items()):
26+
for metakey, value in list(metadata.items()):
27+
assert getattr(inputs.traits()[key], metakey) == value
28+
29+
30+
def test_CompCor_outputs():
31+
output_map = dict(components_file=dict(),
32+
)
33+
outputs = CompCor.output_spec()
34+
35+
for key, metadata in list(output_map.items()):
36+
for metakey, value in list(metadata.items()):
37+
assert getattr(outputs.traits()[key], metakey) == value

0 commit comments

Comments
 (0)