
Commit 0cff06b

Merge remote-tracking branch 'origin/pr/923'

2 parents 39a0ed8 + 2a5bf9a

File tree

2 files changed: 169 additions & 31 deletions


nipype/algorithms/misc.py

Lines changed: 136 additions & 31 deletions
@@ -5,7 +5,7 @@
 
     Change directory to provide relative paths for doctests
     >>> import os
-    >>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
+    >>> filepath = os.path.dirname(os.path.realpath(__file__))
     >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data'))
     >>> os.chdir(datadir)
 
@@ -34,7 +34,7 @@
 from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File,
                                InputMultiPath, OutputMultiPath,
                                BaseInterfaceInputSpec, isdefined,
-                               DynamicTraitedSpec )
+                               DynamicTraitedSpec)
 from nipype.utils.filemanip import fname_presuffix, split_filename
 iflogger = logging.getLogger('interface')
 
@@ -785,7 +785,7 @@ def _list_outputs(self):
 
 class AddCSVRowInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
     in_file = traits.File(mandatory=True, desc='Input comma-separated value (CSV) files')
-    _outputs = traits.Dict( traits.Any, value={}, usedefault=True )
+    _outputs = traits.Dict(traits.Any, value={}, usedefault=True)
 
     def __setattr__(self, key, value):
         if key not in self.copyable_trait_names():
@@ -876,7 +876,7 @@ def _run_interface(self, runtime):
 
         if op.exists(self.inputs.in_file):
             formerdf = pd.read_csv(self.inputs.in_file, index_col=0)
-            df = pd.concat([formerdf, df], ignore_index=True )
+            df = pd.concat([formerdf, df], ignore_index=True)
 
         with open(self.inputs.in_file, 'w') as f:
             df.to_csv(f)
@@ -956,6 +956,115 @@ def calc_moments(timeseries_file, moment):
     return np.where(zero, 0, m3 / m2**(moment/2.0))
 
 
+class AddNoiseInputSpec(TraitedSpec):
+    in_file = File(exists=True, mandatory=True,
+                   desc='input image that will be corrupted with noise')
+    in_mask = File(exists=True, desc=('input mask, voxels outside this mask '
+                                      'will be considered background'))
+    snr = traits.Float(10.0, desc='desired output SNR in dB', usedefault=True)
+    dist = traits.Enum('normal', 'rician', usedefault=True, mandatory=True,
+                       desc=('desired noise distribution'))
+    bg_dist = traits.Enum('normal', 'rayleigh', usedefault=True, mandatory=True,
+                          desc=('desired noise distribution, currently '
+                                'only normal is implemented'))
+    out_file = File(desc='desired output filename')
+
+
+class AddNoiseOutputSpec(TraitedSpec):
+    out_file = File(exists=True, desc='corrupted image')
+
+
+class AddNoise(BaseInterface):
+    """
+    Corrupts with noise the input image
+
+    Example
+    -------
+
+    >>> from nipype.algorithms.misc import AddNoise
+    >>> noise = AddNoise()
+    >>> noise.inputs.in_file = 'T1.nii'
+    >>> noise.inputs.in_mask = 'mask.nii'
+    >>> noise.snr = 30.0
+    >>> noise.run() # doctest: +SKIP
+    """
+    input_spec = AddNoiseInputSpec
+    output_spec = AddNoiseOutputSpec
+
+    def _run_interface(self, runtime):
+        in_image = nb.load(self.inputs.in_file)
+        in_data = in_image.get_data()
+        snr = self.inputs.snr
+
+        if isdefined(self.inputs.in_mask):
+            in_mask = nb.load(self.inputs.in_mask).get_data()
+        else:
+            in_mask = np.ones_like(in_data)
+
+        result = self.gen_noise(in_data, mask=in_mask, snr_db=snr,
+                                dist=self.inputs.dist, bg_dist=self.inputs.bg_dist)
+        res_im = nb.Nifti1Image(result, in_image.get_affine(), in_image.get_header())
+        res_im.to_filename(self._gen_output_filename())
+        return runtime
+
+    def _gen_output_filename(self):
+        if not isdefined(self.inputs.out_file):
+            _, base, ext = split_filename(self.inputs.in_file)
+            out_file = os.path.abspath('%s_SNR%03.2f%s' % (base, self.inputs.snr, ext))
+        else:
+            out_file = self.inputs.out_file
+
+        return out_file
+
+    def _list_outputs(self):
+        outputs = self.output_spec().get()
+        outputs['out_file'] = self._gen_output_filename()
+        return outputs
+
+    def gen_noise(self, image, mask=None, snr_db=10.0, dist='normal', bg_dist='normal'):
+        """
+        Generates a copy of an image with a certain amount of
+        added gaussian noise (rayleigh for background in mask)
+        """
+        from math import sqrt
+        snr = sqrt(np.power(10.0, snr_db/10.0))
+
+        if mask is None:
+            mask = np.ones_like(image)
+        else:
+            mask[mask>0] = 1
+            mask[mask<1] = 0
+
+            if mask.ndim < image.ndim:
+                mask = np.rollaxis(np.array([mask]*image.shape[3]), 0, 4)
+
+        signal = image[mask>0].reshape(-1)
+
+        if dist == 'normal':
+            signal = signal - signal.mean()
+            sigma_n = sqrt(signal.var()/snr)
+            noise = np.random.normal(size=image.shape, scale=sigma_n)
+
+            if (np.any(mask==0)) and (bg_dist == 'rayleigh'):
+                bg_noise = np.random.rayleigh(size=image.shape, scale=sigma_n)
+                noise[mask==0] = bg_noise[mask==0]
+
+            im_noise = image + noise
+
+        elif dist == 'rician':
+            sigma_n = signal.mean()/snr
+            n_1 = np.random.normal(size=image.shape, scale=sigma_n)
+            n_2 = np.random.normal(size=image.shape, scale=sigma_n)
+            stde_1 = n_1/sqrt(2.0)
+            stde_2 = n_2/sqrt(2.0)
+            im_noise = np.sqrt((image + stde_1)**2 + (stde_2)**2)
+        else:
+            raise NotImplementedError(('Only normal and rician distributions '
+                                       'are supported'))
+
+        return im_noise
+
+
 class NormalizeProbabilityMapSetInputSpec(TraitedSpec):
     in_files = InputMultiPath(File(exists=True, mandatory=True,
                               desc='The tpms to be normalized') )
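Aside on the math in the gen_noise method added above: the requested SNR is given in dB and converted to a linear factor, from which the noise scale is derived from the in-mask signal (its variance for the 'normal' branch, its mean for the 'rician' branch). The snippet below is a standalone sketch that simply replays that conversion with numpy on a synthetic 1-D signal standing in for the masked voxel values; it is illustrative only and not part of the commit.

# Hedged sketch: reproduces the SNR-to-noise-scale conversion used by gen_noise,
# on a synthetic 1-D "signal" instead of image voxels.
import numpy as np
from math import sqrt

snr_db = 10.0
signal = 100.0 * np.random.rand(1000)          # stand-in for image[mask > 0]

# dB -> linear factor, as in the patch: snr = sqrt(10**(snr_db / 10))
snr = sqrt(np.power(10.0, snr_db / 10.0))

# 'normal' branch: noise scale from the demeaned signal's variance
sigma_normal = sqrt((signal - signal.mean()).var() / snr)

# 'rician' branch: noise scale from the signal mean
sigma_rician = signal.mean() / snr

print(sigma_normal, sigma_rician)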
@@ -973,9 +1082,6 @@ class NormalizeProbabilityMapSet(BaseInterface):
 
     .. note:: Please recall this is not a spatial normalization algorithm
 
-    Example
-    -------
-
     >>> import nipype.algorithms.misc as misc
     >>> normalize = misc.NormalizeProbabilityMapSet()
     >>> normalize.inputs.in_files = [ 'tpm_00.nii.gz', 'tpm_01.nii.gz', 'tpm_02.nii.gz' ]
@@ -988,10 +1094,10 @@ class NormalizeProbabilityMapSet(BaseInterface):
     def _run_interface(self, runtime):
         mask = None
 
-        if isdefined( self.inputs.in_mask ):
+        if isdefined(self.inputs.in_mask):
             mask = self.inputs.in_mask
 
-        self._out_filenames = normalize_tpms( self.inputs.in_files, mask )
+        self._out_filenames = normalize_tpms(self.inputs.in_files, mask)
         return runtime
 
     def _list_outputs(self):
@@ -1000,7 +1106,7 @@ def _list_outputs(self):
         return outputs
 
 
-def normalize_tpms( in_files, in_mask=None, out_files=[] ):
+def normalize_tpms(in_files, in_mask=None, out_files=[]):
     """
     Returns the input tissue probability maps (tpms, aka volume fractions)
     normalized to sum up 1.0 at each voxel within the mask.
@@ -1009,16 +1115,16 @@ def normalize_tpms( in_files, in_mask=None, out_files=[] ):
     import numpy as np
     import os.path as op
 
-    in_files = np.atleast_1d( in_files ).tolist()
+    in_files = np.atleast_1d(in_files).tolist()
 
-    if len(out_files)!=len(in_files):
-        for i,finname in enumerate( in_files ):
-            fname,fext = op.splitext( op.basename( finname ) )
+    if len(out_files) != len(in_files):
+        for i,finname in enumerate(in_files):
+            fname,fext = op.splitext(op.basename(finname))
             if fext == '.gz':
-                fname,fext2 = op.splitext( fname )
+                fname,fext2 = op.splitext(fname)
                 fext = fext2 + fext
 
-            out_file = op.abspath(fname+'_norm'+('_%02d' % i)+fext)
+            out_file = op.abspath('%s_norm_%02d%s' % (fname,i,fext))
             out_files+= [out_file]
 
     imgs = [nib.load(fim) for fim in in_files]
@@ -1028,39 +1134,39 @@ def normalize_tpms( in_files, in_mask=None, out_files=[] ):
         img_data[img_data>0.0] = 1.0
         hdr = imgs[0].get_header().copy()
         hdr['data_type']= 16
-        hdr.set_data_dtype( 'float32' )
-        nib.save( nib.Nifti1Image( img_data.astype(np.float32), imgs[0].get_affine(), hdr ), out_files[0] )
+        hdr.set_data_dtype(np.float32)
+        nib.save(nib.Nifti1Image(img_data.astype(np.float32), imgs[0].get_affine(), hdr), out_files[0])
         return out_files[0]
 
-    img_data = np.array( [ im.get_data() for im in imgs ] ).astype( 'f32' )
+    img_data = np.array([im.get_data() for im in imgs]).astype(np.float32)
     #img_data[img_data>1.0] = 1.0
     img_data[img_data<0.0] = 0.0
-    weights = np.sum( img_data, axis=0 )
+    weights = np.sum(img_data, axis=0)
 
-    msk = np.ones_like( imgs[0].get_data() )
+    msk = np.ones_like(imgs[0].get_data())
     msk[ weights<= 0 ] = 0
 
     if not in_mask is None:
-        msk = nib.load( in_mask ).get_data()
+        msk = nib.load(in_mask).get_data()
         msk[ msk<=0 ] = 0
         msk[ msk>0 ] = 1
 
-    msk = np.ma.masked_equal( msk, 0 )
+    msk = np.ma.masked_equal(msk, 0)
 
 
-    for i,out_file in enumerate( out_files ):
-        data = np.ma.masked_equal( img_data[i], 0 )
+    for i,out_file in enumerate(out_files):
+        data = np.ma.masked_equal(img_data[i], 0)
         probmap = data / weights
         hdr = imgs[i].get_header().copy()
         hdr['data_type']= 16
-        hdr.set_data_dtype( 'float32' )
-        nib.save( nib.Nifti1Image( probmap.astype(np.float32), imgs[i].get_affine(), hdr ), out_file )
+        hdr.set_data_dtype('float32')
+        nib.save(nib.Nifti1Image(probmap.astype(np.float32), imgs[i].get_affine(), hdr), out_file)
 
     return out_files
 
 
 # Deprecated interfaces ---------------------------------------------------------
-class Distance( nam.Distance ):
+class Distance(nam.Distance):
     """Calculates distance between two volumes.
 
     .. deprecated:: 0.10.0
@@ -1072,7 +1178,7 @@ def __init__(self, **inputs):
                        " please use nipype.algorithms.metrics.Distance"),
                       DeprecationWarning)
 
-class Overlap( nam.Overlap ):
+class Overlap(nam.Overlap):
     """Calculates various overlap measures between two maps.
 
     .. deprecated:: 0.10.0
@@ -1085,7 +1191,7 @@ def __init__(self, **inputs):
                       DeprecationWarning)
 
 
-class FuzzyOverlap( nam.FuzzyOverlap ):
+class FuzzyOverlap(nam.FuzzyOverlap):
     """Calculates various overlap measures between two maps, using a fuzzy
     definition.
 
@@ -1097,4 +1203,3 @@ def __init__(self, **inputs):
         warnings.warn(("This interface has been deprecated since 0.10.0,"
                        " please use nipype.algorithms.metrics.FuzzyOverlap"),
                       DeprecationWarning)
-
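For orientation, the AddNoise interface introduced in this file can be run on its own roughly as sketched below. This is a minimal, hedged example derived from the class docstring and _gen_output_filename: 'T1.nii' and 'mask.nii' are placeholder file names, and the output lands in the working directory with a name like T1_SNR30.00.nii, following the '%s_SNR%03.2f%s' pattern.

# Minimal usage sketch (placeholder file names; requires nibabel-readable inputs).
from nipype.algorithms.misc import AddNoise

noise = AddNoise()
noise.inputs.in_file = 'T1.nii'      # image to corrupt (placeholder name)
noise.inputs.in_mask = 'mask.nii'    # voxels outside this mask are treated as background
noise.inputs.snr = 30.0              # desired output SNR in dB
noise.inputs.dist = 'rician'         # foreground noise model: 'normal' or 'rician'
noise.inputs.bg_dist = 'rayleigh'    # background noise model when a mask is given
res = noise.run()                    # writes e.g. T1_SNR30.00.nii in the working directory
print(res.outputs.out_file)

Note that the docstring example sets noise.snr directly; going through noise.inputs, as here, is the usual way to set interface traits.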
Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
+# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
+from nipype.testing import assert_equal
+from nipype.algorithms.misc import AddNoise
+
+def test_AddNoise_inputs():
+    input_map = dict(bg_dist=dict(mandatory=True,
+    usedefault=True,
+    ),
+    dist=dict(mandatory=True,
+    usedefault=True,
+    ),
+    in_file=dict(mandatory=True,
+    ),
+    in_mask=dict(),
+    out_file=dict(),
+    snr=dict(usedefault=True,
+    ),
+    )
+    inputs = AddNoise.input_spec()
+
+    for key, metadata in input_map.items():
+        for metakey, value in metadata.items():
+            yield assert_equal, getattr(inputs.traits()[key], metakey), value
+
+def test_AddNoise_outputs():
+    output_map = dict(out_file=dict(),
+    )
+    outputs = AddNoise.output_spec()
+
+    for key, metadata in output_map.items():
+        for metakey, value in metadata.items():
+            yield assert_equal, getattr(outputs.traits()[key], metakey), value
+
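The auto-generated test above uses nose-style yield tests to compare expected trait metadata against the loaded spec. Written out by hand, one iteration of that loop amounts to the checks sketched below (a sketch, assuming the AddNoise class from this commit is importable).

# Hand-rolled equivalent of one iteration of the generated yield test.
from nipype.algorithms.misc import AddNoise

inputs = AddNoise.input_spec()
assert inputs.traits()['snr'].usedefault == True       # snr=dict(usedefault=True)
assert inputs.traits()['in_file'].mandatory == True    # in_file=dict(mandatory=True)

outputs = AddNoise.output_spec()
assert 'out_file' in outputs.traits()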
