Commit 79b9782

Remove a duplicate "Returns" section (which irked numpydoc 0.7)
1 parent d80061b commit 79b9782
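Context for the fix: numpydoc 0.7 began erroring out on docstrings in which the same section appears twice, instead of silently keeping one copy; that is presumably what "irked" it here, since coherence_regularized carried a stray second "Returns" block (removed in the hunk below). A minimal sketch of the offending pattern, using a hypothetical docstring rather than the actual nitime one:

    def coherence_regularized(time_series, epsilon, alpha, csd_method=None):
        """Compute a regularized coherence estimate.

        Returns
        -------
        f : float array
            The central frequencies of the estimated bands.
        c : float array
            The regularized coherence values.

        Returns                  <-- duplicate section header; numpydoc 0.7
        -------                      refuses to parse the docstring
        frequencies, coherence
        """

Deleting the second block, as this commit does, keeps the documented return values and satisfies the stricter parser.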

File tree

1 file changed (+47, -58 lines)


nitime/algorithms/cohere.py

Lines changed: 47 additions & 58 deletions
@@ -67,8 +67,8 @@ def coherency(time_series, csd_method=None):
 
     f, fxy = get_spectra(time_series, csd_method)
 
-    #A container for the coherencys, with the size and shape of the expected
-    #output:
+    # A container for the coherencys, with the size and shape of the expected
+    # output:
     c = np.zeros((time_series.shape[0],
                   time_series.shape[0],
                   f.shape[0]), dtype=complex)  # Make sure it's complex
@@ -108,7 +108,6 @@ def coherency_spec(fxy, fxx, fyy):
     --------
     :func:`coherency`
     """
-
     return fxy / np.sqrt(fxx * fyy)
 
 
@@ -275,7 +274,6 @@ def coherency_regularized(time_series, epsilon, alpha, csd_method=None):
 
 
 def _coherency_reqularized(fxy, fxx, fyy, epsilon, alpha):
-
     r"""
     A regularized version of the calculation of coherency, which is more
     robust to numerical noise than the standard calculation
@@ -303,9 +301,8 @@ def _coherency_reqularized(fxy, fxx, fyy, epsilon, alpha):
        The coherence values
 
     """
-
     return (((alpha * fxy + epsilon)) /
-        np.sqrt(((alpha ** 2) * (fxx + epsilon) * (fyy + epsilon))))
+            np.sqrt(((alpha ** 2) * (fxx + epsilon) * (fyy + epsilon))))
 
 
 def coherence_regularized(time_series, epsilon, alpha, csd_method=None):
@@ -341,9 +338,6 @@ def coherence_regularized(time_series, epsilon, alpha, csd_method=None):
        This is a symmetric matrix with the coherencys of the signals. The
        coherency of signal i and signal j is in f[i][j].
 
-    Returns
-    -------
-    frequencies, coherence
 
     Notes
     -----
@@ -360,8 +354,8 @@ def coherence_regularized(time_series, epsilon, alpha, csd_method=None):
 
     f, fxy = get_spectra(time_series, csd_method)
 
-    #A container for the coherences, with the size and shape of the expected
-    #output:
+    # A container for the coherences, with the size and shape of the expected
+    # output:
     c = np.zeros((time_series.shape[0],
                   time_series.shape[0],
                   f.shape[0]), complex)
@@ -378,7 +372,6 @@ def coherence_regularized(time_series, epsilon, alpha, csd_method=None):
 
 
 def _coherence_reqularized(fxy, fxx, fyy, epsilon, alpha):
-
     r"""A regularized version of the calculation of coherence, which is more
     robust to numerical noise than the standard calculation.
 
@@ -406,7 +399,7 @@ def _coherence_reqularized(fxy, fxx, fyy, epsilon, alpha):
 
     """
     return (((alpha * np.abs(fxy) + epsilon) ** 2) /
-        ((alpha ** 2) * (fxx + epsilon) * (fyy + epsilon)))
+            ((alpha ** 2) * (fxx + epsilon) * (fyy + epsilon)))
 
 
 def coherency_bavg(time_series, lb=0, ub=None, csd_method=None):
@@ -509,7 +502,6 @@ def _coherency_bavg(fxy, fxx, fyy):
     temporal dynamics of functional networks using phase spectrum of fMRI
     data. Neuroimage, 28: 227-37.
     """
-
     # Average the phases and the magnitudes separately and then recombine:
 
     p = np.angle(fxy)
@@ -519,7 +511,7 @@ def _coherency_bavg(fxy, fxx, fyy):
     m_bavg = np.mean(m)
 
     # Recombine according to z = r(cos(phi)+sin(phi)i):
-    return m_bavg * (np.cos(p_bavg) + np.sin(p_bavg) * 1j)
+    return m_bavg * (np.cos(p_bavg) + np.sin(p_bavg) * 1j)
 
 
 def coherence_bavg(time_series, lb=0, ub=None, csd_method=None):
@@ -546,7 +538,6 @@ def coherence_bavg(time_series, lb=0, ub=None, csd_method=None):
        This is an upper-diagonal array, where c[i][j] is the band-averaged
        coherency between time_series[i] and time_series[j]
     """
-
     if csd_method is None:
         csd_method = {'this_method': 'welch'}  # The default
 
@@ -575,7 +566,8 @@
 def _coherence_bavg(fxy, fxx, fyy):
     r"""
     Compute the band-averaged coherency between the spectra of two time series.
-    input to this function is in the frequency domain
+
+    Input to this function is in the frequency domain
 
     Parameters
     ----------
@@ -649,7 +641,6 @@ def coherence_partial(time_series, r, csd_method=None):
     functional connectivity using coherence and partial coherence analyses of
     fMRI data Neuroimage, 21: 647-58.
     """
-
     if csd_method is None:
         csd_method = {'this_method': 'welch'}  # The default
 
@@ -664,8 +655,12 @@ def coherence_partial(time_series, r, csd_method=None):
         for j in range(i, time_series.shape[0]):
             f, fxx, frr, frx = get_spectra_bi(time_series[i], r, csd_method)
             f, fyy, frr, fry = get_spectra_bi(time_series[j], r, csd_method)
-            c[i, j] = coherence_partial_spec(fxy[i][j], fxy[i][i],
-                                             fxy[j][j], frx, fry, frr)
+            c[i, j] = coherence_partial_spec(fxy[i][j],
+                                             fxy[i][i],
+                                             fxy[j][j],
+                                             frx,
+                                             fry,
+                                             frr)
 
     idx = tril_indices(time_series.shape[0], -1)
     c[idx[0], idx[1], ...] = c[idx[1], idx[0], ...].conj()  # Make it symmetric
@@ -702,7 +697,7 @@ def coherence_partial_spec(fxy, fxx, fyy, fxr, fry, frr):
     Rxy = coh(fxy, fxx, fyy)
 
     return (((np.abs(Rxy - Rxr * Rry)) ** 2) /
-        ((1 - ((np.abs(Rxr)) ** 2)) * (1 - ((np.abs(Rry)) ** 2))))
+            ((1 - ((np.abs(Rxr)) ** 2)) * (1 - ((np.abs(Rry)) ** 2))))
 
 
 def coherency_phase_spectrum(time_series, csd_method=None):
@@ -799,8 +794,9 @@ def coherency_phase_delay(time_series, lb=0, ub=None, csd_method=None):
         for j in range(i, time_series.shape[0]):
             p[i][j] = _coherency_phase_delay(f[lb_idx:ub_idx],
                                              fxy[i][j][lb_idx:ub_idx])
-            p[j][i] = _coherency_phase_delay(f[lb_idx:ub_idx],
-                       fxy[i][j][lb_idx:ub_idx].conjugate())
+            p[j][i] = _coherency_phase_delay(
+                f[lb_idx:ub_idx],
+                fxy[i][j][lb_idx:ub_idx].conjugate())
 
     return f[lb_idx:ub_idx], p
 
@@ -826,7 +822,6 @@ def _coherency_phase_delay(f, fxy):
        the phase delay (in sec) for each frequency band.
 
     """
-
     return np.angle(fxy) / (2 * np.pi * f)
 
 
@@ -861,9 +856,7 @@ def correlation_spectrum(x1, x2, Fs=2 * np.pi, norm=False):
     J Wendt, P A Turski, C H Moritz, M A Quigley, M E Meyerand (2000). Mapping
     functionally related regions of brain with functional connectivity MR
     imaging. AJNR American journal of neuroradiology 21:1636-44
-
     """
-
     x1 = x1 - np.mean(x1)
     x2 = x2 - np.mean(x2)
     x1_f = fftpack.fft(x1)
@@ -876,18 +869,18 @@ def correlation_spectrum(x1, x2, Fs=2 * np.pi, norm=False):
            (D * n))
 
     if norm:
-        ccn = ccn / np.sum(ccn) * 2  # Only half of the sum is sent back
-                                     # because of the freq domain symmetry.
-                                     # XXX Does normalization make this
-                                     # strictly positive?
+        # Only half of the sum is sent back because of the freq domain
+        # symmetry.
+        ccn = ccn / np.sum(ccn) * 2
+        # XXX Does normalization make this strictly positive?
 
     f = utils.get_freqs(Fs, n)
     return f, ccn[0:(n // 2 + 1)]
 
 
-#------------------------------------------------------------------------
-#Coherency calculated using cached spectra
-#------------------------------------------------------------------------
+# -----------------------------------------------------------------------
+# Coherency calculated using cached spectra
+# -----------------------------------------------------------------------
 """The idea behind this set of functions is to keep a cache of the windowed fft
 calculations of each time-series in a massive collection of time-series, so
 that this calculation doesn't have to be repeated each time a cross-spectrum is
@@ -898,8 +891,8 @@ def correlation_spectrum(x1, x2, Fs=2 * np.pi, norm=False):
 
 
 def cache_fft(time_series, ij, lb=0, ub=None,
-        method=None, prefer_speed_over_memory=False,
-        scale_by_freq=True):
+              method=None, prefer_speed_over_memory=False,
+              scale_by_freq=True):
     """compute and cache the windowed FFTs of the time_series, in such a way
     that computing the psd and csd of any combination of them can be done
     quickly.
@@ -957,7 +950,7 @@ def cache_fft(time_series, ij, lb=0, ub=None,
         raise ValueError(e_s)
     time_series = utils.zero_pad(time_series, NFFT)
 
-    #The shape of the zero-padded version:
+    # The shape of the zero-padded version:
     n_channels, n_time_points = time_series.shape
 
     # get all the unique channels in time_series that we are interested in by
@@ -973,30 +966,30 @@ def cache_fft(time_series, ij, lb=0, ub=None,
     else:
         n_freqs = NFFT // 2 + 1
 
-    #Which frequencies
+    # Which frequencies
     freqs = utils.get_freqs(Fs, NFFT)
 
-    #If there are bounds, limit the calculation to within that band,
-    #potentially include the DC component:
+    # If there are bounds, limit the calculation to within that band,
+    # potentially include the DC component:
     lb_idx, ub_idx = utils.get_bounds(freqs, lb, ub)
 
     n_freqs = ub_idx - lb_idx
-    #Make the window:
+    # Make the window:
     if mlab.cbook.iterable(window):
         assert(len(window) == NFFT)
         window_vals = window
     else:
         window_vals = window(np.ones(NFFT, time_series.dtype))
 
-    #Each fft needs to be normalized by the square of the norm of the window
-    #and, for consistency with newer versions of mlab.csd (which, in turn, are
-    #consistent with Matlab), normalize also by the sampling rate:
+    # Each fft needs to be normalized by the square of the norm of the window
+    # and, for consistency with newer versions of mlab.csd (which, in turn, are
+    # consistent with Matlab), normalize also by the sampling rate:
 
     if scale_by_freq:
-        #This is the normalization factor for one-sided estimation, taking into
-        #account the sampling rate. This makes the PSD a density function, with
-        #units of dB/Hz, so that integrating over frequencies gives you the RMS
-        #(XXX this should be in the tests!).
+        # This is the normalization factor for one-sided estimation, taking
+        # into account the sampling rate. This makes the PSD a density
+        # function, with units of dB/Hz, so that integrating over
+        # frequencies gives you the RMS. (XXX this should be in the tests!).
         norm_val = (np.abs(window_vals) ** 2).sum() * (Fs / 2)
 
     else:
@@ -1012,16 +1005,14 @@ def cache_fft(time_series, ij, lb=0, ub=None,
     FFT_conj_slices = {}
 
     for i_channel in all_channels:
-        #dbg:
-        #print i_channel
         Slices = np.zeros((n_slices, n_freqs), dtype=np.complex)
         for iSlice in range(n_slices):
             thisSlice = time_series[i_channel,
                                     i_times[iSlice]:i_times[iSlice] + NFFT]
 
-            #Windowing:
+            # Windowing:
             thisSlice = window_vals * thisSlice  # No detrending
-            #Derive the fft for that slice:
+            # Derive the fft for that slice:
             Slices[iSlice, :] = (fftpack.fft(thisSlice)[lb_idx:ub_idx])
 
         FFT_slices[i_channel] = Slices
@@ -1068,15 +1059,13 @@ def cache_to_psd(cache, ij):
         all_channels.add(j)
 
     for i in all_channels:
-        #dbg:
-        #print i
-        #If we made the conjugate slices:
+        # If we made the conjugate slices:
         if FFT_conj_slices:
             Pxx[i] = FFT_slices[i] * FFT_conj_slices[i]
         else:
             Pxx[i] = FFT_slices[i] * np.conjugate(FFT_slices[i])
 
-        #If there is more than one window
+        # If there is more than one window
         if FFT_slices[i].shape[0] > 1:
            Pxx[i] = np.mean(Pxx[i], 0)
 
@@ -1123,7 +1112,7 @@ def cache_to_phase(cache, ij):
 
    for i in all_channels:
        Phase[i] = np.angle(FFT_slices[i])
-        #If there is more than one window, average over all the windows:
+        # If there is more than one window, average over all the windows:
        if FFT_slices[i].shape[0] > 1:
            Phase[i] = np.mean(Phase[i], 0)
 
@@ -1171,10 +1160,10 @@ def cache_to_relative_phase(cache, ij):
 
    channels_i = max(1, max(ij_array[:, 0]) + 1)
    channels_j = max(1, max(ij_array[:, 1]) + 1)
-    #Pre-allocate for speed:
+    # Pre-allocate for speed:
    Phi_xy = np.zeros((channels_i, channels_j, freqs), dtype=np.complex)
 
-    #These checks take time, so do them up front, not in every iteration:
+    # These checks take time, so do them up front, not in every iteration:
    if list(FFT_slices.items())[0][1].shape[0] > 1:
        if FFT_conj_slices:
            for i, j in ij:
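For reference, the regularized estimators whose return statements are re-wrapped above compute, reading the math directly off the two return expressions (LaTeX notation mine):

    R^{\mathrm{reg}}_{xy}(\nu) = \frac{\alpha f_{xy}(\nu) + \epsilon}
        {\sqrt{\alpha^{2}\,(f_{xx}(\nu) + \epsilon)\,(f_{yy}(\nu) + \epsilon)}}
    \qquad
    \mathrm{Coh}^{\mathrm{reg}}_{xy}(\nu) = \frac{\bigl(\alpha\,|f_{xy}(\nu)| + \epsilon\bigr)^{2}}
        {\alpha^{2}\,(f_{xx}(\nu) + \epsilon)\,(f_{yy}(\nu) + \epsilon)}

for _coherency_reqularized and _coherence_reqularized respectively, with f_xy the cross-spectrum and f_xx, f_yy the power spectra.

A minimal usage sketch of the public entry point touched by the first hunk (hypothetical toy data; it assumes the (frequencies, values) return order used by the other functions in this file, e.g. correlation_spectrum and coherency_phase_delay):

    import numpy as np
    from nitime.algorithms.cohere import coherency

    ts = np.random.randn(2, 1024)   # 2 channels, 1024 time points
    f, c = coherency(ts)            # default Welch CSD estimator
    # c[i, j] holds the complex coherency of channels i and j at each
    # frequency in f.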

0 commit comments
