
Commit cab64bd

Merge branch 'release/2.4.0'
2 parents e8415f6 + 76eaa36

File tree

18 files changed: +567 -115 lines changed

brainbox/behavior/dlc.py

Lines changed: 73 additions & 0 deletions
@@ -4,7 +4,9 @@
 import numpy as np
 import scipy.interpolate as interpolate
 import logging
+import warnings
 from one.api import ONE
+from ibllib.dsp.smooth import smooth_interpolate_savgol

 logger = logging.getLogger('ibllib')

@@ -141,3 +143,74 @@ def get_dlc_everything(dlc_cam, camera):
     dlc_cam['aligned'] = aligned

     return dlc_cam
+
+
+def get_pupil_diameter(dlc):
+    """
+    Estimate the pupil diameter by taking the median of different computations.
+
+    The two most straightforward estimates are d1 = top - bottom and d2 = left - right.
+    In addition, assume the pupil is a circle and estimate the diameter from the other pairs of points.
+
+    :param dlc: dlc pqt table with pupil estimates, should be likelihood thresholded (e.g. at 0.9)
+    :return: np.array, pupil diameter estimate for each time point, shape (n_frames,)
+    """
+    diameters = []
+    # Get the x,y coordinates of the four pupil points
+    top, bottom, left, right = [np.vstack((dlc[f'pupil_{point}_r_x'], dlc[f'pupil_{point}_r_y']))
+                                for point in ['top', 'bottom', 'left', 'right']]
+    # First compute the direct diameters
+    diameters.append(np.linalg.norm(top - bottom, axis=0))
+    diameters.append(np.linalg.norm(left - right, axis=0))
+
+    # For non-crossing edges, estimate the diameter via the circle assumption
+    for pair in [(top, left), (top, right), (bottom, left), (bottom, right)]:
+        diameters.append(np.linalg.norm(pair[0] - pair[1], axis=0) * 2 ** 0.5)
+
+    # Ignore the all-NaN RuntimeWarning
+    with warnings.catch_warnings():
+        warnings.simplefilter("ignore", category=RuntimeWarning)
+        return np.nanmedian(diameters, axis=0)
+
+
+def get_smooth_pupil_diameter(diameter_raw, camera, std_thresh=5, nan_thresh=1):
+    """
+    :param diameter_raw: np.array, raw pupil diameters, calculated from (thresholded) dlc traces
+    :param camera: str ('left', 'right'), which camera to run the smoothing for
+    :param std_thresh: threshold (in standard deviations) beyond which a point is labeled as an outlier
+    :param nan_thresh: threshold (in seconds) above which nans are not interpolated but kept
+                       (for long stretches interpolation may not be appropriate)
+    :return: np.array, smoothed pupil diameter for each time point, shape (n_frames,)
+    """
+    # set the framerate of the camera
+    if camera == 'left':
+        fr = 60  # set by hardware
+        window = 31  # works well empirically
+    elif camera == 'right':
+        fr = 150  # set by hardware
+        window = 75  # works well empirically
+    else:
+        raise NotImplementedError("camera has to be 'left' or 'right'")
+
+    # run Savitzky-Golay filter on non-nan time points to denoise
+    diameter_smoothed = smooth_interpolate_savgol(diameter_raw, window=window, order=3, interp_kind='linear')
+
+    # find outliers and set them to nan
+    difference = diameter_raw - diameter_smoothed
+    outlier_thresh = std_thresh * np.nanstd(difference)
+    without_outliers = np.copy(diameter_raw)
+    without_outliers[(difference < -outlier_thresh) | (difference > outlier_thresh)] = np.nan
+    # run Savitzky-Golay filter again on the (possibly reduced) non-nan time points to denoise
+    diameter_smoothed = smooth_interpolate_savgol(without_outliers, window=window, order=3, interp_kind='linear')
+
+    # don't interpolate long stretches of nans
+    t = np.diff(np.isnan(without_outliers).astype(int))
+    begs = np.where(t == 1)[0]
+    ends = np.where(t == -1)[0]
+    if begs.shape[0] > ends.shape[0]:
+        begs = begs[:ends.shape[0]]
+    for b, e in zip(begs, ends):
+        if (e - b) > (fr * nan_thresh):
+            diameter_smoothed[(b + 1):(e + 1)] = np.nan  # offset by 1 due to earlier diff
+
+    return diameter_smoothed
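
Usage sketch (not part of this commit): the two helpers are meant to be chained, raw diameter first, then smoothing. The parquet path and the *_likelihood column names below are assumptions for illustration; the diff only requires a DLC table with pupil_{top,bottom,left,right}_r_x/_y columns that has been likelihood-thresholded beforehand.

import numpy as np
import pandas as pd
from brainbox.behavior.dlc import get_pupil_diameter, get_smooth_pupil_diameter

# assumed local path to a left-camera DLC table; adjust to your own data
dlc = pd.read_parquet('alf/_ibl_leftCamera.dlc.pqt')

# likelihood-threshold the pupil points before estimating the diameter (e.g. at 0.9);
# the *_likelihood column names are an assumption about the DLC output
for point in ['top', 'bottom', 'left', 'right']:
    bad = dlc[f'pupil_{point}_r_likelihood'] < 0.9
    dlc.loc[bad, [f'pupil_{point}_r_x', f'pupil_{point}_r_y']] = np.nan

diameter_raw = get_pupil_diameter(dlc)                                    # shape (n_frames,)
diameter_smooth = get_smooth_pupil_diameter(diameter_raw, camera='left')  # left camera runs at 60 Hz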

brainbox/ephys_plots.py

Lines changed: 4 additions & 2 deletions
@@ -404,6 +404,8 @@ def plot_brain_regions(channel_ids, channel_depths=None, brain_regions=None, dis
     if display:
         if ax is None:
             fig, ax = plt.subplots()
+        else:
+            fig = ax.get_figure()

         for reg, col in zip(regions, region_colours):
             height = np.abs(reg[1] - reg[0])
@@ -423,7 +425,7 @@


 def plot_cdf(spike_amps, spike_depths, spike_times, n_amp_bins=10, d_bin=40, amp_range=None, d_range=None,
-             display=False, cmap='hot'):
+             display=False, cmap='hot', ax=None):
     """
     Plot cumulative amplitude of spikes across depth
     :param spike_amps:
@@ -466,7 +468,7 @@ def histc(x, bins):
                     ylabel='Distance from probe tip (um)', clabel='Firing Rate (Hz)')

     if display:
-        fig, ax = plot_image(data.convert2dict(), fig_kwargs={'figsize': [3, 7]})
+        fig, ax = plot_image(data.convert2dict(), fig_kwargs={'figsize': [3, 7]}, ax=ax)
         return data.convert2dict(), fig, ax

     return data
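
Usage sketch (not part of this commit): the new ax keyword lets plot_cdf render into a caller-supplied matplotlib axes instead of always opening its own figure. The spike arrays below are synthetic placeholders.

import matplotlib.pyplot as plt
import numpy as np
from brainbox.ephys_plots import plot_cdf

# synthetic spike data, placeholders for illustration only
rng = np.random.default_rng(0)
spike_amps = np.abs(rng.normal(size=10000)) * 1e-4
spike_depths = rng.uniform(0, 3840, size=10000)
spike_times = np.sort(rng.uniform(0, 600, size=10000))

# draw into a caller-owned axes rather than letting plot_cdf create its own figure
fig, ax = plt.subplots(figsize=(3, 7))
data_dict, fig, ax = plot_cdf(spike_amps, spike_depths, spike_times, display=True, ax=ax)
plt.show()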

brainbox/io/one.py

Lines changed: 3 additions & 0 deletions
@@ -331,6 +331,9 @@ def _load_channel_locations_traj(eid, probe=None, one=None, revision=None, align

            channels[probe] = _channels_traj2bunch(chans, brain_atlas)

+            channels[probe]['axial_um'] = chn_coords[:, 1]
+            channels[probe]['lateral_um'] = chn_coords[:, 0]
+
        else:
            _logger.warning(f'Histology tracing for {probe} does not exist. '
                            f'No channels for {probe}')
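
Usage sketch (not part of this commit): with this change the channel bunch resolved from a histology trajectory also carries the probe-geometry coordinates. The snippet assumes the public load_channel_locations wrapper and a session with histology tracing; the eid is a placeholder.

from one.api import ONE
from brainbox.io.one import load_channel_locations

one = ONE(base_url='https://openalyx.internationalbrainlab.org')
eid, probe = '<session-eid>', 'probe00'  # placeholders, replace with a real insertion
channels = load_channel_locations(eid, probe=probe, one=one)

# new keys returned alongside the existing x/y/z and brain-region fields
axial = channels[probe]['axial_um']      # depth along the probe shank (um)
lateral = channels[probe]['lateral_um']  # position across the probe shank (um)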

examples/one/histology/docs_find_nearby_trajectories.py

Lines changed: 0 additions & 1 deletion
@@ -15,7 +15,6 @@
 import ibllib.atlas as atlas
 from atlaselectrophysiology import rendering

-mlab.init_notebook()
 # Instantiate brain atlas and one
 brain_atlas = atlas.AllenAtlas(25)
 one = ONE(base_url='https://openalyx.internationalbrainlab.org')

examples/one/histology/docs_visualize_session_coronal_tilted.py

Lines changed: 1 addition & 1 deletion
@@ -29,4 +29,4 @@

 cax = ba.plot_tilted_slice(xyz=picks, axis=1, volume='image')
 cax.plot(picks[:, 0] * 1e6, picks[:, 2] * 1e6)
-cax.plot(channels[probe_label].x * 1e6, channels[probe_label].z * 1e6, 'g*')
+cax.plot(channels[probe_label]['x'] * 1e6, channels[probe_label]['z'] * 1e6, 'g*')

ibllib/atlas/cosmos.npy

-40 Bytes
Binary file not shown.

ibllib/dsp/smooth.py

Lines changed: 136 additions & 0 deletions
@@ -1,5 +1,6 @@
 import numpy as np
 import matplotlib.pyplot as plt
+from scipy.interpolate import interp1d

 import ibllib.dsp.fourier as ft

@@ -77,6 +78,141 @@ def rolling_window(x, window_len=11, window='blackman'):
     return y[round((window_len / 2 - 1)):round(-(window_len / 2))]


+def non_uniform_savgol(x, y, window, polynom):
+    """Applies a Savitzky-Golay filter to y with non-uniform spacing as defined in x.
+
+    This is based on
+    https://dsp.stackexchange.com/questions/1676/savitzky-golay-smoothing-filter-for-not-equally-spaced-data
+    The borders are interpolated like scipy.signal.savgol_filter would do
+    https://dsp.stackexchange.com/a/64313
+
+    Parameters
+    ----------
+    x : array_like
+        List of floats representing the x values of the data
+    y : array_like
+        List of floats representing the y values. Must have the same length as x
+    window : int (odd)
+        Window length in data points. Must be odd and smaller than the length of x
+    polynom : int
+        The order of the polynomial used. Must be smaller than the window size
+
+    Returns
+    -------
+    np.array
+        The smoothed y values
+    """
+    if len(x) != len(y):
+        raise ValueError('"x" and "y" must be of the same size')
+    if len(x) < window:
+        raise ValueError('The data size must be larger than the window size')
+    if type(window) is not int:
+        raise TypeError('"window" must be an integer')
+    if window % 2 == 0:
+        raise ValueError('The "window" must be an odd integer')
+    if type(polynom) is not int:
+        raise TypeError('"polynom" must be an integer')
+    if polynom >= window:
+        raise ValueError('"polynom" must be less than "window"')
+
+    half_window = window // 2
+    polynom += 1
+
+    # Initialize variables
+    A = np.empty((window, polynom))   # Matrix
+    tA = np.empty((polynom, window))  # Transposed matrix
+    t = np.empty(window)              # Local x variables
+    y_smoothed = np.full(len(y), np.nan)
+
+    # Start smoothing
+    for i in range(half_window, len(x) - half_window, 1):
+        # Center a window of x values on x[i]
+        for j in range(0, window, 1):
+            t[j] = x[i + j - half_window] - x[i]
+
+        # Create the initial matrix A and its transposed form tA
+        for j in range(0, window, 1):
+            r = 1.0
+            for k in range(0, polynom, 1):
+                A[j, k] = r
+                tA[k, j] = r
+                r *= t[j]
+
+        # Multiply the two matrices
+        tAA = np.matmul(tA, A)
+        # Invert the product of the matrices
+        tAA = np.linalg.inv(tAA)
+        # Calculate the pseudoinverse of the design matrix
+        coeffs = np.matmul(tAA, tA)
+        # Calculate c0 which is also the y value for y[i]
+        y_smoothed[i] = 0
+        for j in range(0, window, 1):
+            y_smoothed[i] += coeffs[0, j] * y[i + j - half_window]
+
+        # If at the end or beginning, store all coefficients for the polynomial
+        if i == half_window:
+            first_coeffs = np.zeros(polynom)
+            for j in range(0, window, 1):
+                for k in range(polynom):
+                    first_coeffs[k] += coeffs[k, j] * y[j]
+        elif i == len(x) - half_window - 1:
+            last_coeffs = np.zeros(polynom)
+            for j in range(0, window, 1):
+                for k in range(polynom):
+                    last_coeffs[k] += coeffs[k, j] * y[len(y) - window + j]
+
+    # Interpolate the result at the left border
+    for i in range(0, half_window, 1):
+        y_smoothed[i] = 0
+        x_i = 1
+        for j in range(0, polynom, 1):
+            y_smoothed[i] += first_coeffs[j] * x_i
+            x_i *= x[i] - x[half_window]
+
+    # Interpolate the result at the right border
+    for i in range(len(x) - half_window, len(x), 1):
+        y_smoothed[i] = 0
+        x_i = 1
+        for j in range(0, polynom, 1):
+            y_smoothed[i] += last_coeffs[j] * x_i
+            x_i *= x[i] - x[-half_window - 1]
+
+    return y_smoothed
+
+
+def smooth_interpolate_savgol(signal, window=31, order=3, interp_kind='cubic'):
+    """Run a Savitzky-Golay filter on the signal and interpolate through nan points.
+
+    Parameters
+    ----------
+    signal : np.ndarray
+        original noisy signal of shape (t,), may contain nans
+    window : int
+        window of the polynomial fit for the Savitzky-Golay filter
+    order : int
+        order of the polynomial for the Savitzky-Golay filter
+    interp_kind : str
+        type of interpolation for nans, e.g. 'linear', 'quadratic', 'cubic'
+
+    Returns
+    -------
+    np.array
+        smoothed, interpolated signal for each time point, shape (t,)
+    """
+    signal_noisy_w_nans = np.copy(signal)
+    timestamps = np.arange(signal_noisy_w_nans.shape[0])
+    good_idxs = np.where(~np.isnan(signal_noisy_w_nans))[0]
+    # perform Savitzky-Golay filtering on non-nan points
+    signal_smooth_nonans = non_uniform_savgol(
+        timestamps[good_idxs], signal_noisy_w_nans[good_idxs], window=window, polynom=order)
+    signal_smooth_w_nans = np.copy(signal_noisy_w_nans)
+    signal_smooth_w_nans[good_idxs] = signal_smooth_nonans
+    # interpolate nan points
+    interpolater = interp1d(
+        timestamps[good_idxs], signal_smooth_nonans, kind=interp_kind, fill_value='extrapolate')
+    signal = interpolater(timestamps)
+
+    return signal
+
+
 def smooth_demo():

     t = np.linspace(-4, 4, 100)
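
Usage sketch (not part of this commit): smooth_interpolate_savgol is what the new pupil smoothing in brainbox/behavior/dlc.py calls, and non_uniform_savgol can also be used directly on irregularly sampled data. The signals below are synthetic, for illustration only.

import numpy as np
from ibllib.dsp.smooth import non_uniform_savgol, smooth_interpolate_savgol

# synthetic noisy trace with a few nan gaps
t = np.arange(500)
signal = np.sin(t / 20) + 0.1 * np.random.randn(t.size)
signal[40:45] = np.nan
signal[300:310] = np.nan
smoothed = smooth_interpolate_savgol(signal, window=31, order=3, interp_kind='linear')

# non_uniform_savgol on its own, for irregularly sampled x
x = np.sort(np.random.uniform(0, 100, 201))
y = np.cos(x / 10) + 0.05 * np.random.randn(x.size)
y_smooth = non_uniform_savgol(x, y, window=11, polynom=3)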

ibllib/oneibl/registration.py

Lines changed: 10 additions & 4 deletions
@@ -165,10 +165,16 @@ def create_sessions(self, root_data_folder, glob_pattern='**/create_me.flag', dr
             if dry:
                 print(flag_file)
                 continue
-            _logger.info('creating session for ' + str(flag_file.parent))
-            # providing a false flag stops the registration after session creation
-            self.create_session(flag_file.parent)
-            flag_file.unlink()
+            try:
+                _logger.info('creating session for ' + str(flag_file.parent))
+                # providing a false flag stops the registration after session creation
+                self.create_session(flag_file.parent)
+                flag_file.unlink()
+            except BaseException as e:
+                _logger.error(f'Error creating session for {flag_file.parent}\n{e}')
+                _logger.warning(f'Skipping {flag_file.parent}')
+                continue
+
         return [ff.parent for ff in flag_files]

     def create_session(self, session_path):
