Skip to content

Commit f82cf5d

Browse files
committed
Merge branch 'release/2.13.0'
2 parents 810282c + be1e43f commit f82cf5d

File tree

17 files changed

+151
-67
lines changed

17 files changed

+151
-67
lines changed

brainbox/io/one.py

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -334,7 +334,6 @@ def _load_channel_locations_traj(eid, probe=None, one=None, revision=None, align
334334
# get the channels from histology tracing
335335
xyz = xyz[np.argsort(xyz[:, 2]), :]
336336
chans = histology.interpolate_along_track(xyz, (depths + TIP_SIZE_UM) / 1e6)
337-
338337
channels[probe] = _channels_traj2bunch(chans, brain_atlas)
339338
source = 'traced'
340339
channels[probe]['axial_um'] = chn_coords[:, 1]
@@ -894,6 +893,7 @@ class SpikeSortingLoader:
894893
collection: str = ''
895894
histology: str = '' # 'alf', 'resolved', 'aligned' or 'traced'
896895
spike_sorting_path: Path = None
896+
_sync: dict = None
897897

898898
def __post_init__(self):
899899
# pid gets precedence
@@ -1039,3 +1039,20 @@ def url(self):
10391039
"""Gets flatiron URL for the session"""
10401040
webclient = getattr(self.one, '_web_client', None)
10411041
return webclient.rel_path2url(get_alf_path(self.session_path)) if webclient else None
1042+
1043+
def samples2times(self, values, direction='forward'):
1044+
"""
1045+
:param values: numpy array of times in seconds or samples to resync
1046+
:param direction: 'forward' (samples probe time to seconds main time) or 'reverse'
1047+
(seconds main time to samples probe time)
1048+
:return:
1049+
"""
1050+
if self._sync is None:
1051+
timestamps = self.one.load_dataset(
1052+
self.eid, dataset='_spikeglx_*.timestamps.npy', collection=f'raw_ephys_data/{self.pname}')
1053+
self._sync = {
1054+
'timestamps': timestamps,
1055+
'forward': interp1d(timestamps[:, 0], timestamps[:, 1], fill_value='extrapolate'),
1056+
'reverse': interp1d(timestamps[:, 1], timestamps[:, 0], fill_value='extrapolate'),
1057+
}
1058+
return self._sync[direction](values)

ibllib/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "2.12.2"
1+
__version__ = "2.13.0"
22
import warnings
33

44
from ibllib.misc import logger_config

ibllib/atlas/atlas.py

Lines changed: 59 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -110,22 +110,55 @@ def _round(i, round=True):
110110
else:
111111
return i
112112

113-
def x2i(self, x, round=True):
114-
return self._round((x - self.x0) / self.dx, round=round)
115-
116-
def y2i(self, y, round=True):
117-
return self._round((y - self.y0) / self.dy, round=round)
118-
119-
def z2i(self, z, round=True):
120-
return self._round((z - self.z0) / self.dz, round=round)
113+
def x2i(self, x, round=True, mode='raise'):
114+
i = np.asarray(self._round((x - self.x0) / self.dx, round=round))
115+
if np.any(i < 0) or np.any(i >= self.nx):
116+
if mode == 'clip':
117+
i[i < 0] = 0
118+
i[i >= self.nx] = self.nx - 1
119+
elif mode == 'raise':
120+
raise ValueError("At least one x value lies outside of the atlas volume.")
121+
elif mode == 'wrap':
122+
pass
123+
return i
124+
125+
def y2i(self, y, round=True, mode='raise'):
126+
i = np.asarray(self._round((y - self.y0) / self.dy, round=round))
127+
if np.any(i < 0) or np.any(i >= self.ny):
128+
if mode == 'clip':
129+
i[i < 0] = 0
130+
i[i >= self.ny] = self.ny - 1
131+
elif mode == 'raise':
132+
raise ValueError("At least one y value lies outside of the atlas volume.")
133+
elif mode == 'wrap':
134+
pass
135+
return i
136+
137+
def z2i(self, z, round=True, mode='raise'):
138+
i = np.asarray(self._round((z - self.z0) / self.dz, round=round))
139+
if np.any(i < 0) or np.any(i >= self.nz):
140+
if mode == 'clip':
141+
i[i < 0] = 0
142+
i[i >= self.nz] = self.nz - 1
143+
elif mode == 'raise':
144+
raise ValueError("At least one z value lies outside of the atlas volume.")
145+
elif mode == 'wrap':
146+
pass
147+
return i
121148

122-
def xyz2i(self, xyz, round=True):
149+
def xyz2i(self, xyz, round=True, mode='raise'):
150+
"""
151+
:param mode: {'raise', 'clip', 'wrap'} determines what to do when the determined index lies outside the atlas volume
152+
'raise' will raise a ValueError
153+
'clip' will replace the index with the closest index inside the volume
154+
'wrap' will wrap around to the other side of the volume. This is only here for legacy reasons
155+
"""
123156
xyz = np.array(xyz)
124157
dt = int if round else float
125158
out = np.zeros_like(xyz, dtype=dt)
126-
out[..., 0] = self.x2i(xyz[..., 0], round=round)
127-
out[..., 1] = self.y2i(xyz[..., 1], round=round)
128-
out[..., 2] = self.z2i(xyz[..., 2], round=round)
159+
out[..., 0] = self.x2i(xyz[..., 0], round=round, mode=mode)
160+
out[..., 1] = self.y2i(xyz[..., 1], round=round, mode=mode)
161+
out[..., 2] = self.z2i(xyz[..., 2], round=round, mode=mode)
129162
return out
130163

131164
"""Methods indices to distance"""
@@ -227,7 +260,10 @@ def _get_cache_dir():
227260
def compute_surface(self):
228261
"""
229262
Get the volume top, bottom, left and right surfaces, and from these the outer surface of
230-
the image volume. This is needed to compute probe insertions intersections
263+
the image volume. This is needed to compute probe insertions intersections.
264+
265+
NOTE: In places where the top or bottom surface touch the top or bottom of the atlas volume, the surface
266+
will be set to np.nan. If you encounter issues working with these surfaces check if this might be the cause.
231267
"""
232268
if self.surface is None: # only compute if it hasn't already been computed
233269
axz = self.xyz2dims[2] # this is the dv axis
@@ -439,7 +475,12 @@ def slice(self, coordinate, axis, volume='image', mode='raise', region_values=No
439475
:param mapping: mapping to use. Options can be found using ba.regions.mappings.keys()
440476
:return: 2d array or 3d RGB numpy int8 array
441477
"""
442-
index = self.bc.xyz2i(np.array([coordinate] * 3))[axis]
478+
if axis == 0:
479+
index = self.bc.x2i(np.array(coordinate), mode=mode)
480+
elif axis == 1:
481+
index = self.bc.y2i(np.array(coordinate), mode=mode)
482+
elif axis == 2:
483+
index = self.bc.z2i(np.array(coordinate), mode=mode)
443484

444485
# np.take is 50 thousand times slower than straight slicing !
445486
def _take(vol, ind, axis):
@@ -765,7 +806,10 @@ def from_dict(d, brain_atlas=None):
765806
if brain_atlas:
766807
iy = brain_atlas.bc.y2i(d['y'] / 1e6)
767808
ix = brain_atlas.bc.x2i(d['x'] / 1e6)
768-
z = brain_atlas.top[iy, ix]
809+
# Only use the brain surface value as z if it isn't NaN (this happens when the surface touches the edges
810+
# of the atlas volume)
811+
if not np.isnan(brain_atlas.top[iy, ix]):
812+
z = brain_atlas.top[iy, ix]
769813
return Insertion(x=d['x'] / 1e6, y=d['y'] / 1e6, z=z,
770814
phi=d['phi'], theta=d['theta'], depth=d['depth'] / 1e6,
771815
beta=d.get('beta', 0), label=d.get('label', ''))

ibllib/ephys/neuropixel.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,3 +8,4 @@
88
', change your imports to neuropixel !', DeprecationWarning)
99

1010
from neuropixel import * # noqa
11+
from neuropixel import SITES_COORDINATES # noqa

ibllib/io/extractors/biased_trials.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212
StimOnTimes_deprecated, StimOnTriggerTimes, StimOnOffFreezeTimes, ItiInTimes,
1313
StimOffTriggerTimes, StimFreezeTriggerTimes, ErrorCueTriggerTimes, PhasePosQuiescence)
1414
from ibllib.io.extractors.training_wheel import Wheel
15-
from ibllib.misc import version
1615

1716

1817
class ContrastLR(BaseBpodTrialsExtractor):
@@ -163,7 +162,7 @@ def extract_all(session_path, save=False, bpod_trials=False, settings=False, ext
163162

164163
base = [GoCueTriggerTimes]
165164
# Version check
166-
if version.ge(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
165+
if parse_version(settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
167166
# We now extract a single trials table
168167
base.extend([
169168
StimOnTriggerTimes, ItiInTimes, StimOffTriggerTimes, StimFreezeTriggerTimes, ErrorCueTriggerTimes,

ibllib/io/extractors/bpod_trials.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,10 @@
55
import logging
66
from collections import OrderedDict
77

8+
from pkg_resources import parse_version
89
from ibllib.io.extractors import habituation_trials, training_trials, biased_trials, opto_trials
910
import ibllib.io.extractors.base
1011
import ibllib.io.raw_data_loaders as rawio
11-
from ibllib.misc import version
1212

1313
_logger = logging.getLogger('ibllib')
1414

@@ -54,7 +54,8 @@ def extract_all(session_path, save=True, bpod_trials=None, settings=None):
5454
files_wheel = []
5555
wheel = OrderedDict({k: trials.pop(k) for k in tuple(trials.keys()) if 'wheel' in k})
5656
elif extractor_type == 'habituation':
57-
if settings['IBLRIG_VERSION_TAG'] and version.le(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
57+
if settings['IBLRIG_VERSION_TAG'] and \
58+
parse_version(settings['IBLRIG_VERSION_TAG']) <= parse_version('5.0.0'):
5859
_logger.warning("No extraction of legacy habituation sessions")
5960
return None, None, None
6061
trials, files_trials = habituation_trials.extract_all(

ibllib/io/extractors/training_trials.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
import ibllib.io.raw_data_loaders as raw
77
from ibllib.io.extractors.base import BaseBpodTrialsExtractor, run_extractor_classes
88
from ibllib.io.extractors.training_wheel import Wheel
9-
from ibllib.misc import version
109

1110

1211
_logger = logging.getLogger('ibllib')
@@ -211,7 +210,7 @@ def get_feedback_times_ge5(session_path, data=False):
211210

212211
def _extract(self):
213212
# Version check
214-
if version.ge(self.settings['IBLRIG_VERSION_TAG'], '5.0.0'):
213+
if parse_version(self.settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
215214
merge = self.get_feedback_times_ge5(self.session_path, data=self.bpod_trials)
216215
else:
217216
merge = self.get_feedback_times_lt5(self.session_path, data=self.bpod_trials)
@@ -282,7 +281,7 @@ class GoCueTriggerTimes(BaseBpodTrialsExtractor):
282281
var_names = 'goCueTrigger_times'
283282

284283
def _extract(self):
285-
if version.ge(self.settings['IBLRIG_VERSION_TAG'], '5.0.0'):
284+
if parse_version(self.settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
286285
goCue = np.array([tr['behavior_data']['States timestamps']
287286
['play_tone'][0][0] for tr in self.bpod_trials])
288287
else:
@@ -356,7 +355,7 @@ class IncludedTrials(BaseBpodTrialsExtractor):
356355
var_names = 'included'
357356

358357
def _extract(self):
359-
if version.ge(self.settings['IBLRIG_VERSION_TAG'], '5.0.0'):
358+
if parse_version(self.settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
360359
trials_included = self.get_included_trials_ge5(
361360
data=self.bpod_trials, settings=self.settings)
362361
else:
@@ -513,7 +512,7 @@ def _extract(self):
513512
# Version check
514513
_logger.warning("Deprecation Warning: this is an old version of stimOn extraction."
515514
"From version 5., use StimOnOffFreezeTimes")
516-
if version.ge(self.settings['IBLRIG_VERSION_TAG'], '5.0.0'):
515+
if parse_version(self.settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
517516
stimOn_times = self.get_stimOn_times_ge5(self.session_path, data=self.bpod_trials)
518517
else:
519518
stimOn_times = self.get_stimOn_times_lt5(self.session_path, data=self.bpod_trials)
@@ -719,7 +718,7 @@ def extract_all(session_path, save=False, bpod_trials=None, settings=None):
719718

720719
base = [RepNum, GoCueTriggerTimes]
721720
# Version check
722-
if version.ge(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
721+
if parse_version(settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
723722
# We now extract a single trials table
724723
base.extend([
725724
StimOnTriggerTimes, ItiInTimes, StimOffTriggerTimes, StimFreezeTriggerTimes,

ibllib/io/raw_data_loaders.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,12 +15,12 @@
1515
from pathlib import Path
1616
from typing import Union
1717

18+
from pkg_resources import parse_version
1819
import numpy as np
1920
import pandas as pd
2021

2122
from iblutil.io import jsonable
2223
from ibllib.io.video import assert_valid_label
23-
from ibllib.misc import version
2424
from ibllib.time import uncycle_pgts, convert_pgts
2525

2626
_logger = logging.getLogger('ibllib')
@@ -374,7 +374,7 @@ def load_encoder_events(session_path, settings=False):
374374
settings = {'IBLRIG_VERSION_TAG': '0.0.0'}
375375
if not path:
376376
return None
377-
if version.ge(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
377+
if parse_version(settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
378378
return _load_encoder_events_file_ge5(path)
379379
else:
380380
return _load_encoder_events_file_lt5(path)
@@ -479,7 +479,7 @@ def load_encoder_positions(session_path, settings=False):
479479
if not path:
480480
_logger.warning("No data loaded: could not find raw encoderPositions file")
481481
return None
482-
if version.ge(settings['IBLRIG_VERSION_TAG'], '5.0.0'):
482+
if parse_version(settings['IBLRIG_VERSION_TAG']) >= parse_version('5.0.0'):
483483
return _load_encoder_positions_file_ge5(path)
484484
else:
485485
return _load_encoder_positions_file_lt5(path)

ibllib/misc/version.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,15 @@
11
import pkg_resources
2+
import traceback
3+
import warnings
4+
5+
for line in traceback.format_stack():
6+
print(line.strip())
7+
8+
warnings.warn(
9+
'ibllib.version is deprecated and functionality will be removed! '
10+
'use pkg_resources.parse_version and ibllib.__version__ instead. See stack above.',
11+
DeprecationWarning
12+
)
213

314

415
def _compare_version_tag(v1, v2, fcn):

ibllib/oneibl/patcher.py

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,8 @@
1212
from one.alf.spec import is_uuid_string
1313
from one import params
1414
from one.converters import path_from_dataset
15+
from one.remote import globus
1516

16-
from ibllib.io import globus
1717
from ibllib.oneibl.registration import register_dataset
1818

1919
_logger = logging.getLogger('ibllib')
@@ -97,7 +97,7 @@ def _patch_dataset(self, path, dset_id=None, dry=False, ftp=False):
9797
full_remote_path = PurePosixPath(FLATIRON_MOUNT, remote_path)
9898
if isinstance(path, WindowsPath) and not ftp:
9999
# On Windows replace drive map with Globus uri, e.g. C:/ -> /~/C/
100-
path = '/~/' + path.as_posix().replace(':', '')
100+
path = globus.as_globus_path(path)
101101
status = self._scp(path, full_remote_path, dry=dry)[0]
102102
return status
103103

@@ -140,8 +140,8 @@ def patch_dataset(self, file_list, dry=False, ftp=False, **kwargs):
140140
Rules for creation/patching are the same that apply for registration via Alyx
141141
as this uses the registration endpoint to get the dataset.
142142
An existing file (same session and path relative to session) will be patched.
143-
:param path: full file path. Must be whithin an ALF session folder (subject/date/number)
144-
can also be a list of full file pathes belonging to the same session.
143+
:param path: full file path. Must be within an ALF session folder (subject/date/number)
144+
can also be a list of full file paths belonging to the same session.
145145
:param server_repository: Alyx server repository name
146146
:param created_by: alyx username for the dataset (optional, defaults to root)
147147
:param ftp: flag for case when using ftppatcher. Don't adjust windows path in
@@ -197,13 +197,12 @@ class GlobusPatcher(Patcher):
197197
198198
"""
199199

200-
def __init__(self, one=None, globus_client_id=None, local_endpoint=None, label='ibllib patch'):
201-
assert globus_client_id
200+
def __init__(self, client_name='default', one=None, label='ibllib patch'):
202201
assert one
203-
self.local_endpoint = local_endpoint or globus.get_local_endpoint()
202+
self.local_endpoint = getattr(globus.load_client_params(f'globus.{client_name}'),
203+
'local_endpoint', globus.get_local_endpoint_id())
204+
self.transfer_client = globus.create_globus_client(client_name)
204205
self.label = label
205-
self.transfer_client = globus.login_auto(
206-
globus_client_id=globus_client_id, str_app='globus/admin')
207206
# transfers/delete from the current computer to the flatiron: mandatory and executed first
208207
self.globus_transfer = globus_sdk.TransferData(
209208
self.transfer_client, self.local_endpoint, FLAT_IRON_GLOBUS_ID, verify_checksum=True,
@@ -296,11 +295,11 @@ def _wait_for_task(resp):
296295
# on an errored task
297296
# Out[10]: TransferResponse({'bytes_checksummed': 0, 'bytes_transferred': 0, 'canceled_by_admin': None, 'canceled_by_admin_message': None, 'command': 'API 0.10', 'completion_time': '2021-01-03T17:39:00+00:00', 'deadline': '2021-01-04T17:37:34+00:00', 'delete_destination_extra': False, 'destination_endpoint': 'simonsfoundation#ibl', 'destination_endpoint_display_name': 'IBL Flatiron SDSC Data', 'destination_endpoint_id': 'ab2d064c-413d-11eb-b188-0ee0d5d9299f', 'directories': 0, 'effective_bytes_per_second': 0, 'encrypt_data': False, 'fatal_error': {'code': 'CANCELED', 'description': 'canceled'}, 'faults': 2, 'files': 6, 'files_skipped': 0, 'files_transferred': 0, 'history_deleted': False, 'is_ok': None, 'is_paused': False, 'key': 'complete,2021-01-03T17:38:59.697413', 'label': 'test 3B analog sync patch', 'nice_status': None, 'nice_status_details': None, 'nice_status_expires_in': None, 'nice_status_short_description': None, 'owner_id': 'e633663a-8561-4a5d-ac92-f198d43b14dc', 'preserve_timestamp': False, 'recursive_symlinks': 'ignore', 'request_time': '2021-01-03T17:37:34+00:00', 'source_endpoint': 'internationalbrainlab#916c2766-bd2a-11ea-8f22-0a21f750d19b', 'source_endpoint_display_name': 'olivier_laptop', 'source_endpoint_id': '916c2766-bd2a-11ea-8f22-0a21f750d19b', 'status': 'FAILED', 'subtasks_canceled': 6, 'subtasks_expired': 0, 'subtasks_failed': 0, 'subtasks_pending': 0, 'subtasks_retrying': 0, 'subtasks_succeeded': 6, 'subtasks_total': 12, 'symlinks': 0, 'sync_level': 3, 'task_id': '5706dd2c-4dea-11eb-8ffb-0a34088e79f9', 'type': 'TRANSFER', 'username': 'internationalbrainlab', 'verify_checksum': True}) # noqa
298297
while True:
299-
tinfo = gtc.get_task(task_id=resp['task_id'])['completion_time']
300-
if tinfo['completion_time'] is not None:
298+
tinfo = gtc.get_task(task_id=resp['task_id'])
299+
if tinfo and tinfo['completion_time'] is not None:
301300
break
302301
_ = gtc.task_wait(task_id=resp['task_id'], timeout=30)
303-
if tinfo['fatal_error'] is not None:
302+
if tinfo and tinfo['fatal_error'] is not None:
304303
raise ConnectionError(f"Globus transfer failed \n {tinfo}")
305304

306305
# handles the transfers first

0 commit comments

Comments
 (0)