Commit f4c02c8

Merge pull request #750 from int-brain-lab/protected_view
Protected view
2 parents 634e9db + a96efd4 commit f4c02c8

File tree

9 files changed, +71 -32 lines changed

ibllib/oneibl/patcher.py (1 addition, 1 deletion)

@@ -115,7 +115,7 @@ def _patch_dataset(self, path, dset_id=None, revision=None, dry=False, ftp=False
         assert is_uuid_string(dset_id)
         # If the revision is not None then we need to add the revision into the path. Note the moving of the file
         # is handled by one registration client
-        if revision is not None:
+        if revision is not None and f'#{revision}' not in str(path):
             path = path.parent.joinpath(f'#{revision}#', path.name)
         assert path.exists()
         dset = self.one.alyx.rest('datasets', 'read', id=dset_id)
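The added clause makes the revision-folder step idempotent: a path that already contains the #revision# component is left untouched. A minimal standalone sketch of that path logic (the add_revision helper is hypothetical, for illustration only):

from pathlib import PurePosixPath

def add_revision(path, revision=None):
    """Insert a #revision# folder before the file name, unless one is already there."""
    path = PurePosixPath(path)
    if revision is not None and f'#{revision}' not in str(path):
        path = path.parent.joinpath(f'#{revision}#', path.name)
    return path

p = add_revision('alf/spikes.times.npy', revision='2024-04-01')
assert str(p) == 'alf/#2024-04-01#/spikes.times.npy'
assert add_revision(p, revision='2024-04-01') == p  # second call is a no-op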

ibllib/oneibl/registration.py (20 additions, 0 deletions)

@@ -12,6 +12,7 @@
 from one.converters import ConversionMixin
 import one.alf.exceptions as alferr
 from one.util import datasets2records, ensure_list
+from one.api import ONE

 import ibllib
 import ibllib.io.extractors.base
@@ -78,6 +79,25 @@ def register_dataset(file_list, one=None, exists=False, versions=None, **kwargs)
     assert all(Path(f).exists() for f in file_list)

     client = IBLRegistrationClient(one)
+
+    # Check for protected datasets
+    # Account for cases where we are connected to the cortex lab database
+    if one.alyx.base_url == 'https://alyx.cortexlab.net':
+        protected_status = IBLRegistrationClient(
+            ONE(base_url='https://alyx.internationalbrainlab.org', mode='remote')).check_protected_files(file_list)
+    else:
+        protected_status = client.check_protected_files(file_list)
+
+    if isinstance(protected_status, list):
+        protected = any(d['status_code'] == 403 for d in protected_status)
+    else:
+        protected = protected_status['status_code'] == 403
+
+    # If we find a protected dataset and we don't have a force=True flag, raise an error
+    if protected and not kwargs.pop('force', False):
+        raise FileExistsError('Protected datasets were found in the file list. To force the registration of datasets '
+                              'add the force=True argument.')
+
     # If the repository is specified then for the registration client we want server_only=True to
     # make sure we don't make any other repositories for the lab
     if kwargs.get('repository') and not kwargs.get('server_only', False):
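Downstream, this means re-registering a dataset that carries a protected tag now raises unless the caller opts in, mirroring the tests later in this diff. A usage sketch (the file path and base URL are illustrative):

from one.api import ONE
from ibllib.oneibl import registration

one = ONE(base_url='https://alyx.internationalbrainlab.org')
files = ['/data/subject/2024-01-01/001/alf/spikes.times.npy']  # illustrative path

try:
    r = registration.register_dataset(file_list=files, one=one)
except FileExistsError:
    # One or more datasets are protected; the caller must opt in explicitly,
    # which registers a new revision rather than silently overwriting.
    r = registration.register_dataset(file_list=files, one=one, force=True)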

ibllib/pipes/histology.py (23 additions, 16 deletions)

@@ -237,24 +237,22 @@ def register_chronic_track(chronic_id, picks=None, one=None, overwrite=False, ch
     :return:
     """
     assert one
-    brain_locations, insertion_histology = register_track(chronic_id, picks=picks, one=one, overwrite=overwrite,
-                                                          channels=channels, brain_atlas=brain_atlas,
-                                                          endpoint='chronic-insertions')
-
-    # Update all the associated probe insertions with the relevant QC and xyz_picks
     chronic = one.alyx.rest('chronic-insertions', 'list', id=chronic_id)[0]
     for probe_id in chronic['probe_insertion']:
         pid = probe_id['id']
-        if picks is None or picks.size == 0:
-            hist_qc = base.QC(pid, one=one, endpoint='insertions')
-            hist_qc.update_extended_qc({'tracing_exists': False})
-            hist_qc.update('CRITICAL', namespace='tracing')
-        else:
-            one.alyx.json_field_update(endpoint='insertions', uuid=pid, field_name='json',
-                                       data={'xyz_picks': np.int32(picks * 1e6).tolist()})
-            # Update the insertion qc to register that tracing exists
-            hist_qc = base.QC(pid, one=one, endpoint='insertions')
-            hist_qc.update_extended_qc({'tracing_exists': True})
+        brain_locations, insertion_histology = register_track(pid, picks=picks, one=one, overwrite=overwrite,
+                                                              channels=channels, brain_atlas=brain_atlas)
+
+    if picks is None or picks.size == 0:
+        hist_qc = base.QC(chronic_id, one=one, endpoint='chronic-insertions')
+        hist_qc.update_extended_qc({'tracing_exists': False})
+        hist_qc.update('CRITICAL', namespace='tracing')
+    else:
+        one.alyx.json_field_update(endpoint='chronic-insertions', uuid=chronic_id, field_name='json',
+                                   data={'xyz_picks': np.int32(picks * 1e6).tolist()})
+        # Update the insertion qc to register that tracing exists
+        hist_qc = base.QC(chronic_id, one=one, endpoint='chronic-insertions')
+        hist_qc.update_extended_qc({'tracing_exists': True})

     return brain_locations, insertion_histology

@@ -291,7 +289,14 @@ def register_track(probe_id, picks=None, one=None, overwrite=False, channels=Tru
         insertion_histology = None
         # Here need to change the track qc to critical and also extended qc to zero
     else:
-        brain_locations, insertion_histology = get_brain_regions(picks, brain_atlas=brain_atlas)
+        try:
+            eid, pname = one.pid2eid(probe_id)
+            chan_pos = one.load_dataset(eid, 'channels.localCoordinates.npy', collection=f'alf/{pname}/pykilosort')
+        except Exception:
+            chan_pos = None
+
+        brain_locations, insertion_histology = get_brain_regions(picks, channels_positions=chan_pos,
+                                                                 brain_atlas=brain_atlas)
     # 1) update the alyx models, first put the picked points in the insertion json
     one.alyx.json_field_update(endpoint=endpoint, uuid=probe_id, field_name='json',
                                data={'xyz_picks': np.int32(picks * 1e6).tolist()})

@@ -391,8 +396,10 @@ def create_trajectory_dict(probe_id, insertion, provenance, endpoint='insertions
     }
     if endpoint == 'chronic-insertions':
         tdict['chronic_insertion'] = probe_id
+        tdict['probe_insertion'] = None
     else:
         tdict['probe_insertion'] = probe_id
+        tdict['chronic_insertion'] = None

     return tdict
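The last hunk makes the two insertion foreign keys on a trajectory mutually exclusive: exactly one of probe_insertion / chronic_insertion is set, the other explicitly nulled. A minimal sketch of the resulting payload shape (the helper and the id value are hypothetical):

def insertion_fields(probe_id, endpoint='insertions'):
    """Return the mutually exclusive insertion keys a trajectory dict now carries."""
    if endpoint == 'chronic-insertions':
        return {'chronic_insertion': probe_id, 'probe_insertion': None}
    return {'probe_insertion': probe_id, 'chronic_insertion': None}

assert insertion_fields('abc-123', 'chronic-insertions') == {
    'chronic_insertion': 'abc-123', 'probe_insertion': None}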

ibllib/plots/figures.py (5 additions, 1 deletion)

@@ -785,7 +785,11 @@ def dlc_qc_plot(session_path, one=None, device_collection='raw_video_data',
     # Load session level data
     for alf_object in ['trials', 'wheel', 'licks']:
         try:
-            data[f'{alf_object}'] = alfio.load_object(session_path.joinpath(trials_collection), alf_object)  # load locally
+            if alf_object == 'licks':
+                data[f'{alf_object}'] = alfio.load_object(session_path.joinpath('alf'),
+                                                          alf_object)  # load locally
+            else:
+                data[f'{alf_object}'] = alfio.load_object(session_path.joinpath(trials_collection), alf_object)  # load locally
             continue
         except ALFObjectNotFound:
             pass
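In effect, licks are now always read from the session-level alf collection, while trials and wheel still honour trials_collection. A one-line sketch of that collection choice (the helper is hypothetical):

def collection_for(alf_object, trials_collection='alf'):
    """Licks always live under 'alf'; other objects follow trials_collection."""
    return 'alf' if alf_object == 'licks' else trials_collection

assert collection_for('licks', 'alf/task_00') == 'alf'
assert collection_for('trials', 'alf/task_00') == 'alf/task_00'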

ibllib/qc/alignment_qc.py (1 addition, 1 deletion)

@@ -316,7 +316,7 @@ def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):
            ephys_traj = self.one.alyx.get(f'/trajectories?&probe_insertion={self.eid}'
                                           '&provenance=Ephys aligned histology track',
                                           clobber=True)
-           patch_dict = {'json': self.alignments}
+           patch_dict = {'probe_insertion': self.eid, 'json': self.alignments}
            self.one.alyx.rest('trajectories', 'partial_update', id=ephys_traj[0]['id'],
                               data=patch_dict)
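The PATCH body now re-states the parent insertion id alongside the alignments JSON, consistent with the mutually exclusive insertion fields introduced in histology.py. A sketch of the call with the new payload shape (assumes a connected ONE instance named one; the UUIDs and alignment content are hypothetical):

probe_id = '11111111-1111-1111-1111-111111111111'  # hypothetical insertion UUID
traj_id = '22222222-2222-2222-2222-222222222222'   # hypothetical trajectory UUID
alignments = {'2020-06-26T16:40:14_Karolina_Socha': []}  # illustrative content

patch_dict = {'probe_insertion': probe_id, 'json': alignments}
one.alyx.rest('trajectories', 'partial_update', id=traj_id, data=patch_dict)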

ibllib/tests/qc/test_alignment_qc.py (9 additions, 6 deletions)

@@ -186,6 +186,7 @@ def _02_one_alignment(self):
                       self.alignments['2020-06-26T16:40:14_Karolina_Socha']}
         trajectory = copy.deepcopy(self.trajectory)
         trajectory.update({'json': alignments})
+        trajectory.update({'chronic_insertion': None})
         _ = one.alyx.rest('trajectories', 'create', data=trajectory)
         align_qc = AlignmentQC(self.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
         align_qc.run(update=True, upload_alyx=True, upload_flatiron=False)
@@ -199,8 +200,8 @@ def _03_alignments_disagree(self):
                       self.alignments['2020-06-26T16:40:14_Karolina_Socha'],
                       '2020-06-12T00:39:15_nate': self.alignments['2020-06-12T00:39:15_nate']}
         trajectory = copy.deepcopy(self.trajectory)
-        trajectory.update({'json': alignments})
-        traj = one.alyx.rest('trajectories', 'update', id=self.prev_traj_id, data=trajectory)
+        trajectory.update({'probe_insertion': self.probe_id, 'json': alignments})
+        traj = one.alyx.rest('trajectories', 'partial_update', id=self.prev_traj_id, data=trajectory)
         align_qc = AlignmentQC(self.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
         align_qc.load_data(prev_alignments=traj['json'], xyz_picks=np.array(self.xyz_picks) / 1e6,
                            cluster_chns=self.cluster_chns, depths=SITES_COORDINATES[:, 1],
@@ -216,8 +217,8 @@ def _04_alignments_agree(self):
                       self.alignments['2020-06-19T10:52:36_noam.roth'],
                       '2020-06-12T00:39:15_nate': self.alignments['2020-06-12T00:39:15_nate']}
         trajectory = copy.deepcopy(self.trajectory)
-        trajectory.update({'json': alignments})
-        traj = one.alyx.rest('trajectories', 'update', id=self.prev_traj_id, data=trajectory)
+        trajectory.update({'probe_insertion': self.probe_id, 'json': alignments})
+        traj = one.alyx.rest('trajectories', 'partial_update', id=self.prev_traj_id, data=trajectory)
         self.assertEqual(self.prev_traj_id, traj['id'])
         align_qc = AlignmentQC(self.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
         align_qc.load_data(cluster_chns=self.cluster_chns, depths=SITES_COORDINATES[:, 1],
@@ -230,8 +231,8 @@ def _04_alignments_agree(self):
     def _05_not_latest_alignments_agree(self):
         alignments = copy.deepcopy(self.alignments)
         trajectory = copy.deepcopy(self.trajectory)
-        trajectory.update({'json': alignments})
-        traj = one.alyx.rest('trajectories', 'update', id=self.prev_traj_id, data=trajectory)
+        trajectory.update({'probe_insertion': self.probe_id, 'json': alignments})
+        traj = one.alyx.rest('trajectories', 'partial_update', id=self.prev_traj_id, data=trajectory)
         self.assertEqual(self.prev_traj_id, traj['id'])
         align_qc = AlignmentQC(self.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
         align_qc.load_data(prev_alignments=traj['json'], xyz_picks=np.array(self.xyz_picks) / 1e6,
@@ -277,6 +278,7 @@ def setUpClass(cls) -> None:
         cls.probe_id = probe_insertion['id']
         cls.trajectory = data['trajectory'].tolist()
         cls.trajectory.update({'probe_insertion': cls.probe_id})
+        cls.trajectory.update({'chronic_insertion': None})
         cls.trajectory.update({'json': cls.alignments})
         cls.traj = one.alyx.rest('trajectories', 'create', data=cls.trajectory)

@@ -415,6 +417,7 @@ def setUpClass(cls) -> None:
         cls.probe_name = probe_insertion['name']
         cls.trajectory = data['trajectory'].tolist()
         cls.trajectory.update({'probe_insertion': cls.probe_id})
+        cls.trajectory.update({'chronic_insertion': None})
         cls.trajectory.update({'json': cls.alignments})
         cls.traj = one.alyx.rest('trajectories', 'create', data=cls.trajectory)

ibllib/tests/qc/test_task_metrics.py (1 addition, 1 deletion)

@@ -17,7 +17,7 @@ class TestAggregateOutcome(unittest.TestCase):
     def test_deprecation_warning(self):
         """Remove TaskQC.compute_session_status_from_dict after 2024-04-01."""
         from datetime import datetime
-        self.assertFalse(datetime.now() > datetime(2024, 4, 1), 'remove TaskQC.compute_session_status_from_dict method.')
+        self.assertFalse(datetime.now() > datetime(2024, 4, 10), 'remove TaskQC.compute_session_status_from_dict method.')
         qc_dict = {'_task_iti_delays': .99}
         with self.assertWarns(DeprecationWarning), self.assertLogs(qcmetrics.__name__, 'WARNING'):
             out = qcmetrics.TaskQC.compute_session_status_from_dict(qc_dict)

ibllib/tests/test_oneibl.py (10 additions, 5 deletions)

@@ -126,7 +126,7 @@ def test_patch_datasets(self):

         # Mock the post method of AlyxClient and assert that it was called during registration
         with mock.patch.object(self.one.alyx, 'post') as rest_mock:
-            rest_mock.side_effect = responses
+            rest_mock.side_effect = [[r] for r in responses]
             self.globus_patcher.patch_datasets(file_list)
             self.assertEqual(rest_mock.call_count, 2)
             for call, file in zip(rest_mock.call_args_list, file_list):
@@ -332,9 +332,14 @@ def test_registration_datasets(self):
             self.one.alyx.rest('datasets', 'partial_update',
                                id=d['url'][-36:], data={'tags': [self.tag['name']]})

+        # Check that we get a FileExistsError unless force=True
+        flist = list(self.rev_path.glob('*.npy'))
+        with self.assertRaises(FileExistsError):
+            registration.register_dataset(file_list=flist, one=self.one)
+
         # Test registering with a revision already in the file path, should use this rather than create one with today's date
         flist = list(self.rev_path.glob('*.npy'))
-        r = registration.register_dataset(file_list=flist, one=self.one)
+        r = registration.register_dataset(file_list=flist, one=self.one, force=True)
         self.assertTrue(all(d['revision'] == self.revision for d in r))
         self.assertTrue(all(d['default'] for d in r))
         self.assertTrue(all(d['collection'] == 'alf' for d in r))
@@ -348,7 +353,7 @@ def test_registration_datasets(self):
         # Register again with revision in file path, it should register to self.revision + a
         flist = list(self.rev_path.glob('*.npy'))

-        r = registration.register_dataset(file_list=flist, one=self.one)
+        r = registration.register_dataset(file_list=flist, one=self.one, force=True)
         self.assertTrue(all(d['revision'] == f'{self.revision}a' for d in r))
         self.assertTrue(self.alf_path.joinpath(f'#{self.revision}a#', 'spikes.times.npy').exists())
         self.assertTrue(self.alf_path.joinpath(f'#{self.revision}a#', 'spikes.amps.npy').exists())
@@ -357,7 +362,7 @@ def test_registration_datasets(self):

         # When we re-register the original it should move them into revision with today's date
         flist = list(self.alf_path.glob('*.npy'))
-        r = registration.register_dataset(file_list=flist, one=self.one)
+        r = registration.register_dataset(file_list=flist, one=self.one, force=True)
         self.assertTrue(all(d['revision'] == self.today_revision for d in r))
         self.assertTrue(self.alf_path.joinpath(f'#{self.today_revision}#', 'spikes.times.npy').exists())
         self.assertTrue(self.alf_path.joinpath(f'#{self.today_revision}#', 'spikes.amps.npy').exists())
@@ -375,7 +380,7 @@ def test_registration_datasets(self):
         np.save(self.alf_path.joinpath('spikes.times.npy'), np.random.random(500))
         np.save(self.alf_path.joinpath('spikes.amps.npy'), np.random.random(500))
         flist = list(self.alf_path.glob('*.npy'))
-        r = registration.register_dataset(file_list=flist, one=self.one)
+        r = registration.register_dataset(file_list=flist, one=self.one, force=True)
         self.assertTrue(all(d['revision'] == self.today_revision + 'a' for d in r))

     def _write_settings_file(self):

requirements.txt (1 addition, 1 deletion)

@@ -15,7 +15,7 @@ pynrrd>=0.4.0
 pytest
 requests>=2.22.0
 scikit-learn>=0.22.1
-scipy>=1.7.0
+scipy>=1.7.0,<1.13  # scipy gaussian missing April 2024
 scikit-image  # this is a widefield requirement missing as of July 2023, we may remove it once wfield has this figured out
 sparse
 seaborn>=0.9.0
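The upper pin most likely refers to scipy.signal.gaussian, which was removed in SciPy 1.13 in favour of the scipy.signal.windows namespace (the exact affected call site is an assumption). The forward-compatible spelling, valid on both sides of the pin:

import numpy as np
# Present well before SciPy 1.7 and still available in 1.13+,
# unlike the removed scipy.signal.gaussian alias.
from scipy.signal.windows import gaussian

win = gaussian(M=11, std=2.0)
assert win.size == 11 and np.argmax(win) == 5  # symmetric window, peaks at the centre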
