
Commit 06052a1

Merge pull request #1018 from int-brain-lab/align
split the dataset creation part from the FTP patcher
2 parents 307a735 + e99712c commit 06052a1

3 files changed: +86 additions, -67 deletions

ibllib/qc/alignment_qc.py

Lines changed: 66 additions & 51 deletions
@@ -4,6 +4,7 @@
 from pathlib import Path
 from one.alf.spec import QC
 from datetime import date
+from typing import Tuple

 from neuropixel import trace_header
 import spikeglx
@@ -275,13 +276,8 @@ def compute_alignment_status(self):

         return results

-    def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):
-        """
-        Upload channels to alyx and flatiron based on the alignment specified by the alignment key
-        """
-
-        feature = np.array(self.alignments[alignment_key][0])
-        track = np.array(self.alignments[alignment_key][1])
+    def get_channels_mlapdv(self, alignment_key: str) -> Tuple[np.ndarray, np.ndarray, np.array]:
+        feature, track = np.array(self.alignments[alignment_key][:2])

         try:
             meta_dset = self.one.list_datasets(self.insertion['session'], '*ap.meta',
@@ -291,25 +287,82 @@ def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):
                                               collection=f'raw_ephys_data/{self.insertion["name"]}',
                                               download_only=True)
             geometry = spikeglx.read_geometry(meta_file)
-            chns = np.c_[geometry['x'], geometry['y']]
+            channels_xy = np.c_[geometry['x'], geometry['y']]
         except Exception as err:
             self.log.warning(f"Could not compute channel locations from meta file, errored with message: {err}. "
                              f"Will use default Neuropixel 1 channels")
             geometry = trace_header(version=1)
-            chns = np.c_[geometry['x'], geometry['y']]
+            channels_xy = np.c_[geometry['x'], geometry['y']]

-        ephysalign = EphysAlignment(self.xyz_picks, chns[:, 1],
+        ephysalign = EphysAlignment(self.xyz_picks, channels_xy[:, 1],
                                     track_prev=track,
                                     feature_prev=feature,
                                     brain_atlas=self.brain_atlas)
         channels_mlapdv = np.int32(ephysalign.get_channel_locations(feature, track) * 1e6)
         channels_atlas_id = ephysalign.get_brain_locations(channels_mlapdv / 1e6)['id']
+        return channels_mlapdv, channels_atlas_id, channels_xy
+
+    def create_electrode_datasets(self, alignment_key: str) -> list:
+        """
+        When an insertion alignment is resolved, we write datasets containing electrode locations
+        :return:
+        """
+        channels_mlapdv, channels_atlas_id, channels_xy = self.get_channels_mlapdv(alignment_key)
+        files_to_register = []
+        alf_path = self.one.eid2path(self.insertion['session']).joinpath('alf', self.insertion["name"])
+        alf_path.mkdir(exist_ok=True, parents=True)
+
+        f_name = alf_path.joinpath('electrodeSites.mlapdv.npy')
+        np.save(f_name, channels_mlapdv)
+        files_to_register.append(f_name)
+
+        f_name = alf_path.joinpath('electrodeSites.brainLocationIds_ccf_2017.npy')
+        np.save(f_name, channels_atlas_id)
+        files_to_register.append(f_name)
+
+        f_name = alf_path.joinpath('electrodeSites.localCoordinates.npy')
+        np.save(f_name, channels_xy)
+        files_to_register.append(f_name)
+        probe_collections = self.one.list_collections(self.insertion['session'], filename='channels*',
+                                                      collection=f'alf/{self.insertion["name"]}*')
+        feature, track = np.array(self.alignments[alignment_key][:2])
+        for collection in probe_collections:
+            chns = self.one.load_dataset(self.insertion['session'], 'channels.localCoordinates', collection=collection)
+
+            ephysalign = EphysAlignment(self.xyz_picks, chns[:, 1],
+                                        track_prev=track,
+                                        feature_prev=feature,
+                                        brain_atlas=self.brain_atlas)
+            channels_mlapdv = np.int32(ephysalign.get_channel_locations(feature, track) * 1e6)
+            channels_atlas_id = ephysalign.get_brain_locations(channels_mlapdv / 1e6)['id']
+
+            alf_path = self.one.eid2path(self.insertion['session']).joinpath(collection)
+            alf_path.mkdir(exist_ok=True, parents=True)
+
+            f_name = alf_path.joinpath('channels.mlapdv.npy')
+            np.save(f_name, channels_mlapdv)
+            files_to_register.append(f_name)
+
+            f_name = alf_path.joinpath('channels.brainLocationIds_ccf_2017.npy')
+            np.save(f_name, channels_atlas_id)
+            files_to_register.append(f_name)
+
+        self.log.info("The following files have been saved:")
+        for f in files_to_register:
+            self.log.info(f)

+        return files_to_register
+
+    def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):
+        """
+        Upload channels to alyx and flatiron based on the alignment specified by the alignment key
+        """
+        channels_mlapdv, channels_atlas_id, channels_xy = self.get_channels_mlapdv(alignment_key)
         # Need to change channels stored on alyx as well as the stored key is not the same as the latest key
         if upload_alyx:
             if alignment_key != self.align_keys_sorted[0]:
                 histology.register_aligned_track(self.eid, channels_mlapdv / 1e6,
-                                                 chn_coords=chns, one=self.one,
+                                                 chn_coords=channels_xy, one=self.one,
                                                  overwrite=True, channels=self.channels_flag,
                                                  brain_atlas=self.brain_atlas)

@@ -322,47 +375,9 @@ def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):

         files_to_register = []
         if upload_flatiron:
+            files_to_register = self.create_electrode_datasets()
+            self.log.info(f"Registering {len(files_to_register)} datasets on FTP patcher")
             ftp_patcher = FTPPatcher(one=self.one)
-
-            alf_path = self.one.eid2path(self.insertion['session']).joinpath('alf', self.insertion["name"])
-            alf_path.mkdir(exist_ok=True, parents=True)
-
-            f_name = alf_path.joinpath('electrodeSites.mlapdv.npy')
-            np.save(f_name, channels_mlapdv)
-            files_to_register.append(f_name)
-
-            f_name = alf_path.joinpath('electrodeSites.brainLocationIds_ccf_2017.npy')
-            np.save(f_name, channels_atlas_id)
-            files_to_register.append(f_name)
-
-            f_name = alf_path.joinpath('electrodeSites.localCoordinates.npy')
-            np.save(f_name, chns)
-            files_to_register.append(f_name)
-
-            probe_collections = self.one.list_collections(self.insertion['session'], filename='channels*',
-                                                          collection=f'alf/{self.insertion["name"]}*')
-
-            for collection in probe_collections:
-                chns = self.one.load_dataset(self.insertion['session'], 'channels.localCoordinates', collection=collection)
-                ephysalign = EphysAlignment(self.xyz_picks, chns[:, 1],
-                                            track_prev=track,
-                                            feature_prev=feature,
-                                            brain_atlas=self.brain_atlas)
-                channels_mlapdv = np.int32(ephysalign.get_channel_locations(feature, track) * 1e6)
-                channels_atlas_id = ephysalign.get_brain_locations(channels_mlapdv / 1e6)['id']
-
-                alf_path = self.one.eid2path(self.insertion['session']).joinpath(collection)
-                alf_path.mkdir(exist_ok=True, parents=True)
-
-                f_name = alf_path.joinpath('channels.mlapdv.npy')
-                np.save(f_name, channels_mlapdv)
-                files_to_register.append(f_name)
-
-                f_name = alf_path.joinpath('channels.brainLocationIds_ccf_2017.npy')
-                np.save(f_name, channels_atlas_id)
-                files_to_register.append(f_name)
-
-            self.log.info("Writing datasets to FlatIron")
             ftp_patcher.create_dataset(path=files_to_register,
                                        created_by=self.one.alyx.user)

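For orientation, here is a rough sketch of how the split pieces fit together after this change. It is illustrative only: probe_id, one and brain_atlas are placeholders for a configured probe insertion UUID, ONE instance and brain atlas (set up much like in the test file further down), and the alignment key shown is the one used in the tests.

# Illustrative sketch, not part of the commit: probe_id, one and brain_atlas are placeholders.
from ibllib.qc.alignment_qc import AlignmentQC

align_qc = AlignmentQC(probe_id, one=one, brain_atlas=brain_atlas, channels=False)
# the alignments and xyz picks can also be passed explicitly, as in the test further down
align_qc.load_data()

# new helper: compute channel locations for a given alignment, with no upload side effects
mlapdv, atlas_ids, xy = align_qc.get_channels_mlapdv('2020-09-28T15:57:25_mayo')

# new method: write the electrodeSites.* and channels.* datasets to disk and return the file list
files_to_register = align_qc.create_electrode_datasets('2020-09-28T15:57:25_mayo')

# upload_channels now only updates Alyx and, if requested, hands the created files to the FTP patcher
align_qc.upload_channels('2020-09-28T15:57:25_mayo', upload_alyx=True, upload_flatiron=False)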

ibllib/tests/extractors/test_extractors_base.py

Lines changed: 3 additions & 2 deletions
@@ -30,11 +30,12 @@ def import_mock(self, name, *args):
     def test_get_task_extractor_map(self):
         """Test ibllib.io.extractors.base._get_task_extractor_map function."""
         # Check the custom map is loaded
-        with patch('builtins.__import__', side_effect=self.import_mock):
+        with patch.dict('sys.modules', {'projects': self.projects}):
             extractors = base._get_task_extractor_map()
         self.assertTrue(self.custom_extractors.items() < extractors.items())
         # Test handles case where module not installed
-        with patch('builtins.__import__', side_effect=ModuleNotFoundError):
+        with patch.dict('sys.modules', {'projects': None}):
+            # When trying to import a module that's None in sys.modules, it will raise ImportError
             extractors = base._get_task_extractor_map()
         self.assertFalse(set(self.custom_extractors.items()).issubset(set(extractors.items())))
         # Remove the file and check exception is caught
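A side note on the patching strategy above: mapping a name to None in sys.modules makes any subsequent import of that name raise ImportError, which is the failure mode _get_task_extractor_map is expected to handle. A minimal standalone sketch of that behaviour (the module name 'projects' mirrors the test; any name would do):

from unittest.mock import patch

# Mapping a module name to None in sys.modules blocks any import of it with ImportError
with patch.dict('sys.modules', {'projects': None}):
    try:
        import projects  # noqa: F401
    except ImportError as exc:
        print(f'import blocked as expected: {exc}')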

ibllib/tests/qc/test_alignment_qc.py

Lines changed: 17 additions & 14 deletions
@@ -11,6 +11,7 @@

 from one.api import ONE
 from neuropixel import trace_header
+import one.alf.io as alfio

 from ibllib.tests import TEST_DB
 from ibllib.tests.fixtures.utils import register_new_session
@@ -398,7 +399,6 @@ class TestUploadToFlatIron(unittest.TestCase):
     xyz_picks = None
     trajectory = None

-    @unittest.skip("Skip FTP upload test")
     @classmethod
     def setUpClass(cls) -> None:
         data = np.load(Path(Path(__file__).parent.parent.
@@ -413,6 +413,9 @@ def setUpClass(cls) -> None:
                        allow_pickle=True)
         insertion = data['insertion'].tolist()
         insertion['json'] = {'xyz_picks': cls.xyz_picks}
+        # makes sure there is no existing probe insertion before creating a new one
+        for ins in one.alyx.rest('insertions', 'list', session=insertion['session'], name=insertion['name'], no_cache=True):
+            one.alyx.rest('insertions', 'delete', id=ins['id'])
         probe_insertion = one.alyx.rest('insertions', 'create', data=insertion)
         cls.probe_id = probe_insertion['id']
         cls.probe_name = probe_insertion['name']
@@ -422,23 +425,23 @@ def setUpClass(cls) -> None:
         cls.trajectory.update({'json': cls.alignments})
         cls.traj = one.alyx.rest('trajectories', 'create', data=cls.trajectory)

-        align_qc = AlignmentQC(cls.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
-        align_qc.load_data(prev_alignments=cls.traj['json'],
-                           xyz_picks=np.array(cls.xyz_picks) / 1e6,
-                           cluster_chns=cls.cluster_chns,
-                           depths=SITES_COORDINATES[:, 1],
-                           chn_coords=SITES_COORDINATES)
-        cls.file_paths = align_qc.resolve_manual('2020-09-28T15:57:25_mayo', update=True,
-                                                 upload_alyx=True, upload_flatiron=True)
+        cls.align_qc = AlignmentQC(cls.probe_id, one=one, brain_atlas=brain_atlas, channels=False)
+        cls.align_qc.load_data(prev_alignments=cls.traj['json'],
+                               xyz_picks=np.array(cls.xyz_picks) / 1e6,
+                               cluster_chns=cls.cluster_chns,
+                               depths=SITES_COORDINATES[:, 1],
+                               chn_coords=SITES_COORDINATES)
+        cls.file_paths = cls.align_qc.resolve_manual('2020-09-28T15:57:25_mayo', update=True,
+                                                     upload_alyx=True, upload_flatiron=False)
         print(cls.file_paths)

     def test_data_content(self):
-        alf_path = one.eid2path(EPHYS_SESSION).joinpath('alf', self.probe_name)
-        channels_mlapdv = np.load(alf_path.joinpath('channels.mlapdv.npy'))
-        self.assertTrue(np.all(np.abs(channels_mlapdv) > 0))
-        channels_id = np.load(alf_path.joinpath('channels.brainLocationIds_ccf_2017.npy'))
-        self.assertEqual(channels_mlapdv.shape[0], channels_id.shape[0])
+        files_to_register = self.align_qc.create_electrode_datasets(alignment_key='2020-09-28T15:57:25_mayo')
+        esites = alfio.load_object(one.eid2path(EPHYS_SESSION).joinpath('alf', self.probe_name), 'electrodeSites')
+        np.testing.assert_array_equal([v.shape[0] for v in esites.values()], 384)
+        self.assertEqual(len(files_to_register), 3)

+    @unittest.skip
     def test_upload_to_flatiron(self):
         for file in self.file_paths:
             file_registered = one.alyx.get(f'/datasets?&session={EPHYS_SESSION}'
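For reference, a rough sketch of the loading pattern the new test_data_content assertion relies on. The probe path below is a placeholder and would need to point at a folder containing the electrodeSites.* files written by create_electrode_datasets:

from pathlib import Path
import one.alf.io as alfio

# Placeholder path: substitute a real session's alf/<probe> folder
probe_path = Path('/data/Subjects/subject/2020-01-01/001/alf/probe00')
esites = alfio.load_object(probe_path, 'electrodeSites')

# load_object returns a dict-like bunch keyed by attribute:
# mlapdv, brainLocationIds_ccf_2017 and localCoordinates, one row per electrode site
for attribute, values in esites.items():
    print(attribute, values.shape)

# all attributes should share the same number of rows (e.g. 384 sites for a Neuropixels 1.0 probe)
assert len({v.shape[0] for v in esites.values()}) == 1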
