
Commit 065a259

use alyx client instead of ONE in insertion creation
1 parent 44203cb commit 065a259

1 file changed: +55 −42
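
The gist of the change: `create_insertion` now takes the bare Alyx rest client instead of a full ONE instance, so callers that hold a `ONE` object pass its `alyx` attribute. A minimal before/after sketch (the `md`, `label` and `eid` values are hypothetical placeholders):

    # before this commit: a full ONE instance was required
    # description, insertion = create_insertion(one, md, 'probe00', eid)

    # after: only the rest client is needed; ONE exposes it as `one.alyx`
    description, insertion = create_insertion(one.alyx, md, 'probe00', eid)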

ibllib/ephys/spikes.py

@@ -8,7 +8,7 @@
 import numpy as np
 from one.alf.path import get_session_path
 import spikeglx
-from one.api import ONE
+from one.webclient import AlyxClient
 
 from iblutil.util import Bunch
 import phylib.io.alf
@@ -19,7 +19,7 @@
 _logger = logging.getLogger(__name__)
 
 
-def create_insertion(one: ONE, md: dict, label: str, eid: str) -> Tuple[dict, dict]:
+def create_insertion(alyx: AlyxClient, md: dict, label: str, eid: str) -> Tuple[dict, dict]:
     """
     Create or update a probe insertion in Alyx and return description and the alyx rest record.
 
@@ -29,8 +29,8 @@ def create_insertion(one: ONE, md: dict, label: str, eid: str) -> Tuple[dict, di
 
     Parameters
     ----------
-    one : one.api.ONE
-        An instance of the ONE API to interact with Alyx.
+    alyx : one.webclient.AlyxClient
+        An instance of the Alyx rest client.
     md : dict
         A Bunch object containing metadata from a spikeglx meta file, including
         'neuropixelVersion', 'serial', and 'fileName'.
@@ -48,18 +48,17 @@ def create_insertion(one: ONE, md: dict, label: str, eid: str) -> Tuple[dict, di
         - insertion (dict): The Alyx record for the created or updated probe insertion.
     """
     # create json description
-    description = {'label': label, 'model': md['neuropixelVersion'], 'serial': int(md['serial']),
-                   'raw_file_name': md['fileName']}
+    description = {'label': label, 'model': md['neuropixelVersion'], 'serial': int(md['serial']), 'raw_file_name': md['fileName']}
 
     # create or update probe insertion on alyx
     alyx_insertion = {'session': eid, 'model': md['neuropixelVersion'], 'serial': md['serial'], 'name': label}
-    pi = one.alyx.rest('insertions', 'list', session=eid, name=label)
+    pi = alyx.rest('insertions', 'list', session=eid, name=label)
     if len(pi) == 0:
         qc_dict = {'qc': 'NOT_SET', 'extended_qc': {}}
         alyx_insertion.update({'json': qc_dict})
-        insertion = one.alyx.rest('insertions', 'create', data=alyx_insertion)
+        insertion = alyx.rest('insertions', 'create', data=alyx_insertion)
     else:
-        insertion = one.alyx.rest('insertions', 'partial_update', data=alyx_insertion, id=pi[0]['id'])
+        insertion = alyx.rest('insertions', 'partial_update', data=alyx_insertion, id=pi[0]['id'])
 
     return description, insertion
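
Because only the rest client is required, the function can now also be driven without a ONE instance at all. A minimal sketch, assuming a hypothetical Alyx server URL and a placeholder meta file path; `spikeglx.read_meta_data` is assumed to be the reader producing the metadata Bunch the docstring describes:

    from one.webclient import AlyxClient
    import spikeglx

    alyx = AlyxClient(base_url='https://alyx.example.org')  # hypothetical server
    md = spikeglx.read_meta_data(meta_file)  # meta_file: path to a *.ap.meta file (placeholder)
    description, insertion = create_insertion(alyx, md, label='probe00', eid=eid)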

@@ -95,16 +94,16 @@ def probes_description(ses_path, one):
             nshanks = np.unique(geometry['shank'])
             for shank in nshanks:
                 label_ext = f'{label}{chr(97 + int(shank))}'
-                description, insertion = create_insertion(one, md, label_ext, eid)
+                description, insertion = create_insertion(one.alyx, md, label_ext, eid)
                 probe_description.append(description)
                 alyx_insertions.append(insertion)
         # NP2.4 meta that has already been split
         else:
-            description, insertion = create_insertion(one, md, label, eid)
+            description, insertion = create_insertion(one.alyx, md, label, eid)
             probe_description.append(description)
             alyx_insertions.append(insertion)
     else:
-        description, insertion = create_insertion(one, md, label, eid)
+        description, insertion = create_insertion(one.alyx, md, label, eid)
         probe_description.append(description)
         alyx_insertions.append(insertion)
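
For multi-shank NP2.4 recordings each shank gets its own insertion, suffixed with a letter derived from the shank index; since 97 is the ASCII code of 'a', `chr(97 + int(shank))` maps shanks 0, 1, 2, 3 to 'a', 'b', 'c', 'd'. For example:

    label = 'probe00'
    [f'{label}{chr(97 + s)}' for s in range(4)]
    # ['probe00a', 'probe00b', 'probe00c', 'probe00d']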

@@ -131,8 +130,7 @@ def _sr(ap_file):
 
     out_files = []
     label = ap_file.parts[-1]  # now the bin file is always in a folder bearing the name of probe
-    sync_file = ap_file.parent.joinpath(
-        ap_file.name.replace('.ap.', '.sync.')).with_suffix('.npy')
+    sync_file = ap_file.parent.joinpath(ap_file.name.replace('.ap.', '.sync.')).with_suffix('.npy')
     # try to get probe sync if it doesn't exist
     if not sync_file.exists():
         _, sync_files = sync_probes.sync(get_session_path(ap_file))
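
The sync file name is derived from the AP binary by swapping the '.ap.' tag for '.sync.' and forcing a '.npy' extension. Worked through on a hypothetical file name:

    from pathlib import Path

    ap_file = Path('probe00/_spikeglx_ephysData_g0_t0.imec0.ap.cbin')  # hypothetical
    sync_file = ap_file.parent.joinpath(ap_file.name.replace('.ap.', '.sync.')).with_suffix('.npy')
    # probe00/_spikeglx_ephysData_g0_t0.imec0.sync.npy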
@@ -141,8 +139,10 @@ def _sr(ap_file):
     if not sync_file.exists():
         # if there is no sync file it means something went wrong. Outputs the spike sorting
         # in time according to the probe by following ALF convention on the times objects
-        error_msg = f'No synchronisation file for {label}: {sync_file}. The spike-' \
-                    f'sorting is not synchronized and data not uploaded on Flat-Iron'
+        error_msg = (
+            f'No synchronisation file for {label}: {sync_file}. The spike-'
+            f'sorting is not synchronized and data not uploaded on Flat-Iron'
+        )
         _logger.error(error_msg)
         # remove the alf folder if the sync failed
         shutil.rmtree(out_path)
@@ -153,9 +153,20 @@ def _sr(ap_file):
     interp_times = apply_sync(sync_file, spike_samples / _sr(ap_file), forward=True)
     np.save(st_file, interp_times)
     # get the list of output files
-    out_files.extend([f for f in out_path.glob("*.*") if
-                      f.name.startswith(('channels.', 'drift', 'clusters.', 'spikes.', 'templates.',
-                                         '_kilosort_', '_phy_spikes_subset', '_ibl_log.info'))])
+    out_files.extend([
+        f
+        for f in out_path.glob('*.*')
+        if f.name.startswith((
+            'channels.',
+            'drift',
+            'clusters.',
+            'spikes.',
+            'templates.',
+            '_kilosort_',
+            '_phy_spikes_subset',
+            '_ibl_log.info',
+        ))
+    ])
     # the QC files computed during spike sorting stay within the raw ephys data folder
     out_files.extend(list(ap_file.parent.glob('_iblqc_*AP.*.npy')))
     return out_files, 0
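
The reflowed comprehension leans on the fact that `str.startswith` accepts a tuple of prefixes and returns True when any one of them matches:

    'spikes.times.npy'.startswith(('channels.', 'spikes.', 'templates.'))  # True
    'params.py'.startswith(('channels.', 'spikes.', 'templates.'))         # False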
@@ -191,31 +202,33 @@ def ks2_to_tar(ks_path, out_path, force=False):
         tar_dir.extractall(path=save_path)
 
     """
-    ks2_output = ['amplitudes.npy',
-                  'channel_map.npy',
-                  'channel_positions.npy',
-                  'cluster_Amplitude.tsv',
-                  'cluster_ContamPct.tsv',
-                  'cluster_group.tsv',
-                  'cluster_KSLabel.tsv',
-                  'params.py',
-                  'pc_feature_ind.npy',
-                  'pc_features.npy',
-                  'similar_templates.npy',
-                  'spike_clusters.npy',
-                  'spike_sorting_ks2.log',
-                  'spike_templates.npy',
-                  'spike_times.npy',
-                  'template_feature_ind.npy',
-                  'template_features.npy',
-                  'templates.npy',
-                  'templates_ind.npy',
-                  'whitening_mat.npy',
-                  'whitening_mat_inv.npy']
+    ks2_output = [
+        'amplitudes.npy',
+        'channel_map.npy',
+        'channel_positions.npy',
+        'cluster_Amplitude.tsv',
+        'cluster_ContamPct.tsv',
+        'cluster_group.tsv',
+        'cluster_KSLabel.tsv',
+        'params.py',
+        'pc_feature_ind.npy',
+        'pc_features.npy',
+        'similar_templates.npy',
+        'spike_clusters.npy',
+        'spike_sorting_ks2.log',
+        'spike_templates.npy',
+        'spike_times.npy',
+        'template_feature_ind.npy',
+        'template_features.npy',
+        'templates.npy',
+        'templates_ind.npy',
+        'whitening_mat.npy',
+        'whitening_mat_inv.npy',
+    ]
 
     out_file = Path(out_path).joinpath('_kilosort_raw.output.tar')
     if out_file.exists() and not force:
-        _logger.info(f"Already converted ks2 to tar: for {ks_path}, skipping.")
+        _logger.info(f'Already converted ks2 to tar: for {ks_path}, skipping.')
         return [out_file]
 
     with tarfile.open(out_file, 'w') as tar_dir:
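
A minimal usage sketch for the archiving step, with hypothetical paths; per the code above, the function returns a one-element list with the tar file it wrote (or the pre-existing one when it already exists and `force` is False):

    out_files = ks2_to_tar('/data/session/spike_sorters/ks2/probe00',  # hypothetical ks2 output
                           '/data/session/alf/probe00')                # hypothetical destination
    # [Path('/data/session/alf/probe00/_kilosort_raw.output.tar')]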
@@ -226,7 +239,7 @@ def ks2_to_tar(ks_path, out_path, force=False):
     return [out_file]
 
 
-def detection(data, fs, h, detect_threshold=-4, time_tol=.002, distance_threshold_um=70):
+def detection(data, fs, h, detect_threshold=-4, time_tol=0.002, distance_threshold_um=70):
     """
     Detects and de-duplicates negative voltage spikes based on voltage thresholding.
     The de-duplication step locks in maximum amplitude events. To account for collisions the amplitude
