
Commit 499db96

Merge pull request #1519 from zm711/fix-spikegadgets
Allow spikegadgets to handle another data style
2 parents 375c973 + 92a3095

1 file changed: neo/rawio/spikegadgetsrawio.py (+26 −7 lines)
@@ -81,14 +81,18 @@ def _source_name(self):
         return self.filename
 
     def _produce_ephys_channel_ids(self, n_total_channels, n_channels_per_chip):
-        """Compute the channel ID labels
+        """Compute the channel ID labels for a subset of spikegadgets recordings
         The ephys channels in the .rec file are stored in the following order:
         hwChan ID of channel 0 of first chip, hwChan ID of channel 0 of second chip, ..., hwChan ID of channel 0 of Nth chip,
         hwChan ID of channel 1 of first chip, hwChan ID of channel 1 of second chip, ..., hwChan ID of channel 1 of Nth chip,
         ...
         So if there are 32 channels per chip and 128 channels (4 chips), then the channel IDs are:
         0, 32, 64, 96, 1, 33, 65, 97, ..., 127
         See also: https://github.com/NeuralEnsemble/python-neo/issues/1215
+
+        This doesn't work for all types of spikegadgets recordings;
+        see: https://github.com/NeuralEnsemble/python-neo/issues/1517
+
         """
         ephys_channel_ids_list = []
         for hw_channel in range(n_channels_per_chip):
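
As a side note for readers, the interleaving described in this docstring can be reproduced with a short standalone sketch. The helper below is illustrative only, not part of the neo API:

    # Standalone sketch of the interleaved channel-ID computation documented above.
    # For 4 chips x 32 channels it yields 0, 32, 64, 96, 1, 33, 65, 97, ..., 127.
    def produce_ephys_channel_ids(n_total_channels, n_channels_per_chip):
        n_chips = n_total_channels // n_channels_per_chip
        ids = []
        for hw_channel in range(n_channels_per_chip):
            # one pass per on-chip channel index, visiting every chip in order
            ids.extend(hw_channel + chip * n_channels_per_chip for chip in range(n_chips))
        return ids

    assert produce_ephys_channel_ids(128, 32)[:8] == [0, 32, 64, 96, 1, 33, 65, 97]
    assert produce_ephys_channel_ids(128, 32)[-1] == 127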
@@ -129,9 +133,11 @@ def _parse_header(self):
             num_ephy_channels = sconf_channels
         if sconf_channels > num_ephy_channels:
             raise NeoReadWriteError(
-                "SpikeGadgets: the number of channels in the spike configuration is larger than the number of channels in the hardware configuration"
+                "SpikeGadgets: the number of channels in the spike configuration is larger "
+                "than the number of channels in the hardware configuration"
             )
 
+        # as spikegadgets changes, we should follow this
         try:
             num_chan_per_chip = int(sconf.attrib["chanPerChip"])
         except KeyError:
@@ -207,9 +213,16 @@ def _parse_header(self):
                 signal_streams.append((stream_name, stream_id))
                 self._mask_channels_bytes[stream_id] = []
 
-        channel_ids = self._produce_ephys_channel_ids(num_ephy_channels, num_chan_per_chip)
+        # we can only produce these channel IDs for a subset of spikegadgets setups. If this criterion
+        # isn't met then we should just use the raw channel IDs and let the end user sort everything out
+        if num_ephy_channels % num_chan_per_chip == 0:
+            channel_ids = self._produce_ephys_channel_ids(num_ephy_channels, num_chan_per_chip)
+            raw_channel_ids = False
+        else:
+            raw_channel_ids = True
+
         chan_ind = 0
-        self.is_scaleable = "spikeScalingToUv" in sconf[0].attrib
+        self.is_scaleable = all("spikeScalingToUv" in trode.attrib for trode in sconf)
         if not self.is_scaleable:
             self.logger.warning(
                 "Unable to read channel gain scaling (to uV) from .rec header. Data has no physical units!"
@@ -224,8 +237,14 @@ def _parse_header(self):
                 units = ""
 
             for schan in trode:
-                chan_id = str(channel_ids[chan_ind])
-                name = "hwChan" + chan_id
+                # Here we use raw IDs if necessary for parsing (for some neuropixel
+                # recordings); otherwise we default back to the computed hwChan IDs
+                if raw_channel_ids:
+                    name = "trode" + trode.attrib["id"] + "chan" + schan.attrib["hwChan"]
+                    chan_id = schan.attrib["hwChan"]
+                else:
+                    chan_id = str(channel_ids[chan_ind])
+                    name = "hwChan" + chan_id
 
                 offset = 0.0
                 signal_channels.append(
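
For concreteness, the two branches yield channel names like the following; the header values here are made up for illustration:

    # Hypothetical header values showing the two naming schemes above.
    trode_id, hw_chan = "1", "5"   # e.g. a <SpikeChannel hwChan="5"> inside trode id="1"
    print("trode" + trode_id + "chan" + hw_chan)   # fallback branch -> "trode1chan5"

    channel_ids = [0, 32, 64, 96]                  # first computed IDs for a 4x32 setup
    print("hwChan" + str(channel_ids[0]))          # default branch  -> "hwChan0"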
@@ -250,7 +269,7 @@ def _parse_header(self):
         signal_streams = np.array(signal_streams, dtype=_signal_stream_dtype)
         signal_channels = np.array(signal_channels, dtype=_signal_channel_dtype)
 
-        # remove some stream if no wanted
+        # remove some streams if not wanted
         if self.selected_streams is not None:
             if isinstance(self.selected_streams, str):
                 self.selected_streams = [self.selected_streams]
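
As a usage note, selected_streams is the reader argument this block normalizes. A minimal sketch, assuming a placeholder filename and stream name:

    from neo.rawio import SpikeGadgetsRawIO

    # "myfile.rec" and "trodes" are placeholder values; real stream names
    # depend on the devices declared in the .rec header.
    reader = SpikeGadgetsRawIO(filename="myfile.rec", selected_streams="trodes")
    reader.parse_header()  # a bare string is wrapped into a list, as shown above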
