
Commit a733c67

Merge branch 'master' into fix_blackrock
2 parents: 61a1f13 + 1dad7e2

21 files changed (+789 / -340 lines)

.github/workflows/io-test.yml

Lines changed: 0 additions & 7 deletions
@@ -114,13 +114,6 @@ jobs:
       #   run: |
       #     pip install --no-dependencies -e .

-      - name: Install wine
-        run: |
-          sudo rm -f /etc/apt/sources.list.d/microsoft-prod.list
-          sudo dpkg --add-architecture i386
-          sudo apt-get update -qq
-          sudo apt-get install -yqq --allow-downgrades libc6:i386 libgcc-s1:i386 libstdc++6:i386 wine
-
       - name: Pip list
         run: |
           pip list

Lines changed: 109 additions & 0 deletions
@@ -0,0 +1,109 @@
+name: NeoPlexon2Test
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "0 12 * * 0"  # weekly at noon UTC on Sundays
+
+jobs:
+  build-and-test:
+    name: Test on (${{ inputs.os }}) (${{ matrix.python-version }}) (${{ matrix.numpy-version }})
+    runs-on: ${{ inputs.os }}
+    strategy:
+      fail-fast: true
+      matrix:
+        python-version: ['3.9', '3.12']
+        numpy-version: ['1.26', '2.0']
+    defaults:
+      # by default run in bash mode (required for conda usage)
+      run:
+        shell: bash -l {0}
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Get current year-month
+        id: date
+        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
+
+      - name: Get ephy_testing_data current head hash
+        # the key depends on the last commit of the repo https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git
+        id: ephy_testing_data
+        run: |
+          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
+
+      - uses: actions/cache/restore@v4
+        # load the cached ephy_testing_data dataset
+        id: cache-datasets
+        with:
+          path: ~/ephy_testing_data
+          key: ${{ runner.os }}-datasets-${{ steps.ephy_testing_data.outputs.dataset_hash }}
+          restore-keys: ${{ runner.os }}-datasets-
+
+      - uses: conda-incubator/setup-miniconda@v3
+        with:
+          activate-environment: neo-test-env-${{ matrix.python-version }}
+          python-version: "${{ matrix.python-version }}"
+
+      - name: Install testing dependencies
+        # the testing environment is only created from yml if no cache was found
+        # restore-key hits should result in `cache-hit` == 'false'
+        # if: steps.cache-conda-env.outputs.cache-hit != 'true'
+        run: |
+          conda install pip numpy=${{ matrix.numpy-version }} -c conda-forge
+          # this command is for updating the cache; we are testing its removal
+          # conda env update --name neo-test-env-${{ matrix.python-version }} --file environment_testing.yml --prune
+
+      - name: Install git-annex
+        # this is the trick from the spikeinterface repo for getting git-annex to work with datalad
+        # see https://github.com/SpikeInterface/spikeinterface/pull/3877 for more info
+        shell: bash
+        run: |
+          pip install datalad-installer
+          datalad-installer --sudo ok git-annex --method datalad/packages
+          git config --global filter.annex.process "git-annex filter-process"  # recommended for efficiency
+
+      - name: Configure git
+        run: |
+          git config --global user.email "neo_ci@fake_mail.com"
+          git config --global user.name "neo CI"
+
+      - name: Python version
+        run: |
+          which python
+          python --version
+
+      - name: Install neo including dependencies
+        # installation with dependencies is only required if no cache was found
+        # restore-key hits should result in `cache-hit` == 'false'
+        # if: steps.cache-conda-env.outputs.cache-hit != 'true'
+        run: |
+          pip install --upgrade -e .
+          pip install .[test]
+
+      - name: Install wine
+        run: |
+          sudo rm -f /etc/apt/sources.list.d/microsoft-prod.list
+          sudo dpkg --add-architecture i386
+          sudo apt-get update -qq
+          sudo apt-get install -yqq --allow-downgrades libc6:i386 libgcc-s1:i386 libstdc++6:i386 wine
+
+      - name: Pip list
+        run: |
+          pip list
+
+      - name: Conda list
+        run: |
+          conda list
+
+      - name: Test with pytest
+        env:
+          HDF5_PLUGIN_PATH: ${{ github.workspace }}/hdf5_local_plugin_path
+          PLEXON2_TEST: true
+        run: |
+          # only neo.rawio and neo.io
+          pytest --cov=neo neo/test/rawiotest
+          pytest --cov=neo neo/test/iotest
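
A side note on the caching scheme above: the `actions/cache/restore` key embeds the remote HEAD hash of ephy_testing_data, so the dataset cache is invalidated exactly when new test data lands upstream, while the `restore-keys` prefix still lets a stale cache seed the download. The hash lookup that the `git ls-remote` step performs can be reproduced locally; a minimal Python sketch using only the standard library (the URL is the one from the workflow):

import subprocess

# same query the workflow step runs: the hash of the remote HEAD of the test-data repo
result = subprocess.run(
    ["git", "ls-remote", "https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git", "HEAD"],
    capture_output=True, text=True, check=True,
)
# stdout is "<hash>\tHEAD"; the first field becomes the variable part of the cache key
dataset_hash = result.stdout.split()[0]
print(f"cache key: <os>-datasets-{dataset_hash}")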

doc/source/authors.rst

Lines changed: 8 additions & 1 deletion
@@ -71,7 +71,7 @@ and may not be the current affiliation of a contributor.
 * Daniel P. Crepeau [30]
 * Divyansh Gupta [31]
 * Nate Dolensek [3]
-* Philipp Hornauer [32]
+* Philipp Hornauer [32, 45]
 * Robert Wolff [42]
 * Jules Lebert [33]
 * Benjamin Heasly
@@ -95,6 +95,9 @@ and may not be the current affiliation of a contributor.
 * Rémi Proville [44]
 * Paul Adkisson [24]
 * Luiz Tauffer [24]
+* Akshaj Verma [46]
+* Letizia Signorelli [47]
+* Daniel Parthier [48]

 1. Centre de Recherche en Neuroscience de Lyon, CNRS UMR5292 - INSERM U1028 - Université Claude Bernard Lyon 1
 2. Unité de Neuroscience, Information et Complexité, CNRS UPR 3293, Gif-sur-Yvette, France
@@ -140,6 +143,10 @@ and may not be the current affiliation of a contributor.
 42. Istituto Italiano di Tecnologia (IIT), Genoa, Italy
 43. University of Genoa, Italy
 44. AquiNeuro, SAS
+45. MaxWell Biosystems AG
+46. Brain Center, University Medical Center Utrecht, Utrecht University, The Netherlands
+47. Centre for Molecular Medicine Norway (NCMM), University of Oslo, Norway
+48. Charité - Universitätsmedizin Berlin, Freie Universität Berlin and Humboldt-Universität zu Berlin


doc/source/governance.rst

Lines changed: 2 additions & 0 deletions
@@ -38,6 +38,7 @@ The current maintainers are:
 - Julia Sprenger (`@JuliaSprenger`_)
 - Michael Denker (`@mdenker`_)
 - Alessio Buccino (`@alejoe91`_)
+- Zach McKenzie (`@zm711`_)

 .. _`Neo maintainers team`: https://github.com/orgs/NeuralEnsemble/teams/neo-maintainers
@@ -47,3 +48,4 @@ The current maintainers are:
 .. _`@JuliaSprenger`: https://github.com/JuliaSprenger
 .. _`@mdenker`: https://github.com/mdenker
 .. _`@alejoe91`: https://github.com/alejoe91
+.. _`@zm711`: https://github.com/zm711

neo/core/spiketrainlist.py

Lines changed: 5 additions & 2 deletions
@@ -113,8 +113,11 @@ def __init__(self, items=None, parent=None):
         self._channel_id_array = None
         self._all_channel_ids = None
         self._spiketrain_metadata = {}
-        if parent is not None and parent.__class__.__name__ != "Segment":
-            raise AttributeError("The parent class must be a Segment")
+
+        from .segment import Segment  # import here rather than at the top to avoid circular import
+
+        if parent is not None and not isinstance(parent, Segment):
+            raise AttributeError("If provided, the parent class must be a Segment")
         self.segment = parent

     @property
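
A note on the pattern in this change: deferring the Segment import into __init__ is the standard way to break an import cycle, since segment.py imports from this module in the other direction. A minimal sketch of the idea with two hypothetical modules (container.py / item.py stand in for segment.py / spiketrainlist.py):

# container.py -- imports Item at module level
from item import Item

class Container:
    def __init__(self):
        self.items = []

# item.py -- must not import Container at module level, or the cycle
# container -> item -> container would fail while container.py is still loading
class Item:
    def __init__(self, parent=None):
        from container import Container  # deferred until call time

        if parent is not None and not isinstance(parent, Container):
            raise AttributeError("If provided, the parent must be a Container")
        self.parent = parent

Switching to isinstance() is also more robust than comparing __class__.__name__: it accepts subclasses of Segment and is not fooled by an unrelated class that happens to share the name.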

neo/rawio/axonarawio.py

Lines changed: 18 additions & 4 deletions
@@ -377,8 +377,7 @@ def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, strea
         if channel_indexes is None:
             channel_indexes = [i for i in range(bin_dict["num_channels"])]
         elif isinstance(channel_indexes, slice):
-            channel_indexes_all = [i for i in range(bin_dict["num_channels"])]
-            channel_indexes = channel_indexes_all[channel_indexes]
+            channel_indexes = self._get_active_channels()

         num_samples = i_stop - i_start

@@ -562,6 +561,20 @@ def get_active_tetrode(self):
             active_tetrodes.append(tetrode_id)
         return active_tetrodes

+    def _get_active_channels(self):
+        """
+        Returns the ID numbers of the active channels.
+        E.g. [20, 21, 22, 23] when tetrode 6 is active.
+        """
+        active_tetrodes = self.get_active_tetrode()
+        active_channels = []
+
+        for tetrode in active_tetrodes:
+            chans = self._get_channel_from_tetrode(tetrode)
+            active_channels.append(chans)
+
+        return np.concatenate(active_channels)
+
     def _get_channel_from_tetrode(self, tetrode):
         """
         This function will take the tetrode number and return the Axona
@@ -632,12 +645,13 @@ def _get_signal_chan_header(self):
         gain_list = self._get_channel_gain()
         offset = 0  # What is the offset?

+        first_channel = (active_tetrode_set[0] - 1) * elec_per_tetrode
         sig_channels = []
         for itetr in range(num_active_tetrode):

            for ielec in range(elec_per_tetrode):
-                cntr = (itetr * elec_per_tetrode) + ielec
-                ch_name = f"{itetr + 1}{letters[ielec]}"
+                cntr = (itetr * elec_per_tetrode) + ielec + first_channel
+                ch_name = f"{itetr + active_tetrode_set[0]}{letters[ielec]}"
                 chan_id = str(cntr)
                 gain = gain_list[cntr]
                 stream_id = "0"
neo/rawio/blackrockrawio.py

Lines changed: 23 additions & 4 deletions
@@ -605,10 +605,17 @@ def _parse_header(self):
             for c in range(spike_channels.size):
                 st_ann = seg_ann["spikes"][c]
                 channel_id, unit_id = self.internal_unit_ids[c]
-                unit_tag = {0: "unclassified", 255: "noise"}.get(unit_id, str(unit_id))
                 st_ann["channel_id"] = channel_id
                 st_ann["unit_id"] = unit_id
-                st_ann["unit_tag"] = unit_tag
+                if unit_id == 0:
+                    st_ann["unit_classification"] = "unclassified"
+                elif 1 <= unit_id <= 16:
+                    st_ann["unit_classification"] = "sorted"
+                elif unit_id == 255:
+                    st_ann["unit_classification"] = "noise"
+                else:  # 17-254 are reserved
+                    st_ann["unit_classification"] = "reserved"
+                st_ann["unit_tag"] = st_ann["unit_classification"]
                 st_ann["description"] = f"SpikeTrain channel_id: {channel_id}, unit_id: {unit_id}"
                 st_ann["file_origin"] = self._filenames["nev"] + ".nev"

@@ -1273,7 +1280,19 @@ def __read_nev_data(self, nev_data_masks, nev_data_types):
         # read all raw data packets and markers
         dt0 = [("timestamp", ts_format), ("packet_id", "uint16"), ("value", f"S{data_size - header_skip}")]

-        raw_data = np.memmap(filename, offset=header_size, dtype=dt0, mode="r")
+        # expected number of data packets. We are not sure why, but it seems we can get partial data packets.
+        # Based on Blackrock's own code this is okay, so rounding down with int() is necessary to obtain a
+        # memory map of full packets and toss the trailing partial packet.
+        # See reference: https://github.com/BlackrockNeurotech/Python-Utilities/blob/fa75aa671680306788e10d3d8dd625f9da4ea4f6/brpylib/brpylib.py#L580-L587
+        n_packets = int((self.__get_file_size(filename) - header_size) / data_size)
+
+        raw_data = np.memmap(
+            filename,
+            offset=header_size,
+            dtype=dt0,
+            shape=(n_packets,),
+            mode="r",
+        )

         masks = self.__nev_data_masks(raw_data["packet_id"])
         types = self.__nev_data_types(data_size)
@@ -1816,7 +1835,7 @@ def __nev_params(self, param_name):
                 hour=self.__nev_basic_header["hour"],
                 minute=self.__nev_basic_header["minute"],
                 second=self.__nev_basic_header["second"],
-                microsecond=self.__nev_basic_header["millisecond"],
+                microsecond=int(self.__nev_basic_header["millisecond"]) * 1000,
             ),
             "max_res": self.__nev_basic_header["timestamp_resolution"],
             "channel_ids": self.__nev_ext_header[b"NEUEVWAV"]["electrode_id"],

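To make the packet-count fix concrete: without an explicit shape, np.memmap infers the record count from the file size and typically raises a ValueError complaining that the available data is not a multiple of the data-type size when a partial packet trails the stream; rounding down and passing shape maps only the complete packets. A small illustration of the arithmetic, with invented sizes:

# invented sizes purely to illustrate the rounding
header_size = 336                           # bytes of NEV headers before the packet stream
data_size = 104                             # bytes per data packet in this file
file_size = header_size + 104 * 1000 + 60   # stream ends with a 60-byte partial packet

# floor division drops the trailing partial packet, as Blackrock's own reader does
n_packets = int((file_size - header_size) / data_size)
assert n_packets == 1000  # the 60 leftover bytes are tossed
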
neo/rawio/maxwellrawio.py

Lines changed: 20 additions & 14 deletions
@@ -242,18 +242,20 @@ def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, strea
             raise (e)


-_hdf_maxwell_error = """Maxwell file format is based on HDF5.
-The internal compression requires a custom plugin!!!
-This is a big pain for the end user.
-You, as a end user, should ask Maxwell company to change this.
-Please visit this page and install the missing decompression libraries:
-https://share.mxwbio.com/d/4742248b2e674a85be97/
-Then, link the decompression library by setting the `HDF5_PLUGIN_PATH` to your
-installation location, e.g. via
+_hdf_maxwell_error = """The MaxWell file compression requires a custom plugin.
+You can use the auto_install_maxwell_hdf5_compression_plugin() function or
+(if it fails) install it manually:
+Download the missing decompression library:
+https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35
+Then, link the decompression library by setting the `HDF5_PLUGIN_PATH` to its location,
+e.g. directly in Python via:
 os.environ['HDF5_PLUGIN_PATH'] = '/path/to/custom/hdf5/plugin/'
-
-Alternatively, you can use the auto_install_maxwell_hdf5_compression_plugin() below
-function that do it automagically.
+or in your shell via:
+export HDF5_PLUGIN_PATH=/path/to/custom/hdf5/plugin/
+You can also set the `HDF5_PLUGIN_PATH` environment variable in your shell
+configuration file (e.g. .bashrc, .bash_profile, .zshrc, etc.) to make it
+permanent.
+See https://mxw.bio/MxW_Doc_Installing_Decompression_Library_to_load_MaxLab_Live_Recordings for more details.
 """


@@ -267,13 +269,17 @@ def auto_install_maxwell_hdf5_compression_plugin(hdf5_plugin_path=None, force_do
     hdf5_plugin_path.mkdir(exist_ok=True)

     if platform.system() == "Linux":
-        remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FLinux%2Flibcompression.so&dl=1"
+        remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FLinux%2Flibcompression.so&dl=1"
         local_lib = hdf5_plugin_path / "libcompression.so"
     elif platform.system() == "Darwin":
-        remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FMacOS%2Flibcompression.dylib&dl=1"
+        if platform.machine() == "arm64":
+            remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FMacOS%2FMac_arm64%2Flibcompression.dylib&dl=1"
+        else:
+            # Assuming x86_64 for MacOS
+            remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FMacOS%2FMac_x86_64%2Flibcompression.dylib&dl=1"
         local_lib = hdf5_plugin_path / "libcompression.dylib"
     elif platform.system() == "Windows":
-        remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FWindows%2Fcompression.dll&dl=1"
+        remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FWindows%2Fcompression.dll&dl=1"
         local_lib = hdf5_plugin_path / "compression.dll"

     if not force_download and local_lib.is_file():
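
For a user hitting the error message above, a typical recovery session might look like the following sketch (the import path is the module shown here; whether the automatic install also sets the environment variable for the current process is not visible in this diff, so setting it explicitly is the safe option):

import os

# option 1: let neo fetch the platform-specific library automatically
from neo.rawio.maxwellrawio import auto_install_maxwell_hdf5_compression_plugin
auto_install_maxwell_hdf5_compression_plugin()

# option 2: point HDF5 at a manually downloaded library; do this before
# reading the file, since the plugin path is consulted when data is read
os.environ["HDF5_PLUGIN_PATH"] = "/path/to/custom/hdf5/plugin/"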

neo/rawio/micromedrawio.py

Lines changed: 5 additions & 3 deletions
@@ -51,7 +51,6 @@ def __init__(self, filename=""):
         self.filename = filename

     def _parse_header(self):
-
         with open(self.filename, "rb") as fid:
             f = StructFile(fid)

@@ -133,7 +132,11 @@ def _parse_header(self):
         sig_grounds = []
         for c in range(Num_Chan):
             zname2, pos, length = zones["LABCOD"]
-            f.seek(pos + code[c] * 128 + 2, 0)
+            # Force code[c], which is currently a uint16 (u2), to a Python int to prevent integer
+            # overflow in the following operation: code[c] * 128 + 2.
+            # An integer overflow here may have side effects including, but not limited to,
+            # repeated channel names.
+            f.seek(pos + int(code[c]) * 128 + 2, 0)

             chan_name = f.read(6).strip(b"\x00").decode("ascii")
             ground = f.read(6).strip(b"\x00").decode("ascii")
@@ -269,7 +272,6 @@ def _event_count(self, block_index, seg_index, event_channel_index):
         return n

     def _get_event_timestamps(self, block_index, seg_index, event_channel_index, t_start, t_stop):
-
         raw_event = self._raw_events[event_channel_index][seg_index]

         # important : all events timing are related to the first segment t_start
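
A quick demonstration of the uint16 overflow that the new comment in _parse_header describes (the channel code value is invented; the wraparound is standard NumPy behavior):

import numpy as np

code = np.array([600], dtype="u2")  # invented channel code stored as uint16

# arithmetic carried out in uint16 wraps modulo 2**16, silently for arrays
wrapped = (code * 128 + 2)[0]
# converting to a Python int first gives the intended file offset
correct = int(code[0]) * 128 + 2

print(wrapped)  # 11266 == (600 * 128 + 2) % 65536 -> seeks into the wrong label record
print(correct)  # 76802

Two different codes that collide modulo 2**16 would seek to the same LABCOD record, which is exactly the repeated-channel-names symptom the comment warns about.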
