Commit 98f35b3

Merge branch 'master' into micromed
2 parents: 0d6dc09 + 2e9b86f

File tree: 13 files changed (+264, -49 lines)

.github/workflows/io-test.yml

Lines changed: 0 additions & 7 deletions

@@ -114,13 +114,6 @@ jobs:
       # run: |
       #   pip install --no-dependencies -e .

-      - name: Install wine
-        run: |
-          sudo rm -f /etc/apt/sources.list.d/microsoft-prod.list
-          sudo dpkg --add-architecture i386
-          sudo apt-get update -qq
-          sudo apt-get install -yqq --allow-downgrades libc6:i386 libgcc-s1:i386 libstdc++6:i386 wine
-
       - name: Pip list
         run: |
           pip list
Lines changed: 109 additions & 0 deletions

@@ -0,0 +1,109 @@
+name: NeoPlexon2Test
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "0 12 * * 0"  # weekly at noon UTC on Sundays
+
+jobs:
+  build-and-test:
+    name: Test on (${{ inputs.os }}) (${{ matrix.python-version }}) (${{ matrix.numpy-version }})
+    runs-on: ${{ inputs.os }}
+    strategy:
+      fail-fast: true
+      matrix:
+        python-version: ['3.9', '3.12']
+        numpy-version: ['1.26', '2.0']
+    defaults:
+      # by default run in bash mode (required for conda usage)
+      run:
+        shell: bash -l {0}
+    steps:
+
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Get current year-month
+        id: date
+        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
+
+      - name: Get ephy_testing_data current head hash
+        # the key depends on the last commit of the repo https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git
+        id: ephy_testing_data
+        run: |
+          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
+
+      - uses: actions/cache/restore@v4
+        # Loading cache of ephys_testing_dataset
+        id: cache-datasets
+        with:
+          path: ~/ephy_testing_data
+          key: ${{ runner.os }}-datasets-${{ steps.ephy_testing_data.outputs.dataset_hash }}
+          restore-keys: ${{ runner.os }}-datasets-
+
+      - uses: conda-incubator/setup-miniconda@v3
+        with:
+          activate-environment: neo-test-env-${{ matrix.python-version }}
+          python-version: "${{ matrix.python-version }}"
+
+      - name: Install testing dependencies
+        # testing environment is only created from yml if no cache was found
+        # restore-key hits should result in `cache-hit` == 'false'
+        #if: steps.cache-conda-env.outputs.cache-hit != 'true'
+        run: |
+          conda install pip numpy=${{ matrix.numpy-version }} -c conda-forge
+          # this command is for updating the cache. We are testing its removal.
+          # conda env update --name neo-test-env-${{ matrix.python-version }} --file environment_testing.yml --prune
+
+      - name: Install git-annex
+        # this is the trick from the spikeinterface repo for getting git-annex to work with datalad
+        # see https://github.com/SpikeInterface/spikeinterface/pull/3877 for more info
+        shell: bash
+        run: |
+          pip install datalad-installer
+          datalad-installer --sudo ok git-annex --method datalad/packages
+          git config --global filter.annex.process "git-annex filter-process"  # recommended for efficiency
+
+      - name: Configure git
+        run: |
+          git config --global user.email "neo_ci@fake_mail.com"
+          git config --global user.name "neo CI"
+
+      - name: Python version
+        run: |
+          which python
+          python --version
+
+      - name: Install neo including dependencies
+        # installation with dependencies is only required if no cache was found
+        # restore-key hits should result in `cache-hit` == 'false'
+        # if: steps.cache-conda-env.outputs.cache-hit != 'true'
+        run: |
+          pip install --upgrade -e .
+          pip install .[test]
+
+      - name: Install wine
+        run: |
+          sudo rm -f /etc/apt/sources.list.d/microsoft-prod.list
+          sudo dpkg --add-architecture i386
+          sudo apt-get update -qq
+          sudo apt-get install -yqq --allow-downgrades libc6:i386 libgcc-s1:i386 libstdc++6:i386 wine
+
+      - name: Pip list
+        run: |
+          pip list
+
+      - name: Conda list
+        run: |
+          conda list
+
+      - name: Test with pytest
+        env:
+          HDF5_PLUGIN_PATH: ${{ github.workspace }}/hdf5_local_plugin_path
+          PLEXON2_TEST: true
+        run: |
+          # only neo.rawio and neo.io
+          pytest --cov=neo neo/test/rawiotest
+          pytest --cov=neo neo/test/iotest
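The env block of the final step does the real work here: HDF5_PLUGIN_PATH points HDF5 at the locally installed MaxWell decompression plugin, and PLEXON2_TEST opts the suite into the Plexon 2 tests. As a hypothetical sketch (not neo's actual test code) of how such a flag is typically consumed on the pytest side:

    import os

    import pytest

    # Hypothetical sketch: gate tests on the PLEXON2_TEST flag exported by the workflow.
    requires_plexon2 = pytest.mark.skipif(
        os.environ.get("PLEXON2_TEST", "false").lower() != "true",
        reason="set PLEXON2_TEST=true to enable Plexon2 tests",
    )

    @requires_plexon2
    def test_plexon2_read():
        ...  # the real tests live under neo/test/rawiotest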

doc/source/authors.rst

Lines changed: 5 additions & 3 deletions

@@ -71,8 +71,8 @@ and may not be the current affiliation of a contributor.
 * Daniel P. Crepeau [30]
 * Divyansh Gupta [31]
 * Nate Dolensek [3]
-* Philipp Hornauer [32]
-* Robert Wolff
+* Philipp Hornauer [32, 45]
+* Robert Wolff [42]
 * Jules Lebert [33]
 * Benjamin Heasly
 * Baptiste Grimaud [34]
@@ -94,6 +94,7 @@ and may not be the current affiliation of a contributor.
 * Nina Kudryashova [37]
 * Rémi Proville [44]
 * Paul Adkisson [24]
+* Luiz Tauffer [24]

 1. Centre de Recherche en Neuroscience de Lyon, CNRS UMR5292 - INSERM U1028 - Université Claude Bernard Lyon 1
 2. Unité de Neuroscience, Information et Complexité, CNRS UPR 3293, Gif-sur-Yvette, France
@@ -136,9 +137,10 @@ and may not be the current affiliation of a contributor.
 39. Massachusetts General Hospital, Department of Molecular Biology
 40. Plexon Inc.
 41. Paris Brain Institute
-42. Istituto Italiano di Tecnologia (IIT), Italy
+42. Istituto Italiano di Tecnologia (IIT), Genoa, Italy
 43. University of Genoa, Italy
 44. AquiNeuro, SAS
+45. Maxwell Biosystems AG
doc/source/governance.rst

Lines changed: 2 additions & 0 deletions

@@ -38,6 +38,7 @@ The current maintainers are:
 - Julia Sprenger (`@JuliaSprenger`_)
 - Michael Denker (`@mdenker`_)
 - Alessio Buccino (`@alejoe91`_)
+- Zach McKenzie (`@zm711`_)


 .. _`Neo maintainers team`: https://github.com/orgs/NeuralEnsemble/teams/neo-maintainers
@@ -47,3 +48,4 @@ The current maintainers are:
 .. _`@JuliaSprenger`: https://github.com/JuliaSprenger
 .. _`@mdenker`: https://github.com/mdenker
 .. _`@alejoe91`: https://github.com/alejoe91
+.. _`@zm711`: https://github.com/zm711

neo/core/spiketrainlist.py

Lines changed: 4 additions & 2 deletions

@@ -113,8 +113,10 @@ def __init__(self, items=None, parent=None):
         self._channel_id_array = None
         self._all_channel_ids = None
         self._spiketrain_metadata = {}
-        if parent is not None and parent.__class__.__name__ != "Segment":
-            raise AttributeError("The parent class must be a Segment")
+
+        from .segment import Segment  # import here rather than at the top to avoid circular import
+        if parent is not None and not isinstance(parent, Segment):
+            raise AttributeError("If provided, the parent class must be a Segment")
         self.segment = parent

     @property
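Two things change in this hunk: the string comparison on `parent.__class__.__name__` becomes a real `isinstance` check, which (unlike the string match) also accepts subclasses of Segment, and the import is deferred into the constructor to break the import cycle between the two modules. A minimal sketch of the deferred-import pattern, with hypothetical module names rather than neo's actual files:

    # a.py -- hypothetical sketch, not neo's actual modules
    class A:
        def __init__(self, parent=None):
            # importing at call time instead of at module top avoids the cycle
            # that would occur if b.py also imported a.py at import time
            from b import B

            if parent is not None and not isinstance(parent, B):
                raise AttributeError("If provided, the parent must be a B")
            self.parent = parent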

neo/rawio/blackrockrawio.py

Lines changed: 27 additions & 5 deletions

@@ -597,10 +597,17 @@ def _parse_header(self):
         for c in range(spike_channels.size):
             st_ann = seg_ann["spikes"][c]
             channel_id, unit_id = self.internal_unit_ids[c]
-            unit_tag = {0: "unclassified", 255: "noise"}.get(unit_id, str(unit_id))
             st_ann["channel_id"] = channel_id
             st_ann["unit_id"] = unit_id
-            st_ann["unit_tag"] = unit_tag
+            if unit_id == 0:
+                st_ann["unit_classification"] = "unclassified"
+            elif 1 <= unit_id <= 16:
+                st_ann["unit_classification"] = "sorted"
+            elif unit_id == 255:
+                st_ann["unit_classification"] = "noise"
+            else:  # 17-254 are reserved
+                st_ann["unit_classification"] = "reserved"
+            st_ann["unit_tag"] = st_ann["unit_classification"]
             st_ann["description"] = f"SpikeTrain channel_id: {channel_id}, unit_id: {unit_id}"
             st_ann["file_origin"] = self._filenames["nev"] + ".nev"

@@ -1058,7 +1065,10 @@ def __read_nsx_data_variant_a(self, nsx_nb):
         filename = ".".join([self._filenames["nsx"], f"ns{nsx_nb}"])

         # get shape of data
-        shape = (int(self.__nsx_params["2.1"](nsx_nb)["nb_data_points"]), int(self.__nsx_basic_header[nsx_nb]["channel_count"]))
+        shape = (
+            int(self.__nsx_params["2.1"](nsx_nb)["nb_data_points"]),
+            int(self.__nsx_basic_header[nsx_nb]["channel_count"]),
+        )
         offset = int(self.__nsx_params["2.1"](nsx_nb)["bytes_in_headers"])

         # read nsx data
@@ -1251,7 +1261,19 @@ def __read_nev_data(self, nev_data_masks, nev_data_types):
         # read all raw data packets and markers
         dt0 = [("timestamp", ts_format), ("packet_id", "uint16"), ("value", f"S{data_size - header_skip}")]

-        raw_data = np.memmap(filename, offset=header_size, dtype=dt0, mode="r")
+        # expected number of data packets. We are not sure why, but it seems we can get partial data packets.
+        # Based on Blackrock's own code this is okay, so rounding down with int() is necessary to obtain a
+        # memory map of full packets and toss the partial packet.
+        # See reference: https://github.com/BlackrockNeurotech/Python-Utilities/blob/fa75aa671680306788e10d3d8dd625f9da4ea4f6/brpylib/brpylib.py#L580-L587
+        n_packets = int((self.__get_file_size(filename) - header_size) / data_size)
+
+        raw_data = np.memmap(
+            filename,
+            offset=header_size,
+            dtype=dt0,
+            shape=(n_packets,),
+            mode="r",
+        )

         masks = self.__nev_data_masks(raw_data["packet_id"])
         types = self.__nev_data_types(data_size)
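The packet-count arithmetic is plain floor division: trailing bytes that do not form a whole packet are dropped before the memmap is built, and the explicit `shape` keeps `np.memmap` from refusing a file whose length is not an exact multiple of the record size. A self-contained sketch with made-up sizes, not from a real NEV file:

    import numpy as np

    # hypothetical sizes
    header_size = 344
    data_size = 104                  # bytes per packet
    file_size = 344 + 10 * 104 + 37  # 10 full packets plus a 37-byte partial packet

    n_packets = int((file_size - header_size) / data_size)
    assert n_packets == 10           # the partial packet is tossed

    # a record dtype with the same 104-byte itemsize as the packets above
    dt0 = np.dtype([("timestamp", "uint32"), ("packet_id", "uint16"), ("value", "S98")])
    assert dt0.itemsize == data_size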
@@ -1794,7 +1816,7 @@ def __nev_params(self, param_name):
                 hour=self.__nev_basic_header["hour"],
                 minute=self.__nev_basic_header["minute"],
                 second=self.__nev_basic_header["second"],
-                microsecond=self.__nev_basic_header["millisecond"],
+                microsecond=int(self.__nev_basic_header["millisecond"]) * 1000,
             ),
             "max_res": self.__nev_basic_header["timestamp_resolution"],
             "channel_ids": self.__nev_ext_header[b"NEUEVWAV"]["electrode_id"],

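The last hunk is a unit fix: `datetime`'s `microsecond` argument expects microseconds, but the NEV basic header stores milliseconds, so the old code produced timestamps off by a factor of 1000. A quick illustration with an arbitrary date:

    from datetime import datetime

    millisecond = 123  # as stored in the NEV basic header

    wrong = datetime(2024, 1, 1, 12, 0, 0, microsecond=millisecond)
    right = datetime(2024, 1, 1, 12, 0, 0, microsecond=int(millisecond) * 1000)

    print(wrong)  # 2024-01-01 12:00:00.000123 -> 123 us, not 123 ms
    print(right)  # 2024-01-01 12:00:00.123000 -> the intended 123 ms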
neo/rawio/intanrawio.py

Lines changed: 1 addition & 1 deletion

@@ -35,7 +35,7 @@
     _signal_buffer_dtype,
     _spike_channel_dtype,
     _event_channel_dtype,
-    )
+)


 class IntanRawIO(BaseRawIO):

neo/rawio/maxwellrawio.py

Lines changed: 20 additions & 14 deletions

@@ -242,18 +242,20 @@ def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, strea
             raise (e)


-_hdf_maxwell_error = """Maxwell file format is based on HDF5.
-The internal compression requires a custom plugin!!!
-This is a big pain for the end user.
-You, as a end user, should ask Maxwell company to change this.
-Please visit this page and install the missing decompression libraries:
-https://share.mxwbio.com/d/4742248b2e674a85be97/
-Then, link the decompression library by setting the `HDF5_PLUGIN_PATH` to your
-installation location, e.g. via
+_hdf_maxwell_error = """The MaxWell file compression requires a custom plugin.
+You can use the auto_install_maxwell_hdf5_compression_plugin() function or
+(if it fails) install it manually:
+Download the missing decompression library:
+https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35
+Then, link the decompression library by setting the `HDF5_PLUGIN_PATH` to its location,
+e.g. directly in Python via:
 os.environ['HDF5_PLUGIN_PATH'] = '/path/to/custom/hdf5/plugin/'
-
-Alternatively, you can use the auto_install_maxwell_hdf5_compression_plugin() below
-function that do it automagically.
+or in your shell via:
+export HDF5_PLUGIN_PATH=/path/to/custom/hdf5/plugin/
+You can also set the `HDF5_PLUGIN_PATH` environment variable in your shell
+configuration file (e.g. .bashrc, .bash_profile, .zshrc, etc.) to make it
+permanent.
+See https://mxw.bio/MxW_Doc_Installing_Decompression_Library_to_load_MaxLab_Live_Recordings for more details.
 """


@@ -267,13 +269,17 @@ def auto_install_maxwell_hdf5_compression_plugin(hdf5_plugin_path=None, force_do
     hdf5_plugin_path.mkdir(exist_ok=True)

     if platform.system() == "Linux":
-        remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FLinux%2Flibcompression.so&dl=1"
+        remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FLinux%2Flibcompression.so&dl=1"
         local_lib = hdf5_plugin_path / "libcompression.so"
     elif platform.system() == "Darwin":
-        remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FMacOS%2Flibcompression.dylib&dl=1"
+        if platform.machine() == "arm64":
+            remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FMacOS%2FMac_arm64%2Flibcompression.dylib&dl=1"
+        else:
+            # Assuming x86_64 for MacOS
+            remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FMacOS%2FMac_x86_64%2Flibcompression.dylib&dl=1"
         local_lib = hdf5_plugin_path / "libcompression.dylib"
     elif platform.system() == "Windows":
-        remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FWindows%2Fcompression.dll&dl=1"
+        remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FWindows%2Fcompression.dll&dl=1"
         local_lib = hdf5_plugin_path / "compression.dll"

     if not force_download and local_lib.is_file():
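Either route ends with `HDF5_PLUGIN_PATH` set before the file is opened. A hedged usage sketch (the recording path is hypothetical; the class and installer names are the real ones from this module):

    # Option 1: let neo fetch the right library for this platform
    from neo.rawio.maxwellrawio import auto_install_maxwell_hdf5_compression_plugin
    auto_install_maxwell_hdf5_compression_plugin()

    # Option 2: point HDF5 at an already-installed plugin (before opening the file)
    # import os
    # os.environ["HDF5_PLUGIN_PATH"] = "/path/to/custom/hdf5/plugin/"

    from neo.rawio import MaxwellRawIO

    reader = MaxwellRawIO("recording.raw.h5")  # hypothetical file path
    reader.parse_header()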

neo/rawio/micromedrawio.py

Lines changed: 5 additions & 3 deletions

@@ -51,7 +51,6 @@ def __init__(self, filename=""):
         self.filename = filename

     def _parse_header(self):
-
         with open(self.filename, "rb") as fid:
             f = StructFile(fid)

@@ -133,7 +132,11 @@ def _parse_header(self):
         sig_grounds = []
         for c in range(Num_Chan):
             zname2, pos, length = zones["LABCOD"]
-            f.seek(pos + code[c] * 128 + 2, 0)
+            # Force code[c], which is currently a uint16 (u2), to an int to prevent integer overflow
+            # in the following operation: code[c] * 128 + 2.
+            # An integer overflow here may have side effects including, but not limited
+            # to, repeated channel names.
+            f.seek(pos + int(code[c]) * 128 + 2, 0)

             chan_name = f.read(6).strip(b"\x00").decode("ascii")
             ground = f.read(6).strip(b"\x00").decode("ascii")
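The overflow is easy to reproduce: `code[c]` comes out of a `u2` (uint16) array, and NumPy keeps uint16 arithmetic modulo 65536, so a large channel code silently wraps to a wrong seek offset. A minimal demonstration with a made-up code value:

    import numpy as np

    code = np.array([600], dtype="u2")  # hypothetical channel code from the header

    bad = code[0] * 128 + 2        # uint16 arithmetic: 76802 % 65536 == 11266, a bogus offset
    good = int(code[0]) * 128 + 2  # Python int arithmetic: 76802, the correct offset

    print(bad, good)  # 11266 76802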
@@ -274,7 +277,6 @@ def _event_count(self, block_index, seg_index, event_channel_index):
         return n

     def _get_event_timestamps(self, block_index, seg_index, event_channel_index, t_start, t_stop):
-
         raw_event = self._raw_events[event_channel_index][seg_index]

         # important : all events timing are related to the first segment t_start
