Skip to content

Commit 8bc3087

Browse files
committed
Merge remote-tracking branch 'refs/remotes/origin/add_one_box_reading' into add_one_box_reading
2 parents ceec396 + fb1cb2d commit 8bc3087

File tree

8 files changed

+140
-29
lines changed

8 files changed

+140
-29
lines changed

.github/workflows/io-test.yml

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -114,13 +114,6 @@ jobs:
114114
# run: |
115115
# pip install --no-dependencies -e .
116116

117-
- name: Install wine
118-
run: |
119-
sudo rm -f /etc/apt/sources.list.d/microsoft-prod.list
120-
sudo dpkg --add-architecture i386
121-
sudo apt-get update -qq
122-
sudo apt-get install -yqq --allow-downgrades libc6:i386 libgcc-s1:i386 libstdc++6:i386 wine
123-
124117
- name: Pip list
125118
run: |
126119
pip list
Lines changed: 109 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,109 @@
1+
name: NeoPlexon2Test
2+
3+
on:
4+
workflow_dispatch:
5+
schedule:
6+
- cron: "0 12 * * 0" # weekly at noon UTC on Sundays
7+
8+
9+
jobs:
10+
build-and-test:
11+
name: Test on (${{ inputs.os }}) (${{ matrix.python-version }}) (${{ matrix.numpy-version }})
12+
runs-on: ${{ inputs.os }}
13+
strategy:
14+
fail-fast: true
15+
matrix:
16+
python-version: ['3.9', '3.12']
17+
numpy-version: ['1.26', '2.0']
18+
defaults:
19+
# by default run in bash mode (required for conda usage)
20+
run:
21+
shell: bash -l {0}
22+
steps:
23+
24+
- name: Checkout repository
25+
uses: actions/checkout@v4
26+
27+
- name: Get current year-month
28+
id: date
29+
run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
30+
31+
- name: Get ephy_testing_data current head hash
32+
# the key depend on the last commit repo https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git
33+
id: ephy_testing_data
34+
run: |
35+
echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
36+
37+
- uses: actions/cache/restore@v4
38+
# Loading cache of ephys_testing_dataset
39+
id: cache-datasets
40+
with:
41+
path: ~/ephy_testing_data
42+
key: ${{ runner.os }}-datasets-${{ steps.ephy_testing_data.outputs.dataset_hash }}
43+
restore-keys: ${{ runner.os }}-datasets-
44+
45+
- uses: conda-incubator/setup-miniconda@v3
46+
with:
47+
activate-environment: neo-test-env-${{ matrix.python-version }}
48+
python-version: "${{ matrix.python-version }}"
49+
50+
- name: Install testing dependencies
51+
# testing environment is only created from yml if no cache was found
52+
# restore-key hits should result in `cache-hit` == 'false'
53+
#if: steps.cache-conda-env.outputs.cache-hit != 'true'
54+
run: |
55+
conda install pip numpy=${{ matrix.numpy-version }} -c conda-forge
56+
# this command is for updating cache. We are testing removal.
57+
# conda env update --name neo-test-env-${{ matrix.python-version }} --file environment_testing.yml --prune
58+
59+
- name: Install git-annex
60+
# this is the trick from the spikeinterface repo for getting git-annex to work with datalad
61+
# see https://github.com/SpikeInterface/spikeinterface/pull/3877 for more info
62+
shell: bash
63+
run: |
64+
pip install datalad-installer
65+
datalad-installer --sudo ok git-annex --method datalad/packages
66+
git config --global filter.annex.process "git-annex filter-process" # recommended for efficiency
67+
68+
- name: Configure git
69+
run: |
70+
git config --global user.email "neo_ci@fake_mail.com"
71+
git config --global user.name "neo CI"
72+
73+
- name: Python version
74+
run: |
75+
which python
76+
python --version
77+
78+
- name: Install neo including dependencies
79+
# installation with dependencies is only required if no cache was found
80+
# restore-key hits should result in `cache-hit` == 'false'
81+
# if: steps.cache-conda-env.outputs.cache-hit != 'true'
82+
run: |
83+
pip install --upgrade -e .
84+
pip install .[test]
85+
86+
87+
- name: Install wine
88+
run: |
89+
sudo rm -f /etc/apt/sources.list.d/microsoft-prod.list
90+
sudo dpkg --add-architecture i386
91+
sudo apt-get update -qq
92+
sudo apt-get install -yqq --allow-downgrades libc6:i386 libgcc-s1:i386 libstdc++6:i386 wine
93+
94+
- name: Pip list
95+
run: |
96+
pip list
97+
98+
- name: Conda list
99+
run: |
100+
conda list
101+
102+
- name: Test with pytest
103+
env:
104+
HDF5_PLUGIN_PATH: ${{ github.workspace }}/hdf5_local_plugin_path
105+
PLEXON2_TEST: true
106+
run: |
107+
# only neo.rawio and neo.io
108+
pytest --cov=neo neo/test/rawiotest
109+
pytest --cov=neo neo/test/iotest

doc/source/authors.rst

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ and may not be the current affiliation of a contributor.
7171
* Daniel P. Crepeau [30]
7272
* Divyansh Gupta [31]
7373
* Nate Dolensek [3]
74-
* Philipp Hornauer [32]
74+
* Philipp Hornauer [32, 45]
7575
* Robert Wolff [42]
7676
* Jules Lebert [33]
7777
* Benjamin Heasly
@@ -140,6 +140,7 @@ and may not be the current affiliation of a contributor.
140140
42. Istituto Italiano di Tecnologia (IIT), Genoa, Italy
141141
43. University of Genoa, Italy
142142
44. AquiNeuro, SAS
143+
45. Maxwell Biosystems AG
143144

144145

145146

neo/rawio/blackrockrawio.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -607,7 +607,7 @@ def _parse_header(self):
607607
st_ann["unit_classification"] = "noise"
608608
else: # 17-254 are reserved
609609
st_ann["unit_classification"] = "reserved"
610-
st_ann['unit_tag'] = st_ann['unit_classification']
610+
st_ann["unit_tag"] = st_ann["unit_classification"]
611611
st_ann["description"] = f"SpikeTrain channel_id: {channel_id}, unit_id: {unit_id}"
612612
st_ann["file_origin"] = self._filenames["nev"] + ".nev"
613613

neo/rawio/maxwellrawio.py

Lines changed: 20 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -242,18 +242,20 @@ def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, strea
242242
raise (e)
243243

244244

245-
_hdf_maxwell_error = """Maxwell file format is based on HDF5.
246-
The internal compression requires a custom plugin!!!
247-
This is a big pain for the end user.
248-
You, as a end user, should ask Maxwell company to change this.
249-
Please visit this page and install the missing decompression libraries:
250-
https://share.mxwbio.com/d/4742248b2e674a85be97/
251-
Then, link the decompression library by setting the `HDF5_PLUGIN_PATH` to your
252-
installation location, e.g. via
245+
_hdf_maxwell_error = """The MaxWell file compression requires a custom plugin.
246+
You can use the auto_install_maxwell_hdf5_compression_plugin() function or
247+
(if it fails) install it manually:
248+
Download the missing decompression library:
249+
https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35
250+
Then, link the decompression library by setting the `HDF5_PLUGIN_PATH` to its location,
251+
e.g. directly in Python via:
253252
os.environ['HDF5_PLUGIN_PATH'] = '/path/to/custom/hdf5/plugin/'
254-
255-
Alternatively, you can use the auto_install_maxwell_hdf5_compression_plugin() below
256-
function that do it automagically.
253+
or in your shell via:
254+
export HDF5_PLUGIN_PATH=/path/to/custom/hdf5/plugin/
255+
You can also set the `HDF5_PLUGIN_PATH` environment variable in your shell
256+
configuration file (e.g. .bashrc, .bash_profile, .zshrc, etc.) to make it
257+
permanent.
258+
See https://mxw.bio/MxW_Doc_Installing_Decompression_Library_to_load_MaxLab_Live_Recordings for more details.
257259
"""
258260

259261

@@ -267,13 +269,17 @@ def auto_install_maxwell_hdf5_compression_plugin(hdf5_plugin_path=None, force_do
267269
hdf5_plugin_path.mkdir(exist_ok=True)
268270

269271
if platform.system() == "Linux":
270-
remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FLinux%2Flibcompression.so&dl=1"
272+
remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FLinux%2Flibcompression.so&dl=1"
271273
local_lib = hdf5_plugin_path / "libcompression.so"
272274
elif platform.system() == "Darwin":
273-
remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FMacOS%2Flibcompression.dylib&dl=1"
275+
if platform.machine() == "arm64":
276+
remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FMacOS%2FMac_arm64%2Flibcompression.dylib&dl=1"
277+
else:
278+
# Assuming x86_64 for MacOS
279+
remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FMacOS%2FMac_x86_64%2Flibcompression.dylib&dl=1"
274280
local_lib = hdf5_plugin_path / "libcompression.dylib"
275281
elif platform.system() == "Windows":
276-
remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FWindows%2Fcompression.dll&dl=1"
282+
remote_lib = "https://share.mxwbio.com/d/7f2d1e98a1724a1b8b35/files/?p=%2FWindows%2Fcompression.dll&dl=1"
277283
local_lib = hdf5_plugin_path / "compression.dll"
278284

279285
if not force_download and local_lib.is_file():

neo/rawio/spikeglxrawio.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -609,7 +609,7 @@ def extract_stream_info(meta_file, meta):
609609
if (
610610
"imDatPrb_type" not in meta
611611
or meta["imDatPrb_type"] == "0"
612-
or meta["imDatPrb_type"] in ("1015", "1016", "1022", "1030", "1031", "1032", "1100", "1121", "1300")
612+
or meta["imDatPrb_type"] in ("1015", "1016", "1022", "1030", "1031", "1032", "1100", "1121", "1123", "1300")
613613
):
614614
# This work with NP 1.0 case with different metadata versions
615615
# https://github.com/billkarsh/SpikeGLX/blob/15ec8898e17829f9f08c226bf04f46281f106e5f/Markdown/Metadata_30.md

neo/test/iotest/test_plexon2io.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
"""
44

55
import unittest
6+
import os
67

78
from neo.io import Plexon2IO
89
from neo.test.iotest.common_io_test import BaseTestIO
@@ -17,8 +18,9 @@
1718
except (ImportError, TimeoutError):
1819
HAVE_PYPL2 = False
1920

21+
TEST_PLEXON2 = bool(os.getenv("PLEXON2_TEST"))
2022

21-
@unittest.skipUnless(HAVE_PYPL2, "requires pypl package and all its dependencies")
23+
@unittest.skipUnless(HAVE_PYPL2 and TEST_PLEXON2, "requires pypl package, all its dependencies, and the PLEXON2_TEST env variable")
2224
class TestPlexon2IO(BaseTestIO, unittest.TestCase):
2325
entities_to_download = TestPlexon2RawIO.entities_to_download
2426
entities_to_test = TestPlexon2RawIO.entities_to_test

neo/test/rawiotest/test_plexon2rawio.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,13 @@
11
"""
2-
Tests of neo.rawio.mearecrawio
2+
Tests of neo.rawio.plexon2
33
44
"""
55

66
import unittest
7+
import os
78

89
from neo.rawio.plexon2rawio import Plexon2RawIO
9-
1010
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
11-
1211
from numpy.testing import assert_equal
1312

1413
try:
@@ -18,8 +17,9 @@
1817
except (ImportError, TimeoutError):
1918
HAVE_PYPL2 = False
2019

20+
TEST_PLEXON2 = bool(os.getenv("PLEXON2_TEST"))
2121

22-
@unittest.skipUnless(HAVE_PYPL2, "requires pypl package and all its dependencies")
22+
@unittest.skipUnless(HAVE_PYPL2 and TEST_PLEXON2, "requires pypl package, all its dependencies, and the PLEXON2_TEST env variable")
2323
class TestPlexon2RawIO(
2424
BaseTestRawIO,
2525
unittest.TestCase,

0 commit comments

Comments
 (0)