Skip to content

Commit 33e8bfb

Browse files
committed
Merge with master
2 parents eb25fec + c7480d9 commit 33e8bfb

File tree

4 files changed

+105
-24
lines changed

4 files changed

+105
-24
lines changed
Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
# Publish a tagged commit to Test PyPI, but only when the git tag exactly
# matches neo.__version__ (guards against tagging the wrong version).
name: Release to Test PyPI

on:
  push:
    tags:
      - '*'

jobs:
  release:
    environment: TEST_PYPI_API_TOKEN
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install Tools
        run: |
          python -m pip install --upgrade pip
          pip install setuptools wheel twine build
          pip install .
      - name: Test version/tag correspondence
        id: version-check
        run: |
          neo_version=$(python -c "import neo; print(neo.__version__)")
          echo "$neo_version"
          # BUG FIX: this workflow is triggered by `push` (of a tag), not by a
          # `release` event, so github.event.release.tag_name is always empty
          # here. github.ref_name is the short name of the pushed tag.
          TAG=${{ github.ref_name }}
          echo "$TAG"
          # Quote both sides so empty values fail loudly instead of matching.
          if [[ "$TAG" == "$neo_version" ]]; then
            echo "VERSION_TAG_MATCH=true" >> $GITHUB_OUTPUT
            echo "Version matches tag, proceeding with release to Test PyPI"
          else
            echo "VERSION_TAG_MATCH=false" >> $GITHUB_OUTPUT
            echo "Version does not match tag! Fix this before proceeding."
            exit 1
          fi
      - name: Package and Upload
        env:
          # Same fix as above: github.event.release.tag_name is empty on push.
          # NOTE(review): env var name looks copy-pasted from another project
          # ("STACKMANAGER") — confirm whether anything actually reads it.
          STACKMANAGER_VERSION: ${{ github.ref_name }}
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.TEST_PYPI_API_TOKEN }}
        if: ${{ steps.version-check.outputs.VERSION_TAG_MATCH == 'true' }}
        run: |
          python -m build --sdist --wheel
          twine upload --repository testpypi dist/*
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
# Manually-triggered publish to the real PyPI (run from a checked-out tag).
name: Release to PyPI

on:
  workflow_dispatch:

jobs:
  release:
    environment: PYPI_API_TOKEN
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install Tools
        run: |
          python -m pip install --upgrade pip
          pip install setuptools wheel twine build
      - name: Package and Upload
        env:
          # BUG FIX: this workflow is triggered by workflow_dispatch, not a
          # release event, so github.event.release.tag_name is always empty.
          # github.ref_name is the branch or tag the dispatch was run against.
          # NOTE(review): env var name looks copy-pasted from another project
          # ("STACKMANAGER") — confirm whether anything actually reads it.
          STACKMANAGER_VERSION: ${{ github.ref_name }}
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
        run: |
          python -m build --sdist --wheel
          twine upload dist/*

neo/rawio/biocamrawio.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -148,14 +148,14 @@ def open_biocam_file_header(filename):
148148
if format_100:
149149
if signal_inv == 1:
150150
read_function = readHDF5t_100
151-
elif signal_inv == 1:
151+
elif signal_inv == -1:
152152
read_function = readHDF5t_100_i
153153
else:
154154
raise Exception("Unknown signal inversion")
155155
else:
156156
if signal_inv == 1:
157157
read_function = readHDF5t_101
158-
elif signal_inv == 1:
158+
elif signal_inv == -1:
159159
read_function = readHDF5t_101_i
160160
else:
161161
raise Exception("Unknown signal inversion")

neo/rawio/openephysrawio.py

Lines changed: 29 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -171,27 +171,31 @@ def _parse_header(self):
171171
self._sig_length[seg_index] = all_sigs_length[0]
172172
self._sig_timestamp0[seg_index] = all_first_timestamps[0]
173173

174-
signal_channels = np.array(signal_channels, dtype=_signal_channel_dtype)
175-
self._sig_sampling_rate = signal_channels['sampling_rate'][0] # unique for channel
176-
177-
# split channels in stream depending the name CHxxx ADCxxx
178-
chan_stream_ids = [name[:2] if name.startswith('CH') else name[:3]
179-
for name in signal_channels['name']]
180-
signal_channels['stream_id'] = chan_stream_ids
181-
182-
# and create streams channels (keep natural order 'CH' first)
183-
stream_ids, order = np.unique(chan_stream_ids, return_index=True)
184-
stream_ids = stream_ids[np.argsort(order)]
185-
signal_streams = [(f'Signals {stream_id}', f'{stream_id}') for stream_id in stream_ids]
186-
signal_streams = np.array(signal_streams, dtype=_signal_stream_dtype)
187-
174+
if len(signal_channels) > 0:
175+
signal_channels = np.array(signal_channels, dtype=_signal_channel_dtype)
176+
self._sig_sampling_rate = signal_channels['sampling_rate'][0] # unique for channel
177+
178+
# split channels in stream depending the name CHxxx ADCxxx
179+
chan_stream_ids = [name[:2] if name.startswith('CH') else name[:3]
180+
for name in signal_channels['name']]
181+
signal_channels['stream_id'] = chan_stream_ids
182+
183+
# and create streams channels (keep natural order 'CH' first)
184+
stream_ids, order = np.unique(chan_stream_ids, return_index=True)
185+
stream_ids = stream_ids[np.argsort(order)]
186+
signal_streams = [(f'Signals {stream_id}', f'{stream_id}') for stream_id in stream_ids]
187+
signal_streams = np.array(signal_streams, dtype=_signal_stream_dtype)
188+
else:
189+
signal_streams = np.array([])
188190
# scan for spikes files
189191
spike_channels = []
190192

191193
if len(info['spikes']) > 0:
192-
194+
self._first_spk_timestamps = []
195+
self._last_spk_timestamps = []
193196
self._spikes_memmap = {}
194-
for seg_index, oe_index in enumerate(oe_indices):
197+
oe_indices_spk = sorted(list(info['spikes'].keys()))
198+
for seg_index, oe_index in enumerate(oe_indices_spk):
195199
self._spikes_memmap[seg_index] = {}
196200
for spike_filename in info['spikes'][oe_index]:
197201
fullname = os.path.join(self.dirname, spike_filename)
@@ -207,6 +211,9 @@ def _parse_header(self):
207211
dtype=spikes_dtype)
208212
self._spikes_memmap[seg_index][name] = data_spike
209213

214+
self._first_spk_timestamps.append(data_spike[0]['timestamp'])
215+
self._last_spk_timestamps.append(data_spike[-1]['timestamp'])
216+
210217
# In each file 'sorted_id' indicate the number of cluster so number of units
211218
# so need to scan file for all segment to get units
212219
self._spike_sampling_rate = None
@@ -335,9 +342,9 @@ def _get_spike_slice(self, seg_index, unit_index, t_start, t_stop):
335342
data_spike = self._spikes_memmap[seg_index][name]
336343

337344
if t_start is None:
338-
t_start = self._segment_t_start(0, seg_index)
345+
t_start = self._first_spk_timestamps[seg_index]
339346
if t_stop is None:
340-
t_stop = self._segment_t_stop(0, seg_index)
347+
t_stop = self._last_spk_timestamps[seg_index]
341348
ts0 = int(t_start * self._spike_sampling_rate)
342349
ts1 = int(t_stop * self._spike_sampling_rate)
343350

@@ -489,11 +496,11 @@ def explore_folder(dirname):
489496
if (seg_index + 1) > info['nb_segment']:
490497
info['nb_segment'] += 1
491498
elif filename.endswith('.spikes'):
492-
s = filename.replace('.spikes', '').split('_')
493-
if len(s) == 1:
494-
seg_index = 0
499+
s = re.findall(r"(_\d+)$", filename.replace('.spikes', ''))
500+
if s:
501+
seg_index = int(s[0][1:]) - 1
495502
else:
496-
seg_index = int(s[1]) - 1
503+
seg_index = 0
497504
if seg_index not in info['spikes'].keys():
498505
info['spikes'][seg_index] = []
499506
info['spikes'][seg_index].append(filename)

0 commit comments

Comments
 (0)