Skip to content

Commit 94525a1

Browse files
committed
add neoreadwriteerror + more asserts
1 parent 684f537 commit 94525a1

File tree

14 files changed

+103
-52
lines changed

14 files changed

+103
-52
lines changed

neo/core/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,8 @@
5151
from neo.core.view import ChannelView
5252
from neo.core.group import Group
5353

54+
from neo.core.baseneo import NeoReadWriteError
55+
5456
# Block should always be first in this list
5557
objectlist = [
5658
Block,

neo/core/baseneo.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,9 @@
3434
class MergeError(Exception):
3535
pass
3636

37+
class NeoReadWriteError(IOError):
38+
pass
39+
3740

3841
def _check_annotations(value):
3942
"""
@@ -68,7 +71,8 @@ def merge_annotation(a, b):
6871
For strings: concatenate with ';'
6972
Otherwise: fail if the annotations are not equal
7073
"""
71-
assert type(a) == type(b), f"type({a})) {type(a)} != type({b}) {type(b)}"
74+
if type(a) != type(b):
75+
raise TypeError(f"type({a}) {type(a)} != type({b}) {type(b)}")
7276
if isinstance(a, dict):
7377
return merge_annotations(a, b)
7478
elif isinstance(a, np.ndarray): # concatenate b to a
@@ -81,7 +85,8 @@ def merge_annotation(a, b):
8185
else:
8286
return a + ";" + b
8387
else:
84-
assert a == b, f"{a} != {b}"
88+
if a != b:
89+
raise ValueError(f"{a} != {b}")
8590
return a
8691

8792

@@ -131,7 +136,8 @@ def intersect_annotations(A, B):
131136

132137
for key in set(A.keys()) & set(B.keys()):
133138
v1, v2 = A[key], B[key]
134-
assert type(v1) == type(v2), f"type({v1}) {type(v1)} != type({v2}) {type(v2)}"
139+
if type(v1) != type(v2):
140+
raise TypeError(f"type({v1}) {type(v1)} != type({v2}) {type(v2)}")
135141
if isinstance(v1, dict) and v1 == v2:
136142
result[key] = deepcopy(v1)
137143
elif isinstance(v1, str) and v1 == v2:

neo/io/neomatlabio.py

Lines changed: 13 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@
3939
PolygonRegionOfInterest,
4040
objectnames,
4141
class_by_name,
42+
NeoReadWriteError,
4243
)
4344
from neo.core.regionofinterest import RegionOfInterest
4445
from neo.core.baseneo import _container_name
@@ -262,7 +263,8 @@ def read_block(self, lazy=False):
262263
"""
263264
import scipy.io
264265

265-
assert not lazy, "Does not support lazy"
266+
if lazy:
267+
raise NeoReadWriteError(f"This IO does not support lazy reading")
266268

267269
d = scipy.io.loadmat(self.filename, struct_as_record=False, squeeze_me=True, mat_dtype=True)
268270
if "block" not in d:
@@ -420,7 +422,8 @@ def create_ob_from_struct(self, struct, classname):
420422
value = getattr(struct, attr[0])
421423
if i == 0:
422424
# this is a bit hacky, should really add an attribute _view_attr to ChannelView and RegionOfInterest
423-
assert isinstance(value, int) # object id
425+
if not isinstance(value, int): # object id
426+
raise TypeError(f"value must be int not of type {type(value)}")
424427
kwargs[attr[0]] = _Ref(identifier=value, target_class_name=struct.viewed_classname)
425428
else:
426429
if attr[1] == np.ndarray and isinstance(value, int):
@@ -531,13 +534,18 @@ def _resolve_references(self, bl):
531534
container = getattr(grp, container_name)
532535
for i, item in enumerate(container):
533536
if isinstance(item, _Ref):
534-
assert isinstance(item.identifier, (int, np.integer))
537+
if not isinstance(item.identifier, (int, np.integer)):
538+
raise TypeError(
539+
f"item.identifier must be either int or np.integer not of type {type(item.identifier)}"
540+
)
535541
# A reference to an object that already exists
536542
container[i] = obj_lookup[item.identifier]
537543
else:
538544
# ChannelView and RegionOfInterest
539-
assert item.is_view
540-
assert isinstance(item.obj, _Ref)
545+
if not item.is_view:
546+
raise TypeError(f"`item` must be a view")
547+
if not isinstance(item.obj, _Ref):
548+
raise TypeError(f"`item.obj` must be a {_Ref} and is of type {type(item.obj)}")
541549
item.obj = obj_lookup[item.obj.identifier]
542550

543551

neo/rawio/axonrawio.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,7 @@
5353
_spike_channel_dtype,
5454
_event_channel_dtype,
5555
)
56+
from neo.core import NeoReadWriteError
5657

5758
import numpy as np
5859

@@ -124,7 +125,8 @@ def _parse_header(self):
124125
elif version >= 2.0:
125126
mode = info["protocol"]["nOperationMode"]
126127

127-
assert mode in [1, 2, 3, 5], f"Mode {mode} is not supported"
128+
if mode not in [1, 2, 3, 5]:
129+
raise NeoReadWriteError(f"Mode {mode} is not currently supported in Neo")
128130
# event-driven variable-length mode (mode 1)
129131
# event-driven fixed-length mode (mode 2 or 5)
130132
# gap free mode (mode 3) can be in several episodes

neo/rawio/blackrockrawio.py

Lines changed: 14 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,8 @@
7373
_event_channel_dtype,
7474
)
7575

76+
from neo.core import NeoReadWriteError
77+
7678

7779
class BlackrockRawIO(BaseRawIO):
7880
"""
@@ -330,15 +332,15 @@ def _parse_header(self):
330332
else:
331333
raise (ValueError("nsx_to_load is wrong"))
332334

333-
assert all(
334-
nsx_nb in self._avail_nsx for nsx_nb in self.nsx_to_load
335-
), "nsx_to_load do not match available nsx list"
335+
if not all(nsx_nb in self._avail_nsx for nsx_nb in self.nsx_to_load):
336+
raise FileNotFoundError(f"nsx_to_load does not match available nsx list")
336337

337338
# check that all files come from the same specification
338339
all_spec = [self.__nsx_spec[nsx_nb] for nsx_nb in self.nsx_to_load]
339340
if self._avail_files["nev"]:
340341
all_spec.append(self.__nev_spec)
341-
assert all(all_spec[0] == spec for spec in all_spec), "Files don't have the same internal version"
342+
if not all(all_spec[0] == spec for spec in all_spec):
343+
raise NeoReadWriteError("Files don't have the same internal version")
342344

343345
if len(self.nsx_to_load) > 0 and self.__nsx_spec[self.nsx_to_load[0]] == "2.1" and not self._avail_files["nev"]:
344346
pass
@@ -401,9 +403,8 @@ def _parse_header(self):
401403

402404
# check nb segment per nsx
403405
nb_segments_for_nsx = [len(self.nsx_datas[nsx_nb]) for nsx_nb in self.nsx_to_load]
404-
assert all(
405-
nb == nb_segments_for_nsx[0] for nb in nb_segments_for_nsx
406-
), "Segment nb not consistent across nsX files"
406+
if not all(nb == nb_segments_for_nsx[0] for nb in nb_segments_for_nsx):
407+
raise NeoReadWriteError("Segment nb not consistent across nsX files")
407408
self._nb_segment = nb_segments_for_nsx[0]
408409

409410
self.__delete_empty_segments()
@@ -1263,11 +1264,12 @@ def __match_nsx_and_nev_segment_ids(self, nsx_nb):
12631264
ev_ids[mask_after_seg] += 1
12641265

12651266
# consistency check: same number of segments for nsx and nev data
1266-
assert nb_possible_nev_segments == len(nonempty_nsx_segments), (
1267-
f"Inconsistent ns{nsx_nb} and nev file. {nb_possible_nev_segments} "
1268-
f"segments present in .nev file, but {len(nonempty_nsx_segments)} in "
1269-
"ns{nsx_nb} file."
1270-
)
1267+
if nb_possible_nev_segments != len(nonempty_nsx_segments):
1268+
raise NeoReadWriteError(
1269+
f"Inconsistent ns{nsx_nb} and nev file. {nb_possible_nev_segments} "
1270+
f"segments present in .nev file, but {len(nonempty_nsx_segments)} in "
1271+
f"ns{nsx_nb} file."
1272+
)
12711273

12721274
new_nev_segment_id_mapping = dict(zip(range(nb_possible_nev_segments), sorted(list(nonempty_nsx_segments))))
12731275

neo/rawio/intanrawio.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
from packaging.version import Version as V
2525

2626
import numpy as np
27+
from neo.core import NeoReadWriteError
2728

2829
from .baserawio import (
2930
BaseRawIO,
@@ -166,9 +167,10 @@ def _parse_header(self):
166167
time_stream_index = max(self._raw_data.keys())
167168
timestamp = self._raw_data[time_stream_index][0]
168169

169-
assert np.all(np.diff(timestamp) == 1), (
170-
"Timestamp have gaps, this could be due " "to a corrupted file or an inappropriate file merge"
171-
)
170+
if not np.all(np.diff(timestamp) == 1):
171+
raise NeoReadWriteError(
172+
f"Timestamp have gaps, this could be due to a corrupted file or an inappropriate file merge"
173+
)
172174

173175
# signals
174176
signal_channels = []
@@ -448,7 +450,8 @@ def read_rhs(filename):
448450
if bool(group_info["signal_group_enabled"]):
449451
for c in range(group_info["channel_num"]):
450452
chan_info = read_variable_header(f, rhs_signal_channel_header)
451-
assert chan_info["signal_type"] not in (1, 2)
453+
if chan_info["signal_type"] in (1, 2):
454+
raise NeoReadWriteError("signal_type of 1 or 2 is not yet implemented in Neo")
452455
if bool(chan_info["channel_enabled"]):
453456
channels_by_type[chan_info["signal_type"]].append(chan_info)
454457

neo/rawio/maxwellrawio.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,8 @@
3131
_event_channel_dtype,
3232
)
3333

34+
from neo.core import NeoReadWriteError
35+
3436
import numpy as np
3537

3638

@@ -94,7 +96,8 @@ def _parse_header(self):
9496
f"Possible rec_names: {unique_rec_names}"
9597
)
9698
else:
97-
assert self.rec_name in unique_rec_names, f"rec_name {self.rec_name} not found"
99+
if self.rec_name not in unique_rec_names:
100+
raise NeoReadWriteError(f"rec_name {self.rec_name} not found")
98101
else:
99102
self.rec_name = unique_rec_names[0]
100103
# add streams that contain the selected rec_name

neo/rawio/openephysrawio.py

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,8 @@
2121
_event_channel_dtype,
2222
)
2323

24+
from neo.core import NeoReadWriteError
25+
2426

2527
RECORD_SIZE = 1024
2628
HEADER_SIZE = 1024
@@ -143,9 +145,8 @@ def _parse_header(self):
143145

144146
if channel_has_gaps:
145147
# protect against strange timestamp block like in file 'OpenEphys_SampleData_3' CH32
146-
assert (
147-
np.median(diff) == RECORD_SIZE
148-
), f"This file has a non valid data block size for channel {chan_id}, this case cannot be handled"
148+
if not np.median(diff) == RECORD_SIZE:
149+
raise NeoReadWriteError(f"This file has a non valid data block size for channel {chan_id}, this case cannot be handled")
149150

150151
if seg_index == 0:
151152
# add in channel list
@@ -195,7 +196,8 @@ def _parse_header(self):
195196
last = all_last_timestamps[0]
196197

197198
# check unique sampling rate
198-
assert all(all_samplerate[0] == e for e in all_samplerate), "Not all signals have the same sample rate"
199+
if not all(all_samplerate[0] == e for e in all_samplerate):
200+
raise NeoReadWriteError("Not all signals have the same sample rate")
199201

200202
self._sig_length[seg_index] = last - first
201203
self._sig_timestamp0[seg_index] = first
@@ -250,7 +252,8 @@ def _parse_header(self):
250252
if self._spike_sampling_rate is None:
251253
self._spike_sampling_rate = spike_info["sampleRate"]
252254
else:
253-
assert self._spike_sampling_rate == spike_info["sampleRate"], "mismatch in spike sampling rate"
255+
if self._spike_sampling_rate != spike_info["sampleRate"]:
256+
raise ValueError("There is a mismatch in spike sampling rate")
254257

255258
# scan all to detect several all unique(sorted_ids)
256259
all_sorted_ids = []

neo/rawio/phyrawio.py

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -197,11 +197,13 @@ def _parse_header(self):
197197
spiketrain_an["pc_feature_ind"] = self._pc_feature_ind[unique_templates]
198198

199199
def _segment_t_start(self, block_index, seg_index):
200-
assert block_index == 0
200+
if block_index != 0:
201+
raise ValueError("`block_index` must be 0")
201202
return self._t_start
202203

203204
def _segment_t_stop(self, block_index, seg_index):
204-
assert block_index == 0
205+
if block_index != 0:
206+
raise ValueError("`block_index` must be 0")
205207
return self._t_stop
206208

207209
def _get_signal_size(self, block_index, seg_index, channel_indexes=None):
@@ -214,16 +216,19 @@ def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, chann
214216
return None
215217

216218
def _spike_count(self, block_index, seg_index, spike_channel_index):
217-
assert block_index == 0
219+
if block_index != 0:
220+
raise ValueError("`block_index` must be 0")
218221
spikes = self._spike_clusters
219222
unit_label = self.unit_labels[spike_channel_index]
220223
mask = spikes == unit_label
221224
nb_spikes = np.sum(mask)
222225
return nb_spikes
223226

224227
def _get_spike_timestamps(self, block_index, seg_index, spike_channel_index, t_start, t_stop):
225-
assert block_index == 0
226-
assert seg_index == 0
228+
if block_index != 0:
229+
raise ValueError("`block_index` must be 0")
230+
if seg_index != 0:
231+
raise ValueError("`seg_index` must be 0")
227232

228233
unit_label = self.unit_labels[spike_channel_index]
229234
mask = self._spike_clusters == unit_label

neo/rawio/rawbinarysignalrawio.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -143,11 +143,13 @@ def _segment_t_stop(self, block_index, seg_index):
143143
return t_stop
144144

145145
def _get_signal_size(self, block_index, seg_index, stream_index):
146-
assert stream_index == 0
146+
if stream_index != 0:
147+
raise ValueError("stream_index must be 0")
147148
return self._raw_signals.shape[0]
148149

149150
def _get_signal_t_start(self, block_index, seg_index, stream_index):
150-
assert stream_index == 0
151+
if stream_index != 0:
152+
raise ValueError("stream_index must be 0")
151153
return 0.0
152154

153155
def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, stream_index, channel_indexes):

0 commit comments

Comments (0)