Skip to content

Commit 1326298

Browse files
committed
assert -> errors
1 parent 94525a1 commit 1326298

22 files changed

+249
-117
lines changed

neo/io/asciispiketrainio.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
import quantities as pq
1515

1616
from neo.io.baseio import BaseIO
17-
from neo.core import Segment, SpikeTrain
17+
from neo.core import Segment, SpikeTrain, NeoReadWriteError
1818

1919

2020
class AsciiSpikeTrainIO(BaseIO):
@@ -91,7 +91,8 @@ def read_segment(
9191
t_start : time start of all spiketrain 0 by default
9292
unit : unit of spike times, can be a str or directly a Quantities
9393
"""
94-
assert not lazy, "Do not support lazy"
94+
if lazy:
95+
raise NeoReadWriteError("This IO does not support lazy reading")
9596

9697
unit = pq.Quantity(1, unit)
9798

neo/io/brainwaref32io.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
import quantities as pq
3333

3434
# needed core neo modules
35-
from neo.core import Block, Group, Segment, SpikeTrain
35+
from neo.core import Block, Group, Segment, SpikeTrain, NeoReadWriteError
3636

3737
# need to subclass BaseIO
3838
from neo.io.baseio import BaseIO
@@ -127,7 +127,8 @@ def read_block(self, lazy=False, **kargs):
127127
Reads a block from the simple spike data file "fname" generated
128128
with BrainWare
129129
"""
130-
assert not lazy, "Do not support lazy"
130+
if lazy:
131+
raise NeoReadWriteError("This IO does not support lazy reading")
131132

132133
# there are no keyargs implemented to so far. If someone tries to pass
133134
# them they are expecting them to do something or making a mistake,

neo/io/elphyio.py

Lines changed: 46 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@
8787
from neo.io.baseio import BaseIO
8888

8989
# to import from core
90-
from neo.core import Block, Segment, AnalogSignal, Event, SpikeTrain
90+
from neo.core import Block, Segment, AnalogSignal, Event, SpikeTrain, NeoReadWriteError
9191

9292

9393
# --------------------------------------------------------
@@ -328,7 +328,8 @@ def __init__(
328328
super().__init__(layout, episode, number, x_unit, n_events, name)
329329
self.wf_samples = wf_samples
330330
self.wf_sampling_frequency = wf_sampling_frequency
331-
assert wf_sampling_frequency, "bad sampling frequency"
331+
if not wf_sampling_frequency:
332+
raise ValueError("bad sampling frequency")
332333
self.wf_sampling_period = 1.0 / wf_sampling_frequency
333334
self.wf_units = [unit_x_wf, unit_y_wf]
334335
self.t_start = t_start
@@ -532,7 +533,8 @@ def detect_protocol_from_name(self, path):
532533
match = re.search(pattern, path)
533534
if hasattr(match, "end"):
534535
code = codes.get(path[match.end() - 1].lower(), None)
535-
assert code != "m", "multistim file detected"
536+
if code == "m":
537+
raise ValueError("multistim file detected")
536538
return code
537539
elif "spt" in filename.lower():
538540
return "spontaneousactivity"
@@ -897,7 +899,8 @@ def __init__(self, layout):
897899

898900
# extract episode properties
899901
n_channels = read_from_char(fileobj, "B")
900-
assert not ((n_channels < 1) or (n_channels > 16)), "bad number of channels"
902+
if (n_channels < 1) or (n_channels > 16):
903+
raise ValueError(f"`n_channels` must be between 1 and 16. It is currently {n_channels}")
901904
nbpt = read_from_char(fileobj, "h")
902905
l_xu, x_unit = struct.unpack("<B3s", fileobj.read(4))
903906
if hasattr(x_unit, "decode"):
@@ -1001,7 +1004,8 @@ def __init__(self, layout):
10011004

10021005
n_ep = read_from_char(fileobj, "h")
10031006
tpData = read_from_char(fileobj, "h")
1004-
assert tpData in [3, 2, 1, 0], "bad sample size"
1007+
if tpData not in [3, 2, 1, 0]:
1008+
raise ValueError(f"`tpData` must be 3, 2, 1, or 0, but is {tpData}")
10051009
no_analog_data = read_from_char(fileobj, "?")
10061010

10071011
self.n_ep = n_ep
@@ -1784,8 +1788,10 @@ def get_signal(self, episode, channel):
17841788
Return the signal description relative
17851789
to the specified episode and channel.
17861790
"""
1787-
assert episode in range(1, self.n_episodes + 1)
1788-
assert channel in range(1, self.n_channels(episode) + 1)
1791+
if episode not in range(1, self.n_episodes + 1):
1792+
raise ValueError(f"`episode` must be in {range(1, self.n_episodes + 1)}, but is {episode}")
1793+
if channel not in range(1, self.n_channels(episode) + 1):
1794+
raise ValueError(f"`channel` must be in {range(1, self.n_channels(episode)+1)} but is {channel}")
17891795
t_start = 0
17901796
sampling_period = self.sampling_period(episode, channel)
17911797
t_stop = sampling_period * self.n_samples(episode, channel)
@@ -1863,7 +1869,8 @@ def reshape_bytes(self, databytes, reshape, datatypes, order="<"):
18631869
"""
18641870
Reshape a numpy array containing a set of databytes.
18651871
"""
1866-
assert datatypes and len(datatypes) == len(reshape), "datatypes are not well defined"
1872+
if not datatypes or len(datatypes) != len(reshape):
1873+
raise NeoReadWriteError("The `datatypes` are not well-defined")
18671874

18681875
l_bytes = len(databytes)
18691876

@@ -2080,7 +2087,8 @@ def get_episode_blocks(self):
20802087

20812088
def set_info_block(self):
20822089
i_blks = self.get_blocks_of_type("USER INFO")
2083-
assert len(i_blks) < 2, "too many info blocks"
2090+
if len(i_blks) >= 2:
2091+
raise ValueError(f"There are too many info blocks: {i_blks}")
20842092
if len(i_blks):
20852093
self.info_block = i_blks[0]
20862094

@@ -2205,7 +2213,8 @@ def set_episode_blocks(self):
22052213

22062214
def set_info_block(self):
22072215
i_blks = self.get_blocks_of_type("USER INFO")
2208-
assert len(i_blks) < 2, "too many info blocks"
2216+
if len(i_blks) >= 2:
2217+
raise ValueError(f"There are too many info blocks: {i_blks}")
22092218
if len(i_blks):
22102219
self.info_block = i_blks[0]
22112220

@@ -2243,7 +2252,8 @@ def sample_type(self, ep, ch):
22432252

22442253
def sample_size(self, ep, ch):
22452254
size = super().sample_size(ep, ch)
2246-
assert size == 2, "sample size is always 2 bytes for DAC2/GS/2000 format"
2255+
if size != 2:
2256+
raise ValueError(f"sample size is always 2 bytes for DAC2/GS/2000 format, but size is calculated as {size}")
22472257
return size
22482258

22492259
def sampling_period(self, ep, ch):
@@ -2297,10 +2307,12 @@ def file_duration(self):
22972307
return self.main_block.dX * self.n_samples
22982308

22992309
def get_tag(self, episode, tag_channel):
2300-
assert episode in range(1, self.n_episodes + 1)
2310+
if episode not in range(1, self.n_episodes + 1):
2311+
raise ValueError(f"`episode` must be within {range(1, self.n_episodes + 1)} and is {episode}")
23012312
# there are none or 2 tag channels
23022313
if self.tag_mode(episode) == 1:
2303-
assert tag_channel in range(1, 3), "DAC2/GS/2000 format support only 2 tag channels"
2314+
if tag_channel not in range(1, 3):
2315+
raise ValueError("DAC2/GS/2000 format support only 2 tag channels")
23042316
block = self.episode_block(episode)
23052317
t_stop = self.main_block.n_samples * block.dX
23062318
return ElphyTag(self, episode, tag_channel, block.x_unit, 1.0 / block.dX, 0, t_stop)
@@ -2384,7 +2396,8 @@ def set_episode_blocks(self):
23842396
def set_info_block(self):
23852397
# in fact the file info are contained into a single sub-block with an USR identifier
23862398
i_blks = self.get_blocks_of_type("B_Finfo")
2387-
assert len(i_blks) < 2, "too many info blocks"
2399+
if len(i_blks) >= 2:
2400+
raise ValueError(f"There are too many info blocks: {i_blks}")
23882401
if len(i_blks):
23892402
i_blk = i_blks[0]
23902403
sub_blocks = i_blk.sub_blocks
@@ -2658,7 +2671,8 @@ def get_tag(self, episode, tag_channel):
26582671
Return a :class:`ElphyTag` which is a
26592672
descriptor of the specified event channel.
26602673
"""
2661-
assert episode in range(1, self.n_episodes + 1)
2674+
if episode not in range(1, self.n_episodes + 1):
2675+
raise ValueError(f"`episode` must be in {range(1, self.n_episodes + 1)} and is {episode}")
26622676

26632677
# there are none, 2 or 16 tag
26642678
# channels depending on tag_mode
@@ -2669,11 +2683,11 @@ def get_tag(self, episode, tag_channel):
26692683

26702684
# verify the validity of the tag channel
26712685
if tag_mode == 1:
2672-
assert tag_channel in range(1, 3), "Elphy format support only 2 tag channels for tag_mode == 1"
2673-
elif tag_mode == 2:
2674-
assert tag_channel in range(1, 17), "Elphy format support only 16 tag channels for tag_mode == 2"
2675-
elif tag_mode == 3:
2676-
assert tag_channel in range(1, 17), "Elphy format support only 16 tag channels for tag_mode == 3"
2686+
if tag_channel not in range(1, 3):
2687+
raise ValueError("Elphy format support only 2 tag channels for tag_mode == 1")
2688+
elif tag_mode == 2 or tag_mode == 3:
2689+
if tag_channel not in range(1, 17):
2690+
raise ValueError("Elphy format support only 16 tag channels for tag_mode == 2 or tag_mode == 3")
26772691

26782692
smp_period = block.ep_block.dX
26792693
smp_freq = 1.0 / smp_period
@@ -2707,12 +2721,15 @@ def get_event(self, ep, ch, marked_ks):
27072721
Return a :class:`ElphyEvent` which is a
27082722
descriptor of the specified event channel.
27092723
"""
2710-
assert ep in range(1, self.n_episodes + 1)
2711-
assert ch in range(1, self.n_channels + 1)
2724+
if ep not in range(1, self.n_episodes + 1):
2725+
raise ValueError(f"`ep` must be in {range(1, self.n_episodes + 1)}, but is {ep}")
2726+
if ch not in range(1, self.n_channels + 1):
2727+
raise ValueError(f"`ch` must be in {range(1, self.n_channels + 1)}, but is {ch}")
27122728

27132729
# find the event channel number
27142730
evt_channel = np.where(marked_ks == -1)[0][0]
2715-
assert evt_channel in range(1, self.n_events(ep) + 1)
2731+
if evt_channel not in range(1, self.n_events(ep) + 1):
2732+
raise ValueError(f"`evt_channel` must be in {range(1, self.n_events(ep) + 1)}, but is {evt_channel}")
27162733

27172734
block = self.episode_block(ep)
27182735
ep_blocks = self.get_blocks_stored_in_episode(ep)
@@ -2792,8 +2809,10 @@ def get_spiketrain(self, episode, electrode_id):
27922809
Return a :class:`Spike` which is a
27932810
descriptor of the specified spike channel.
27942811
"""
2795-
assert episode in range(1, self.n_episodes + 1)
2796-
assert electrode_id in range(1, self.n_spiketrains(episode) + 1)
2812+
if episode not in range(1, self.n_episodes + 1):
2813+
raise ValueError(f"`episode` must be in {range(1, self.n_episodes + 1)}, but is {episode}")
2814+
if electrode_id not in range(1, self.n_spiketrains(episode) + 1):
2815+
raise ValueError(f"`electrode_id` must be in {range(1, self.n_spiketrains(episode) + 1)}, but is {electrode_id}")
27972816
# get some properties stored in the episode sub-block
27982817
block = self.episode_block(episode)
27992818
x_unit = block.ep_block.x_unit
@@ -3748,7 +3767,8 @@ def read_block(
37483767
lazy : bool
37493768
Postpone actual reading of the file.
37503769
"""
3751-
assert not lazy, "Do not support lazy"
3770+
if lazy:
3771+
raise NeoReadWriteError("This IO does not support lazy reading")
37523772

37533773
# basic
37543774
block = Block(name=None)

neo/io/nixio.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -313,7 +313,8 @@ def _nix_to_neo_block(self, nix_block):
313313
neo_block.segments.append(newseg)
314314
elif grp.type == "neo.group":
315315
newgrp, parent_name = self._nix_to_neo_group(grp)
316-
assert parent_name is None
316+
if parent_name is not None:
317+
raise ValueError(f"`parent_name` must be None and is {parent_name}")
317318
neo_block.groups.append(newgrp)
318319
elif grp.type == "neo.subgroup":
319320
newgrp, parent_name = self._nix_to_neo_group(grp)

neo/io/nwbio.py

Lines changed: 22 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,10 @@ def get_class(module, name):
8686
import pynwb
8787

8888
module_path = module.split(".")
89-
assert len(module_path) == 2 # todo: handle the general case where this isn't 2
89+
if len(module_path) != 2:
90+
raise ValueError(
91+
f"`module_path` must have length 2, not {len(module_path)} ({module_path})"
92+
) # todo: handle the general case where this isn't 2
9093
return getattr(getattr(pynwb, module_path[1]), name)
9194

9295

@@ -139,8 +142,10 @@ def _decompose_unit(unit):
139142
>>> _decompose_unit(pq.mV)
140143
('volt', 0.001)
141144
"""
142-
assert isinstance(unit, pq.quantity.Quantity)
143-
assert unit.magnitude == 1
145+
if not isinstance(unit, pq.quantity.Quantity):
146+
raise TypeError(f"`unit` must be of type pq.quantity.Quantity and not type {type(unit)}")
147+
if unit.magnitude != 1:
148+
raise ValueError(f"The magnitude of the `unit` must be 1 not {unit.magnitude}")
144149
conversion = 1.0
145150

146151
def _decompose(unit):
@@ -240,7 +245,8 @@ def read_all_blocks(self, lazy=False, **kwargs):
240245
"""
241246
import pynwb
242247

243-
assert self.nwb_file_mode in ("r",)
248+
if self.nwb_file_mode not in ("r",):
249+
raise ValueError("`mode` at init needs to be set to 'r' to read files")
244250
self._io_nwb = pynwb.NWBHDF5IO(
245251
self.filename, mode=self.nwb_file_mode, load_namespaces=True
246252
) # Open a file with NWBHDF5IO
@@ -320,7 +326,8 @@ def _read_epochs_group(self, lazy):
320326
epoch = epoch.load()
321327
segment_name = np.unique(segment_names[index])
322328
block_name = np.unique(block_names[index])
323-
assert segment_name.size == block_name.size == 1
329+
if not (segment_name.size == block_name.size == 1):
330+
raise ValueError("the `segment_name` and the `block_name` should be the same")
324331
segment = self._get_segment(block_name[0], segment_name[0])
325332
segment.epochs.append(epoch)
326333
else:
@@ -449,7 +456,8 @@ def write_all_blocks(self, blocks, validate=True, **kwargs):
449456
for i, block in enumerate(blocks):
450457
self._write_block(block)
451458

452-
assert self.nwb_file_mode in ("w",) # possibly expand to 'a'ppend later
459+
if self.nwb_file_mode not in ("w",):
460+
raise ValueError("mode must be 'w' in order to write files") # possibly expand to 'a'ppend later
453461
if self.nwb_file_mode == "w" and os.path.exists(self.filename):
454462
os.remove(self.filename)
455463
io_nwb = pynwb.NWBHDF5IO(self.filename, mode=self.nwb_file_mode)
@@ -483,7 +491,8 @@ def _write_block(self, block):
483491
if not block.name:
484492
block.name = f"block{self.blocks_written}"
485493
for i, segment in enumerate(block.segments):
486-
assert segment.block is block
494+
if segment.block is not block:
495+
raise TypeError(f"segment.block must be block, but it is {segment.block}")
487496
if not segment.name:
488497
segment.name = f"{block.name} : segment{i}"
489498
self._write_segment(self._nwbfile, segment, electrodes)
@@ -513,7 +522,8 @@ def _write_electrodes(self, nwbfile, block):
513522
def _write_segment(self, nwbfile, segment, electrodes):
514523
# maybe use NWB trials to store Segment metadata?
515524
for i, signal in enumerate(chain(segment.analogsignals, segment.irregularlysampledsignals)):
516-
assert signal.segment is segment
525+
if signal.segment is not segment:
526+
raise TypeError(f"signal.segment must be segment and is {signal.segment}")
517527
if hasattr(signal, "name"):
518528
signal.name = f"{segment.name} {signal.name} {i}"
519529
logging.warning(f"Warning signal name exists. New name: {signal.name}")
@@ -522,13 +532,15 @@ def _write_segment(self, nwbfile, segment, electrodes):
522532
self._write_signal(self._nwbfile, signal, electrodes)
523533

524534
for i, train in enumerate(segment.spiketrains):
525-
assert train.segment is segment
535+
if train.segment is not segment:
536+
raise TypeError(f"train.segment must be segment and is {train.segment}")
526537
if not train.name:
527538
train.name = f"{segment.name} : spiketrain{i}"
528539
self._write_spiketrain(self._nwbfile, train)
529540

530541
for i, event in enumerate(segment.events):
531-
assert event.segment is segment
542+
if event.segment is not segment:
543+
raise TypeError(f"event.segment must be segment, but it is {event.segment}")
532544
if hasattr(event, "name"):
533545
event.name = f"{segment.name} {event.name} {i}"
534546
logging.warning(f"Warning event name exists. New name: {event.name}")

neo/io/pickleio.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
import pickle
1414

1515
from neo.io.baseio import BaseIO
16-
from neo.core import Block, Segment, AnalogSignal, SpikeTrain
16+
from neo.core import Block, Segment, AnalogSignal, SpikeTrain, NeoReadWriteError
1717

1818

1919
class PickleIO(BaseIO):
@@ -38,7 +38,8 @@ class PickleIO(BaseIO):
3838
extensions = ["pkl", "pickle"]
3939

4040
def read_block(self, lazy=False):
41-
assert not lazy, "Do not support lazy"
41+
if lazy:
42+
raise NeoReadWriteError("This IO does not support lazy reading")
4243
with open(self.filename, "rb") as fp:
4344
block = pickle.load(fp)
4445
return block

0 commit comments

Comments
 (0)