diff --git a/doc/old_stuffs/grouping.rst b/doc/old_stuffs/grouping.rst index c66875244..350b05d75 100644 --- a/doc/old_stuffs/grouping.rst +++ b/doc/old_stuffs/grouping.rst @@ -34,7 +34,6 @@ Using :class:`ChannelIndex`:: block = Block() segment = Segment() - segment.block = block block.segments.append(segment) for i in (0, 1): @@ -55,7 +54,6 @@ Using array annotations, we annotate the channels of the :class:`AnalogSignal` d block = Block() segment = Segment() - segment.block = block block.segments.append(segment) for i in (0, 1): @@ -77,7 +75,6 @@ Each :class:`ChannelIndex` also contains the list of channels on which that neur block = Block(name="probe data") segment = Segment() - segment.block = block block.segments.append(segment) # create 4-channel AnalogSignal with dummy data @@ -119,7 +116,6 @@ Using :class:`ChannelView` and :class:`Group`:: block = Block(name="probe data") segment = Segment() - segment.block = block block.segments.append(segment) # create 4-channel AnalogSignal with dummy data diff --git a/doc/old_stuffs/io_developers_guide.rst b/doc/old_stuffs/io_developers_guide.rst index c99c41974..3e4368849 100644 --- a/doc/old_stuffs/io_developers_guide.rst +++ b/doc/old_stuffs/io_developers_guide.rst @@ -38,7 +38,6 @@ Miscellaneous ============= * If your IO supports several versions of a format (like ABF1, ABF2), upload to the gin.g-node.org test file repository all file versions possible. (for test coverage). - * :py:func:`neo.core.Block.create_many_to_one_relationship` offers a utility to complete the hierarchy when all one-to-many relationships have been created. * In the docstring, explain where you obtained the file format specification if it is a closed one. * If your IO is based on a database mapper, keep in mind that the returned object MUST be detached, because this object can be written to another url for copying. 
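Note: the doc changes above drop the explicit "segment.block = block" assignments and the create_many_to_one_relationship() utility because, with the ObjectList-based containers introduced later in this diff (neo/core/objectlist.py, neo/core/container.py), appending a child to its parent's container sets the parent reference automatically, and check_relationships() only verifies the links. A minimal usage sketch, assuming those changes are applied (object names here are illustrative):

    import numpy as np
    import quantities as pq
    from neo.core import Block, Segment, AnalogSignal

    block = Block(name="demo")
    segment = Segment(name="seg0")
    block.segments.append(segment)        # ObjectList sets segment.block = block
    signal = AnalogSignal(np.random.rand(1000, 4), units="mV",
                          sampling_rate=1 * pq.kHz)
    segment.analogsignals.append(signal)  # ObjectList sets signal.segment = segment

    assert segment.block is block
    assert signal.segment is segment

    # check_relationships() replaces the old create_many_to_one_relationship();
    # it asserts that the parent links are consistent rather than creating them.
    block.check_relationships()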
diff --git a/doc/source/images/generate_diagram.py b/doc/source/images/generate_diagram.py index f1c6c0f4e..358372462 100644 --- a/doc/source/images/generate_diagram.py +++ b/doc/source/images/generate_diagram.py @@ -28,7 +28,6 @@ def get_rect_height(name, obj): nlines = 1.5 nlines += len(getattr(obj, '_all_attrs', [])) nlines += len(getattr(obj, '_single_child_objects', [])) - nlines += len(getattr(obj, '_multi_child_objects', [])) return nlines * line_heigth @@ -74,9 +73,7 @@ def generate_diagram(rect_pos, rect_width, figsize): alpha = [1., 1., 0.3] for name, pos in rect_pos.items(): obj = objs[name] - relationships = [getattr(obj, '_single_child_objects', []), - getattr(obj, '_multi_child_objects', []), - getattr(obj, '_child_properties', [])] + relationships = [getattr(obj, '_single_child_objects', [])] for r in range(3): for ch_name in relationships[r]: @@ -122,16 +119,6 @@ def generate_diagram(rect_pos, rect_width, figsize): facecolor='c', edgecolor='k', alpha=.5) ax.add_patch(rect) - # multi relationship - relationship = list(getattr(obj, '_multi_child_objects', [])) - pos2 = (pos[1] + htotal - line_heigth * (1.5 + len(relationship)) - - rect_height) - rect_height = len(relationship) * line_heigth - - rect = Rectangle((pos[0], pos2), rect_width, rect_height, - facecolor='m', edgecolor='k', alpha=.5) - ax.add_patch(rect) - # necessary attr pos2 = (pos[1] + htotal - line_heigth * (1.5 + len(allrelationship) + len(obj._necessary_attrs))) diff --git a/doc/source/scripts/multi_tetrode_example.py b/doc/source/scripts/multi_tetrode_example.py index e0f0ce842..7310e97b1 100644 --- a/doc/source/scripts/multi_tetrode_example.py +++ b/doc/source/scripts/multi_tetrode_example.py @@ -35,7 +35,6 @@ # Create dummy data, one segment at a time for segment in block.segments: - segment.block = block # create two 4-channel AnalogSignals with dummy data signals = { @@ -46,8 +45,6 @@ } if store_signals: segment.analogsignals.extend(signals.values()) - for signal in signals: - signal.segment = segment # create spike trains with dummy data # we will pretend the spikes have been extracted from the dummy signal @@ -56,7 +53,6 @@ spiketrain = SpikeTrain(np.random.uniform(0, 100, size=30) * ms, t_stop=100 * ms) # assign each spiketrain to the appropriate segment segment.spiketrains.append(spiketrain) - spiketrain.segment = segment # assign each spiketrain to a given neuron current_group = next(iter_group) current_group.add(spiketrain) diff --git a/examples/generated_data.py b/examples/generated_data.py index a77d86873..120d988b3 100644 --- a/examples/generated_data.py +++ b/examples/generated_data.py @@ -58,7 +58,6 @@ def generate_block(n_segments=3, n_channels=4, n_units=3, seg.spiketrains.append(train) u.spiketrains.append(train) - block.create_many_to_one_relationship() return block diff --git a/neo/core/block.py b/neo/core/block.py index 79bfb34cb..baa6f64f4 100644 --- a/neo/core/block.py +++ b/neo/core/block.py @@ -10,6 +10,10 @@ from datetime import datetime from neo.core.container import Container, unique_objs +from neo.core.group import Group +from neo.core.objectlist import ObjectList +from neo.core.regionofinterest import RegionOfInterest +from neo.core.segment import Segment class Block(Container): @@ -64,7 +68,6 @@ class Block(Container): ''' _container_child_objects = ('Segment', 'Group') - _child_properties = () _recommended_attrs = ((('file_datetime', datetime), ('rec_datetime', datetime), ('index', int)) + @@ -86,9 +89,27 @@ def __init__(self, name=None, description=None, file_origin=None, 
self.file_datetime = file_datetime self.rec_datetime = rec_datetime self.index = index - self.regionsofinterest = [] # temporary workaround. - # the goal is to store all sub-classes of RegionOfInterest in a single list - # but this will need substantial changes to container handling + self._segments = ObjectList(Segment, parent=self) + self._groups = ObjectList(Group, parent=self) + self._regionsofinterest = ObjectList(RegionOfInterest, parent=self) + + segments = property( + fget=lambda self: self._get_object_list("_segments"), + fset=lambda self, value: self._set_object_list("_segments", value), + doc="list of Segments contained in this block" + ) + + groups = property( + fget=lambda self: self._get_object_list("_groups"), + fset=lambda self, value: self._set_object_list("_groups", value), + doc="list of Groups contained in this block" + ) + + regionsofinterest = property( + fget=lambda self: self._get_object_list("_regionsofinterest"), + fset=lambda self, value: self._set_object_list("_regionsofinterest", value), + doc="list of RegionOfInterest objects contained in this block" + ) @property def data_children_recur(self): diff --git a/neo/core/container.py b/neo/core/container.py index 9e5e28ce0..89554a139 100644 --- a/neo/core/container.py +++ b/neo/core/container.py @@ -7,6 +7,7 @@ from copy import deepcopy from neo.core.baseneo import BaseNeo, _reference_name, _container_name +from neo.core.objectlist import ObjectList from neo.core.spiketrain import SpikeTrain from neo.core.spiketrainlist import SpikeTrainList @@ -103,9 +104,7 @@ class Container(BaseNeo): Each class can define one or more of the following class attributes (in addition to those of BaseNeo): :_container_child_objects: Neo container objects that can be children - of this object. This attribute is used in - cases where the child can only have one - parent of this type. An instance attribute + of this object. An instance attribute named class.__name__.lower()+'s' will be automatically defined to hold this child and will be initialized to an empty list. @@ -114,45 +113,24 @@ class Container(BaseNeo): class.__name__.lower()+'s' will be automatically defined to hold this child and will be initialized to an empty list. - :_multi_child_objects: Neo container objects that can be children - of this object. This attribute is used in - cases where the child can have multiple - parents of this type. An instance attribute - named class.__name__.lower()+'s' will be - automatically defined to hold this child and - will be initialized to an empty list. - :_child_properties: Properties that return sub-children of a particular - type. These properties must still be defined. - This is mostly used for generate_diagram. :_repr_pretty_containers: The names of containers attributes printed - when pretty-printing using iPython. + when pretty-printing using IPython. The following helper properties are available (in addition to those of BaseNeo): - :_single_child_objects: All neo container objects that can be children - of this object and where the child can only - have one parent of this type. + :_child_objects: All neo objects that can be children of this object. :_container_child_objects: + :_data_child_objects: - :_child_objects: All child objects. 
- :_single_child_objects: + :_multi_child_objects: :_container_child_containers: The names of the container attributes used to store :_container_child_objects: :_data_child_containers: The names of the container attributes used to store :_data_child_objects: :_single_child_containers: The names of the container attributes used to store :_single_child_objects: - :_multi_child_containers: The names of the container attributes used - to store :_multi_child_objects: - :_child_containers: All child container attributes. - :_single_child_containers: + - :_multi_child_containers: + :_child_containers: All child container attributes. Same as :_single_child_containers: :_single_children: All objects that are children of the current object where the child can only have one parent of this type. - :_multi_children: All objects that are children of the current object - where the child can have multiple parents of - this type. :data_children: All data objects that are children of the current object. :container_children: All container objects that are children of @@ -181,20 +159,9 @@ class Container(BaseNeo): object recursively that are of a particular class. - :create_many_to_one_relationship(**args): For each child of the current - object that can only have a - single parent, set its parent - to be the current object. - - :create_many_to_many_relationship(**args): For children of the current - object that can have more - than one parent of this - type, put the current - object in the parent list. - - :create_relationship(**args): Combines - :create_many_to_one_relationship: and - :create_many_to_many_relationship: + :create_relationship(**args): For each child of the current + object, set its parent + to be the current object. :merge(**args): Annotations are merged based on the rules of :merge_annotations:. Child objects with the same name @@ -214,14 +181,10 @@ class Container(BaseNeo): 2) process its non-universal recommended arguments (in its __new__ or __init__ method """ - # Child objects that are a container and have a single parent + # Child objects that are a container _container_child_objects = () - # Child objects that have data and have a single parent + # Child objects that have data _data_child_objects = () - # Child objects that can have multiple parents - _multi_child_objects = () - # Properties returning children of children [of children...] 
- _child_properties = () # Containers that are listed when pretty-printing _repr_pretty_containers = () @@ -233,14 +196,35 @@ def __init__(self, name=None, description=None, file_origin=None, super().__init__(name=name, description=description, file_origin=file_origin, **annotations) - # initialize containers - for container in self._child_containers: - setattr(self, container, []) + def _get_object_list(self, name): + """ + Return the container's ObjectList with the given (private) attribute name + + Example: + >>> segment._get_object_list("_analogsignals") + """ + return getattr(self, name) + + def _set_object_list(self, name, value): + """ + Set the contents of the container's ObjectList with the given (private) attribute name + + Example: + >>> segment._set_object_list("_analogsignals", [sig1, sig2]) + """ + if isinstance(value, list): + object_list = getattr(self, name) + object_list.clear() + object_list.extend(value) + elif isinstance(value, ObjectList): # from __iadd__ + setattr(self, name, value) + else: + raise TypeError("value must be a list or an ObjectList") @property - def _single_child_objects(self): + def _child_objects(self): """ - Child objects that have a single parent. + Return the names of the classes that can be children of this container. """ return self._container_child_objects + self._data_child_objects @@ -262,34 +246,12 @@ def _data_child_containers(self): self._data_child_objects]) @property - def _single_child_containers(self): + def _child_containers(self): """ Containers for child objects with a single parent. """ return tuple([_container_name(child) for child in - self._single_child_objects]) - - @property - def _multi_child_containers(self): - """ - Containers for child objects that can have multiple parents. - """ - return tuple([_container_name(child) for child in - self._multi_child_objects]) - - @property - def _child_objects(self): - """ - All types for child objects. - """ - return self._single_child_objects + self._multi_child_objects - - @property - def _child_containers(self): - """ - All containers for child objects. - """ - return self._single_child_containers + self._multi_child_containers + self._child_objects]) @property def _single_children(self): @@ -297,16 +259,7 @@ def _single_children(self): All child objects that can only have single parents. """ childs = [list(getattr(self, attr)) for attr in - self._single_child_containers] - return tuple(sum(childs, [])) - - @property - def _multi_children(self): - """ - All child objects that can have multiple parents. - """ - childs = [list(getattr(self, attr)) for attr in - self._multi_child_containers] + self._child_containers] return tuple(sum(childs, [])) @property @@ -326,8 +279,7 @@ def container_children(self): Not recursive.
""" childs = [list(getattr(self, attr)) for attr in - self._container_child_containers + - self._multi_child_containers] + self._container_child_containers] return tuple(sum(childs, [])) @property @@ -417,11 +369,7 @@ def filter(self, targdict=None, data=True, container=False, recursive=True, data = True container = True - if objects == SpikeTrain: - children = SpikeTrainList() - else: - children = [] - + children = [] # get the objects we want if data: if recursive: @@ -434,8 +382,12 @@ def filter(self, targdict=None, data=True, container=False, recursive=True, else: children.extend(self.container_children) - return filterdata(children, objects=objects, - targdict=targdict, **kwargs) + filtered = filterdata(children, objects=objects, + targdict=targdict, **kwargs) + if objects == SpikeTrain: + return SpikeTrainList(items=filtered) + else: + return filtered def list_children_by_class(self, cls): """ @@ -452,80 +404,40 @@ def list_children_by_class(self, cls): objs.extend(getattr(child, container_name, [])) return objs - def create_many_to_one_relationship(self, force=False, recursive=True): + def check_relationships(self, recursive=True): """ - For each child of the current object that can only have a single - parent, set its parent to be the current object. - - Usage: - >>> a_block.create_many_to_one_relationship() - >>> a_block.create_many_to_one_relationship(force=True) - - If the current object is a :class:`Block`, you want to run - populate_RecordingChannel first, because this will create new objects - that this method will link up. - - If force is True overwrite any existing relationships - If recursive is True descend into child objects and create - relationships there + Check that the expected child-parent relationships exist. """ parent_name = _reference_name(self.__class__.__name__) for child in self._single_children: - if (hasattr(child, parent_name) and - getattr(child, parent_name) is None or force): - setattr(child, parent_name, self) - if recursive: - for child in self.container_children: - child.create_many_to_one_relationship(force=force, - recursive=True) - - def create_many_to_many_relationship(self, append=True, recursive=True): - """ - For children of the current object that can have more than one parent - of this type, put the current object in the parent list. - - If append is True add it to the list, otherwise overwrite the list. - If recursive is True descend into child objects and create - relationships there - """ - parent_name = _container_name(self.__class__.__name__) - for child in self._multi_children: - if not hasattr(child, parent_name): - continue - if append: - target = getattr(child, parent_name) - if self not in target: - target.append(self) - continue - setattr(child, parent_name, [self]) - + if hasattr(child, "proxy_for"): + container = getattr(self, _container_name(child.proxy_for.__name__)) + else: + container = getattr(self, _container_name(child.__class__.__name__)) + if container.parent is not None: + assert getattr(child, parent_name, None) is self if recursive: for child in self.container_children: - child.create_many_to_many_relationship(append=append, - recursive=True) + child.check_relationships(recursive=True) - def create_relationship(self, force=False, append=True, recursive=True): + def create_relationship(self, force=False, recursive=True): """ For each child of the current object that can only have a single parent, set its parent to be the current object. 
- For children of the current object that can have more than one parent - of this type, put the current object in the parent list. - - If the current object is a :class:`Block`, you want to run - populate_RecordingChannel first, because this will create new objects - that this method will link up. + For children of the current object, put the current object in the parent list. If force is True overwrite any existing relationships - If append is True add it to the list, otherwise overwrite the list. If recursive is True descend into child objects and create relationships there """ - self.create_many_to_one_relationship(force=force, recursive=False) - self.create_many_to_many_relationship(append=append, recursive=False) + parent_name = _reference_name(self.__class__.__name__) + for child in self._single_children: + if (hasattr(child, parent_name) and + getattr(child, parent_name) is None or force): + setattr(child, parent_name, self) if recursive: for child in self.container_children: - child.create_relationship(force=force, append=append, - recursive=True) + child.create_relationship(force=force, recursive=True) def __deepcopy__(self, memo): """ @@ -569,8 +481,7 @@ def merge(self, other): after the merge operation and should not be used further. """ # merge containers with the same name - for container in (self._container_child_containers + - self._multi_child_containers): + for container in self._container_child_containers: lookup = {obj.name: obj for obj in getattr(self, container)} ids = [id(obj) for obj in getattr(self, container)] for obj in getattr(other, container): diff --git a/neo/core/group.py b/neo/core/group.py index f55598e24..8f176ad5e 100644 --- a/neo/core/group.py +++ b/neo/core/group.py @@ -8,6 +8,16 @@ from os import close from neo.core.container import Container +from neo.core.analogsignal import AnalogSignal +from neo.core.container import Container +from neo.core.objectlist import ObjectList +from neo.core.epoch import Epoch +from neo.core.event import Event +from neo.core.imagesequence import ImageSequence +from neo.core.irregularlysampledsignal import IrregularlySampledSignal +from neo.core.segment import Segment +from neo.core.spiketrainlist import SpikeTrainList +from neo.core.view import ChannelView class Group(Container): @@ -48,13 +58,82 @@ def __init__(self, objects=None, name=None, description=None, file_origin=None, allowed_types=None, **annotations): super().__init__(name=name, description=description, file_origin=file_origin, **annotations) + + # note that we create the ObjectLists here _without_ a parent argument + # since objects do not have a reference to the group(s) + # they are contained in. + self._analogsignals = ObjectList(AnalogSignal) + self._irregularlysampledsignals = ObjectList(IrregularlySampledSignal) + self._spiketrains = SpikeTrainList() + self._events = ObjectList(Event) + self._epochs = ObjectList(Epoch) + self._channelviews = ObjectList(ChannelView) + self._imagesequences = ObjectList(ImageSequence) + self._segments = ObjectList(Segment) # to remove? 
+ self._groups = ObjectList(Group) + if allowed_types is None: self.allowed_types = None else: self.allowed_types = tuple(allowed_types) + if objects: self.add(*objects) + analogsignals = property( + fget=lambda self: self._get_object_list("_analogsignals"), + fset=lambda self, value: self._set_object_list("_analogsignals", value), + doc="list of AnalogSignals contained in this group" + ) + + irregularlysampledsignals = property( + fget=lambda self: self._get_object_list("_irregularlysampledsignals"), + fset=lambda self, value: self._set_object_list("_irregularlysampledsignals", value), + doc="list of IrregularlySignals contained in this group" + ) + + events = property( + fget=lambda self: self._get_object_list("_events"), + fset=lambda self, value: self._set_object_list("_events", value), + doc="list of Events contained in this group" + ) + + epochs = property( + fget=lambda self: self._get_object_list("_epochs"), + fset=lambda self, value: self._set_object_list("_epochs", value), + doc="list of Epochs contained in this group" + ) + + channelviews = property( + fget=lambda self: self._get_object_list("_channelviews"), + fset=lambda self, value: self._set_object_list("_channelviews", value), + doc="list of ChannelViews contained in this group" + ) + + imagesequences = property( + fget=lambda self: self._get_object_list("_imagesequences"), + fset=lambda self, value: self._set_object_list("_imagesequences", value), + doc="list of ImageSequences contained in this group" + ) + + spiketrains = property( + fget=lambda self: self._get_object_list("_spiketrains"), + fset=lambda self, value: self._set_object_list("_spiketrains", value), + doc="list of SpikeTrains contained in this group" + ) + + segments = property( + fget=lambda self: self._get_object_list("_segments"), + fset=lambda self, value: self._set_object_list("_segments", value), + doc="list of Segments contained in this group" + ) + + groups = property( + fget=lambda self: self._get_object_list("_groups"), + fset=lambda self, value: self._set_object_list("_groups", value), + doc="list of Groups contained in this group" + ) + @property def _container_lookup(self): return { diff --git a/neo/core/objectlist.py b/neo/core/objectlist.py new file mode 100644 index 000000000..5dbf7d3f8 --- /dev/null +++ b/neo/core/objectlist.py @@ -0,0 +1,117 @@ +""" +This module implements the ObjectList class, which is used to peform type checks +and handle relationships within the Neo Block-Segment-Data hierarchy. +""" + +import sys +from neo.core.baseneo import BaseNeo + + +class ObjectList: + """ + This class behaves like a list, but has additional functionality + to handle relationships within Neo hierarchy, and perform type checks. + """ + + def __init__(self, allowed_contents, parent=None): + # validate allowed_contents and normalize it to a tuple + if isinstance(allowed_contents, type) and issubclass(allowed_contents, BaseNeo): + self.allowed_contents = (allowed_contents,) + else: + for item in allowed_contents: + assert issubclass(item, BaseNeo) + self.allowed_contents = tuple(allowed_contents) + self.contents = [] + self.parent = parent + + def _handle_append(self, obj): + if not ( + isinstance(obj, self.allowed_contents) + or ( # also allow proxy objects of the correct type + hasattr(obj, "proxy_for") and obj.proxy_for in self.allowed_contents + ) + ): + raise TypeError(f"Object is a {type(obj)}. 
It should be one of {self.allowed_contents}.") + # set the child-parent relationship + if self.parent: + relationship_name = self.parent.__class__.__name__.lower() + if relationship_name == "group": + raise Exception("Objects in groups should not link to the group as their parent") + current_parent = getattr(obj, relationship_name) + if current_parent != self.parent: + # use weakref here? - see https://github.com/NeuralEnsemble/python-neo/issues/684 + setattr(obj, relationship_name, self.parent) + + def __str__(self): + return str(self.contents) + + def __repr__(self): + return repr(self.contents) + + def __add__(self, objects): + # todo: decision: return a list, or a new DataObjectList? + if isinstance(objects, ObjectList): + return self.contents + objects.contents + else: + return self.contents + objects + + def __radd__(self, objects): + if isinstance(objects, ObjectList): + return objects.contents + self.contents + else: + return objects + self.contents + + def __contains__(self, key): + return key in self.contents + + def __iadd__(self, objects): + for obj in objects: + self._handle_append(obj) + self.contents.extend(objects) + return self + + def __iter__(self): + return iter(self.contents) + + def __getitem__(self, i): + return self.contents[i] + + def __len__(self): + return len(self.contents) + + def __setitem__(self, key, value): + self.contents[key] = value + + def append(self, obj): + self._handle_append(obj) + self.contents.append(obj) + + def extend(self, objects): + for obj in objects: + self._handle_append(obj) + self.contents.extend(objects) + + def clear(self): + self.contents = [] + + def count(self, value): + return self.contents.count(value) + + def index(self, value, start=0, stop=sys.maxsize): + return self.contents.index(value, start, stop) + + def insert(self, index, obj): + self._handle_append(obj) + self.contents.insert(index, obj) + + def pop(self, index=-1): + return self.contents.pop(index) + + def remove(self, value): + return self.contents.remove(value) + + def reverse(self): + self.contents.reverse() + + def sort(self, *args, key=None, reverse=False): + self.contents.sort(*args, key=key, reverse=reverse) diff --git a/neo/core/regionofinterest.py b/neo/core/regionofinterest.py index a94b34606..cdf463653 100644 --- a/neo/core/regionofinterest.py +++ b/neo/core/regionofinterest.py @@ -1,7 +1,9 @@ from math import floor, ceil +from neo.core.baseneo import BaseNeo -class RegionOfInterest: + +class RegionOfInterest(BaseNeo): """Abstract base class""" pass diff --git a/neo/core/segment.py b/neo/core/segment.py index 8ec5d69dc..ab1125136 100644 --- a/neo/core/segment.py +++ b/neo/core/segment.py @@ -7,13 +7,20 @@ ''' from datetime import datetime +from copy import deepcopy import numpy as np -from copy import deepcopy +from neo.core.analogsignal import AnalogSignal from neo.core.container import Container +from neo.core.objectlist import ObjectList +from neo.core.epoch import Epoch +from neo.core.event import Event +from neo.core.imagesequence import ImageSequence +from neo.core.irregularlysampledsignal import IrregularlySampledSignal from neo.core.spiketrainlist import SpikeTrainList +from neo.core.view import ChannelView class Segment(Container): @@ -88,11 +95,62 @@ def __init__(self, name=None, description=None, file_origin=None, ''' super().__init__(name=name, description=description, file_origin=file_origin, **annotations) - self.spiketrains = SpikeTrainList(segment=self) + + self._analogsignals = ObjectList(AnalogSignal, parent=self) +
self._irregularlysampledsignals = ObjectList(IrregularlySampledSignal, parent=self) + self._spiketrains = SpikeTrainList(parent=self) + self._events = ObjectList(Event, parent=self) + self._epochs = ObjectList(Epoch, parent=self) + self._channelviews = ObjectList(ChannelView, parent=self) + self._imagesequences = ObjectList(ImageSequence, parent=self) + self.block = None + self.file_datetime = file_datetime self.rec_datetime = rec_datetime self.index = index + analogsignals = property( + fget=lambda self: self._get_object_list("_analogsignals"), + fset=lambda self, value: self._set_object_list("_analogsignals", value), + doc="list of AnalogSignals contained in this segment" + ) + + irregularlysampledsignals = property( + fget=lambda self: self._get_object_list("_irregularlysampledsignals"), + fset=lambda self, value: self._set_object_list("_irregularlysampledsignals", value), + doc="list of IrregularlySignals contained in this segment" + ) + + events = property( + fget=lambda self: self._get_object_list("_events"), + fset=lambda self, value: self._set_object_list("_events", value), + doc="list of Events contained in this segment" + ) + + epochs = property( + fget=lambda self: self._get_object_list("_epochs"), + fset=lambda self, value: self._set_object_list("_epochs", value), + doc="list of Epochs contained in this segment" + ) + + channelviews = property( + fget=lambda self: self._get_object_list("_channelviews"), + fset=lambda self, value: self._set_object_list("_channelviews", value), + doc="list of ChannelViews contained in this segment" + ) + + imagesequences = property( + fget=lambda self: self._get_object_list("_imagesequences"), + fset=lambda self, value: self._set_object_list("_imagesequences", value), + doc="list of ImageSequences contained in this segment" + ) + + spiketrains = property( + fget=lambda self: self._get_object_list("_spiketrains"), + fset=lambda self, value: self._set_object_list("_spiketrains", value), + doc="list of SpikeTrains contained in this segment" + ) + # t_start attribute is handled as a property so type checking can be done @property def t_start(self): @@ -231,6 +289,6 @@ def time_slice(self, t_start=None, t_stop=None, reset_time=False, **kwargs): if len(ep_time_slice): subseg.epochs.append(ep_time_slice) - subseg.create_relationship() + subseg.check_relationships() return subseg diff --git a/neo/core/spiketrain.py b/neo/core/spiketrain.py index 2e0837e6b..5b89a5f6e 100644 --- a/neo/core/spiketrain.py +++ b/neo/core/spiketrain.py @@ -347,8 +347,8 @@ def __init__(self, times, t_stop, units=None, dtype=None, copy=True, def _repr_pretty_(self, pp, cycle): waveforms = "" - if self.waveforms: - waveforms = "with waveforms" + if self.waveforms is not None: + waveforms = " with waveforms" pp.text(f"{self.__class__.__name__} containing {self.size} spikes{waveforms}; " f"units {self.units.dimensionality.string}; datatype {self.dtype} ") if self._has_repr_pretty_attrs_(): diff --git a/neo/core/spiketrainlist.py b/neo/core/spiketrainlist.py index 1fd80f3b4..b67ad1f11 100644 --- a/neo/core/spiketrainlist.py +++ b/neo/core/spiketrainlist.py @@ -10,6 +10,7 @@ import numpy as np import quantities as pq from .spiketrain import SpikeTrain, normalize_times_array +from .objectlist import ObjectList def is_spiketrain_or_proxy(obj): @@ -32,7 +33,7 @@ def unique(quantities): -class SpikeTrainList(object): +class SpikeTrainList(ObjectList): """ This class contains multiple spike trains, and can represent them either as a list of SpikeTrain objects or as a pair of arrays @@ 
-73,8 +74,9 @@ class SpikeTrainList(object): ] """ + allowed_contents = (SpikeTrain,) - def __init__(self, items=None, segment=None): + def __init__(self, items=None, parent=None): """Initialize self""" if items is None: self._items = items @@ -88,7 +90,13 @@ def __init__(self, items=None, segment=None): self._channel_id_array = None self._all_channel_ids = None self._spiketrain_metadata = {} - self.segment = segment + if parent is not None: + assert parent.__class__.__name__ == "Segment" + self.segment = parent + + @property + def parent(self): + return self.segment def __iter__(self): """Implement iter(self)""" @@ -119,6 +127,9 @@ def __str__(self): else: return str(self._items) + def __repr__(self): + return "" + def __len__(self): """Return len(self)""" if self._items is None: @@ -182,7 +193,7 @@ def __add__(self, other): return self._add_spiketrainlists(other) elif other and is_spiketrain_or_proxy(other[0]): return self._add_spiketrainlists( - self.__class__(items=other, segment=self.segment) + self.__class__(items=other, parent=self.segment) ) else: if self._items is None: @@ -195,7 +206,7 @@ def __iadd__(self, other): return self._add_spiketrainlists(other, in_place=True) elif other and is_spiketrain_or_proxy(other[0]): for obj in other: - obj.segment = self.segment + self._handle_append(obj) if self._items is None: self._spiketrains_from_array() self._items.extend(other) @@ -227,7 +238,7 @@ def append(self, obj): raise ValueError("Can only append SpikeTrain objects") if self._items is None: self._spiketrains_from_array() - obj.segment = self.segment + self._handle_append(obj) self._items.append(obj) def extend(self, iterable): @@ -235,7 +246,7 @@ def extend(self, iterable): if self._items is None: self._spiketrains_from_array() for obj in iterable: - obj.segment = self.segment + self._handle_append(obj) self._items.extend(iterable) @classmethod diff --git a/neo/io/asciiimageio.py b/neo/io/asciiimageio.py index 57c31cd94..fade497da 100644 --- a/neo/io/asciiimageio.py +++ b/neo/io/asciiimageio.py @@ -90,7 +90,6 @@ def read_block(self, lazy=False, **kwargs): segment.imagesequences = [image_sequence] block = Block(file_origin=self.filename) - segment.block = block block.segments.append(segment) print("returning block") diff --git a/neo/io/asciisignalio.py b/neo/io/asciisignalio.py index 81a795b74..ba05f383e 100644 --- a/neo/io/asciisignalio.py +++ b/neo/io/asciisignalio.py @@ -176,7 +176,6 @@ def __init__(self, filename=None, delimiter='\t', usecols=None, skiprows=0, time def read_block(self, lazy=False): block = Block(file_origin=os.path.basename(self.filename)) segment = self.read_segment(lazy=lazy) - segment.block = block block.segments.append(segment) return block @@ -295,7 +294,7 @@ def read_segment(self, lazy=False): name='Column %d' % i) seg.analogsignals.append(ana_sig) - seg.create_many_to_one_relationship() + seg.check_relationships() return seg def read_metadata(self): diff --git a/neo/io/asciispiketrainio.py b/neo/io/asciispiketrainio.py index 00dc2c5e2..5abf267c9 100644 --- a/neo/io/asciispiketrainio.py +++ b/neo/io/asciispiketrainio.py @@ -107,7 +107,7 @@ def read_segment(self, sptr.annotate(channel_index=i) seg.spiketrains.append(sptr) - seg.create_many_to_one_relationship() + seg.check_relationships() return seg def write_segment(self, segment, diff --git a/neo/io/basefromrawio.py b/neo/io/basefromrawio.py index f29df467f..92f5542f6 100644 --- a/neo/io/basefromrawio.py +++ b/neo/io/basefromrawio.py @@ -178,7 +178,7 @@ def read_block(self, block_index=0, lazy=False, for 
c, sptr in enumerate(seg.spiketrains): st_groups[c].add(sptr) - bl.create_many_to_one_relationship() + bl.check_relationships() return bl @@ -241,7 +241,6 @@ def read_segment(self, block_index=0, seg_index=0, lazy=False, # ... and get the real AnalogSignal if not lazy anasig = anasig.load(time_slice=time_slice, strict_slicing=strict_slicing) - anasig.segment = seg seg.analogsignals.append(anasig) # SpikeTrain and waveforms (optional) @@ -257,7 +256,6 @@ def read_segment(self, block_index=0, seg_index=0, lazy=False, load_waveforms=load_waveforms) # TODO magnitude_mode='rescaled'/'raw' - sptr.segment = seg seg.spiketrains.append(sptr) # Events/Epoch @@ -268,17 +266,15 @@ def read_segment(self, block_index=0, seg_index=0, lazy=False, block_index=block_index, seg_index=seg_index) if not lazy: e = e.load(time_slice=time_slice, strict_slicing=strict_slicing) - e.segment = seg seg.events.append(e) elif event_channels['type'][chan_ind] == b'epoch': e = EpochProxy(rawio=self, event_channel_index=chan_ind, block_index=block_index, seg_index=seg_index) if not lazy: e = e.load(time_slice=time_slice, strict_slicing=strict_slicing) - e.segment = seg seg.epochs.append(e) - seg.create_many_to_one_relationship() + seg.check_relationships() return seg def get_sub_signal_streams(self, signal_group_mode='group-by-same-units'): @@ -286,7 +282,7 @@ def get_sub_signal_streams(self, signal_group_mode='group-by-same-units'): When signal streams don't have homogeneous SI units across channels, they have to be split in sub streams to construct AnalogSignal objects with unique units. - For backward compatibility (neo version <= 0.5) sub-streams can also be + For backward compatibility (neo version <= 0.5) sub-streams can also be used to generate one AnalogSignal per channel. """ signal_streams = self.header['signal_streams'] diff --git a/neo/io/baseio.py b/neo/io/baseio.py index 3d4fa7fdb..acce7de6f 100644 --- a/neo/io/baseio.py +++ b/neo/io/baseio.py @@ -126,7 +126,7 @@ def read(self, lazy=False, **kargs): bl = Block(name='One segment only') seg = self.read_segment(lazy=lazy, **kargs) bl.segments.append(seg) - bl.create_many_to_one_relationship() + bl.check_relationships() return [bl] else: raise NotImplementedError diff --git a/neo/io/blkio.py b/neo/io/blkio.py index 68968f8ea..c9ee71258 100644 --- a/neo/io/blkio.py +++ b/neo/io/blkio.py @@ -322,7 +322,6 @@ def read_header(file_name): spatial_scale=self.spatial_scale) segment = Segment(file_origin=self.filename, description=("stim nb:"+str(stim))) segment.imagesequences = [image_sequence] - segment.block = block for key in header: block.annotations[key] = header[key] block.segments.append(segment) diff --git a/neo/io/brainwaredamio.py b/neo/io/brainwaredamio.py index 2ccabce49..5c3ba2573 100644 --- a/neo/io/brainwaredamio.py +++ b/neo/io/brainwaredamio.py @@ -129,7 +129,7 @@ def read_block(self, lazy=False, **kargs): # create the objects to store other objects gr = Group(file_origin=self._filename) - + # load objects into their containers block.groups.append(gr) @@ -149,7 +149,7 @@ def read_block(self, lazy=False, **kargs): # remove the file object self._fsrc = None - block.create_many_to_one_relationship() + block.check_relationships() return block # ------------------------------------------------------------------------- diff --git a/neo/io/brainwaref32io.py b/neo/io/brainwaref32io.py index a5e8ac2e5..aa780d2e1 100644 --- a/neo/io/brainwaref32io.py +++ b/neo/io/brainwaref32io.py @@ -158,7 +158,7 @@ def read_block(self, lazy=False, **kargs): while res: res = 
self.__read_id() - block.create_many_to_one_relationship() + block.check_relationships() # cleanup attributes self._fsrc = None diff --git a/neo/io/brainwaresrcio.py b/neo/io/brainwaresrcio.py index 914fb188d..471159398 100755 --- a/neo/io/brainwaresrcio.py +++ b/neo/io/brainwaresrcio.py @@ -279,7 +279,7 @@ def read_next_block(self, **kargs): raise # since we read at a Block level we always do this - self._blk.create_many_to_one_relationship() + self._blk.check_relationships() # put the Block in a local object so it can be gargabe collected blockobj = self._blk @@ -524,7 +524,6 @@ def _combine_segment_events(self, segment): event_t_start = event.annotations.pop('t_start') segment.rec_datetime = self._convert_timestamp(event_t_start) segment.events = [event] - event.segment = segment def _combine_spiketrains(self, spiketrains): """ diff --git a/neo/io/elphyio.py b/neo/io/elphyio.py index 336e412b2..fddfedbcc 100644 --- a/neo/io/elphyio.py +++ b/neo/io/elphyio.py @@ -3835,7 +3835,6 @@ def read_block(self, lazy=False, ): return block for episode in range(1, self.elphy_file.n_episodes + 1): segment = self.read_segment(episode) - segment.block = block block.segments.append(segment) # close file @@ -4237,7 +4236,6 @@ def read_segment(self, episode): sampling_period=signal.sampling_period * getattr(pq, x_unit), channel_name="episode {}, channel {}".format(int(episode + 1), int(channel + 1)) ) - analog_signal.segment = segment segment.analogsignals.append(analog_signal) # create a spiketrain for each # spike channel in the episode @@ -4248,7 +4246,6 @@ def read_segment(self, episode): if n_spikes > 0: for spk in range(1, n_spikes + 1): spiketrain = self.read_spiketrain(episode, spk) - spiketrain.segment = segment segment.spiketrains.append(spiketrain) # segment return segment diff --git a/neo/io/igorproio.py b/neo/io/igorproio.py index 9eca2ea2e..80bd4fa69 100644 --- a/neo/io/igorproio.py +++ b/neo/io/igorproio.py @@ -72,7 +72,6 @@ def read_block(self, lazy=False): block = Block(file_origin=str(self.filename)) block.segments.append(self.read_segment(lazy=lazy)) - block.segments[-1].block = block return block def read_segment(self, lazy=False): @@ -89,14 +88,12 @@ def read_segment(self, lazy=False): def callback(dirpath, key, value): if isinstance(value, WaveRecord): signal = self._wave_to_analogsignal(value.wave['wave'], dirpath) - signal.segment = segment segment.analogsignals.append(signal) pxp.walk(self.filesystem, callback) else: segment.analogsignals.append( self.read_analogsignal(lazy=lazy)) - segment.analogsignals[-1].segment = segment return segment def read_analogsignal(self, path=None, lazy=False): diff --git a/neo/io/klustakwikio.py b/neo/io/klustakwikio.py index efc57361f..ab8dd69ae 100644 --- a/neo/io/klustakwikio.py +++ b/neo/io/klustakwikio.py @@ -181,7 +181,7 @@ def read_block(self, lazy=False): u.add(st) seg.spiketrains.append(st) - block.create_many_to_one_relationship() + block.check_relationships() return block # Helper hidden functions for reading diff --git a/neo/io/kwikio.py b/neo/io/kwikio.py index ba711c52d..c897936f8 100644 --- a/neo/io/kwikio.py +++ b/neo/io/kwikio.py @@ -129,7 +129,7 @@ def read_block(self, seg.duration = model.duration * pq.s - blk.create_many_to_one_relationship() + blk.check_relationships() return blk def read_analogsignal(self, model, units='uV', lazy=False): diff --git a/neo/io/neomatlabio.py b/neo/io/neomatlabio.py index e2eb9a2db..a3f0048de 100644 --- a/neo/io/neomatlabio.py +++ b/neo/io/neomatlabio.py @@ -224,7 +224,7 @@ def read_block(self, 
lazy=False): bl_struct = d['block'] bl = self.create_ob_from_struct( bl_struct, 'Block') - bl.create_many_to_one_relationship() + bl.check_relationships() return bl def write_block(self, bl, **kargs): @@ -265,7 +265,7 @@ def create_struct_from_obj(self, ob): struct = {} # relationship - for childname in getattr(ob, '_single_child_containers', []): + for childname in getattr(ob, '_child_containers', []): supported_containers = [subob.__name__.lower() + 's' for subob in self.supported_objects] if childname in supported_containers: @@ -356,7 +356,7 @@ def create_ob_from_struct(self, struct, classname): for attrname in struct._fieldnames: # check children - if attrname in getattr(ob, '_single_child_containers', []): + if attrname in getattr(ob, '_child_containers', []): child_struct = getattr(struct, attrname) try: # try must only surround len() or other errors are captured diff --git a/neo/io/nestio.py b/neo/io/nestio.py index 1a08fd870..de1cab2c0 100644 --- a/neo/io/nestio.py +++ b/neo/io/nestio.py @@ -466,7 +466,6 @@ def read_block(self, gid_list=None, time_unit=pq.ms, t_start=None, value_units) blk = Block(file_origin=seg.file_origin, file_datetime=seg.file_datetime) blk.segments.append(seg) - seg.block = blk return blk def read_segment(self, gid_list=None, time_unit=pq.ms, t_start=None, diff --git a/neo/io/neuroshareapiio.py b/neo/io/neuroshareapiio.py index 145bea391..6d01db7f9 100644 --- a/neo/io/neuroshareapiio.py +++ b/neo/io/neuroshareapiio.py @@ -248,7 +248,7 @@ def read_segment(self, # add the spike object to segment seg.spiketrains += [sptr] - seg.create_many_to_one_relationship() + seg.check_relationships() return seg diff --git a/neo/io/neurosharectypesio.py b/neo/io/neurosharectypesio.py index 1dad0bce0..6d06dfc4f 100644 --- a/neo/io/neurosharectypesio.py +++ b/neo/io/neurosharectypesio.py @@ -315,7 +315,7 @@ def read_segment(self, import_neuroshare_segment=True, # close neuroshare.ns_CloseFile(hFile) - seg.create_many_to_one_relationship() + seg.check_relationships() return seg diff --git a/neo/io/nixio.py b/neo/io/nixio.py index 1368ff04c..aa7729a8f 100644 --- a/neo/io/nixio.py +++ b/neo/io/nixio.py @@ -294,14 +294,10 @@ def _nix_to_neo_block(self, nix_block): if grp.type == "neo.segment": newseg = self._nix_to_neo_segment(grp) neo_block.segments.append(newseg) - # parent reference - newseg.block = neo_block elif grp.type == "neo.group": newgrp, parent_name = self._nix_to_neo_group(grp) assert parent_name is None neo_block.groups.append(newgrp) - # parent reference - newgrp.block = neo_block elif grp.type == "neo.subgroup": newgrp, parent_name = self._nix_to_neo_group(grp) groups_to_resolve.append((newgrp, parent_name)) @@ -328,7 +324,7 @@ def _nix_to_neo_block(self, nix_block): self._nix_to_neo_spiketrain(mt) # create object links - neo_block.create_relationship() + neo_block.check_relationships() # reset maps self._neo_map = dict() @@ -358,36 +354,24 @@ def _nix_to_neo_segment(self, nix_group): if das[0].type == "neo.analogsignal": newasig = self._nix_to_neo_analogsignal(das) neo_segment.analogsignals.append(newasig) - # parent reference - newasig.segment = neo_segment elif das[0].type == "neo.irregularlysampledsignal": newisig = self._nix_to_neo_irregularlysampledsignal(das) neo_segment.irregularlysampledsignals.append(newisig) - # parent reference - newisig.segment = neo_segment elif das[0].type == "neo.imagesequence": new_imgseq = self._nix_to_neo_imagesequence(das) neo_segment.imagesequences.append(new_imgseq) - # parent reference - new_imgseq.segment = neo_segment # 
descend into MultiTags for mtag in nix_group.multi_tags: if mtag.type == "neo.event": newevent = self._nix_to_neo_event(mtag) neo_segment.events.append(newevent) - # parent reference - newevent.segment = neo_segment elif mtag.type == "neo.epoch": newepoch = self._nix_to_neo_epoch(mtag) neo_segment.epochs.append(newepoch) - # parent reference - newepoch.segment = neo_segment elif mtag.type == "neo.spiketrain": newst = self._nix_to_neo_spiketrain(mtag) neo_segment.spiketrains.append(newst) - # parent reference - newst.segment = neo_segment return neo_segment diff --git a/neo/io/nwbio.py b/neo/io/nwbio.py index 226dc4d5c..8bd291800 100644 --- a/neo/io/nwbio.py +++ b/neo/io/nwbio.py @@ -285,7 +285,6 @@ def _get_segment(self, block_name, segment_name): break if segment is None: segment = Segment(name=segment_name) - segment.block = block block.segments.append(segment) return segment @@ -312,14 +311,12 @@ def _read_epochs_group(self, lazy): assert segment_name.size == block_name.size == 1 segment = self._get_segment(block_name[0], segment_name[0]) segment.epochs.append(epoch) - epoch.segment = segment else: epoch = EpochProxy(self._file.epochs) if not lazy: epoch = epoch.load() segment = self._get_segment("default", "default") segment.epochs.append(epoch) - epoch.segment = segment def _read_timeseries_group(self, group_name, lazy): import pynwb @@ -345,19 +342,16 @@ def _read_timeseries_group(self, group_name, lazy): if not lazy: event = event.load() segment.events.append(event) - event.segment = segment elif timeseries.rate: # AnalogSignal signal = AnalogSignalProxy(timeseries, group_name) if not lazy: signal = signal.load() segment.analogsignals.append(signal) - signal.segment = segment else: # IrregularlySampledSignal signal = AnalogSignalProxy(timeseries, group_name) if not lazy: signal = signal.load() segment.irregularlysampledsignals.append(signal) - signal.segment = segment def _read_units(self, lazy): if self._file.units: @@ -376,7 +370,6 @@ def _read_units(self, lazy): if not lazy: spiketrain = spiketrain.load() segment.spiketrains.append(spiketrain) - spiketrain.segment = segment def _read_acquisition_group(self, lazy): self._read_timeseries_group("acquisition", lazy) diff --git a/neo/io/stimfitio.py b/neo/io/stimfitio.py index 5b0557b5f..69793a9ba 100644 --- a/neo/io/stimfitio.py +++ b/neo/io/stimfitio.py @@ -145,6 +145,6 @@ def read_block(self, lazy=False): bl.segments.append(seg) t_start = t_start + length * dt - bl.create_many_to_one_relationship() + bl.check_relationships() return bl diff --git a/neo/io/tiffio.py b/neo/io/tiffio.py index 5fb6e0838..fe0c67f1e 100644 --- a/neo/io/tiffio.py +++ b/neo/io/tiffio.py @@ -118,7 +118,6 @@ def natural_sort(l): segment.imagesequences = [image_sequence] block = Block(file_origin=self.filename) - segment.block = block block.segments.append(segment) print("returning block") return block diff --git a/neo/test/coretest/test_analogsignal.py b/neo/test/coretest/test_analogsignal.py index 09179e438..521e4bdba 100644 --- a/neo/test/coretest/test_analogsignal.py +++ b/neo/test/coretest/test_analogsignal.py @@ -256,7 +256,7 @@ def test__children(self): segment = Segment(name='seg1') segment.analogsignals = [signal] - segment.create_many_to_one_relationship() + segment.check_relationships() self.assertEqual(signal._parent_objects, ('Segment',)) diff --git a/neo/test/coretest/test_block.py b/neo/test/coretest/test_block.py index aef24c198..a6a7d8f9d 100644 --- a/neo/test/coretest/test_block.py +++ b/neo/test/coretest/test_block.py @@ -18,6 +18,7 @@ 
HAVE_IPYTHON = True from neo.core.block import Block +from neo.core.segment import Segment from neo.core.container import filterdata from neo.core import SpikeTrain, AnalogSignal, Event from neo.test.tools import (assert_neo_object_is_compliant, @@ -451,6 +452,20 @@ def test__deepcopy(self): for sptr in segment.spiketrains: self.assertEqual(id(sptr.segment), id(segment)) + def test_segment_list(self): + blk = Block() + assert len(blk.segments) == 0 + blk.segments.append(Segment()) + assert len(blk.segments) == 1 + blk.segments.extend([Segment(), Segment()]) + assert len(blk.segments) == 3 + blk.segments = [] + assert len(blk.segments) == 0 + blk.segments = [Segment()] + assert len(blk.segments) == 1 + blk.segments += [Segment(), Segment()] + assert len(blk.segments) == 3 + if __name__ == "__main__": unittest.main() diff --git a/neo/test/coretest/test_container.py b/neo/test/coretest/test_container.py index 99296b197..b21f86d75 100644 --- a/neo/test/coretest/test_container.py +++ b/neo/test/coretest/test_container.py @@ -56,22 +56,18 @@ def test__children(self): self.assertEqual(container._container_child_objects, ()) self.assertEqual(container._data_child_objects, ()) - self.assertEqual(container._multi_child_objects, ()) - self.assertEqual(container._child_properties, ()) self.assertEqual(container._repr_pretty_containers, ()) - self.assertEqual(container._single_child_objects, ()) + self.assertEqual(container._child_objects, ()) self.assertEqual(container._container_child_containers, ()) self.assertEqual(container._data_child_containers, ()) - self.assertEqual(container._single_child_containers, ()) - self.assertEqual(container._multi_child_containers, ()) + self.assertEqual(container._child_containers, ()) self.assertEqual(container._child_objects, ()) self.assertEqual(container._child_containers, ()) - self.assertEqual(container._multi_children, ()) self.assertEqual(container._single_children, ()) self.assertEqual(container.data_children, ()) self.assertEqual(container.container_children, ()) @@ -94,10 +90,7 @@ def test__children(self): self.assertEqual(container.size, {}) - container.create_many_to_one_relationship() - container.create_many_to_many_relationship() - container.create_relationship() - + container.check_relationships() def test_filter(self): container = Container() self.assertRaises(TypeError, container.filter, "foo") diff --git a/neo/test/coretest/test_epoch.py b/neo/test/coretest/test_epoch.py index 377ea12fe..f32b55530 100644 --- a/neo/test/coretest/test_epoch.py +++ b/neo/test/coretest/test_epoch.py @@ -183,7 +183,7 @@ def test__children(self): segment = Segment(name='seg1') segment.epochs = [epc] - segment.create_many_to_one_relationship() + segment.check_relationships() self.assertEqual(epc._parent_objects, ('Segment',)) @@ -206,8 +206,10 @@ def test__pretty(self): assert_neo_object_is_compliant(epc) prepr = pretty(epc) - targ = ("Epoch\nname: '%s'\ndescription: '%s'\nannotations: %s" - "" % (epc.name, epc.description, pretty(epc.annotations))) + targ = (f"Epoch containing {len(epc)} epochs with labels; " + f"time units {epc.units.dimensionality.string}; datatype float64 " + f"\nname: '{epc.name}'\ndescription: '{epc.description}'" + f"\nannotations: {pretty(epc.annotations)}") self.assertEqual(prepr, targ) diff --git a/neo/test/coretest/test_event.py b/neo/test/coretest/test_event.py index 0be99bf12..9acba8d77 100644 --- a/neo/test/coretest/test_event.py +++ b/neo/test/coretest/test_event.py @@ -450,7 +450,7 @@ def test__children(self): segment = 
         Segment(name='seg1')
         segment.events = [evt]
-        segment.create_many_to_one_relationship()
+        segment.check_relationships()

         self.assertEqual(evt._parent_objects, ('Segment',))

@@ -473,8 +473,10 @@ def test__pretty(self):
         assert_neo_object_is_compliant(evt)

         prepr = pretty(evt)
-        targ = ("Event\nname: '%s'\ndescription: '%s'\nannotations: %s"
-                "" % (evt.name, evt.description, pretty(evt.annotations)))
+        targ = (f"Event containing {len(evt)} events with labels; "
+                f"time units {evt.units.dimensionality.string}; datatype float64 "
+                f"\nname: '{evt.name}'\ndescription: '{evt.description}'"
+                f"\nannotations: {pretty(evt.annotations)}")

         self.assertEqual(prepr, targ)

diff --git a/neo/test/coretest/test_segment.py b/neo/test/coretest/test_segment.py
index ecda68a31..7e626379d 100644
--- a/neo/test/coretest/test_segment.py
+++ b/neo/test/coretest/test_segment.py
@@ -156,7 +156,7 @@ def test__filter_none(self):
             # and then we have to convert to a SpikeTrainList
             # to match the output of segment.filter
             if all(isinstance(obj, SpikeTrain) for obj in targ):
-                targ = SpikeTrainList(items=targ, segment=segment)
+                targ = SpikeTrainList(items=targ, parent=segment)

         res0 = segment.filter()
         res1 = segment.filter({})
@@ -461,7 +461,7 @@ def test__time_slice(self):

         block = Block()
         block.segments = [seg]
-        block.create_many_to_one_relationship()
+        block.check_relationships()

         # test without resetting the time
         sliced = seg.time_slice(time_slice[0], time_slice[1])
@@ -499,7 +499,7 @@ def test__time_slice(self):

         block = Block()
         block.segments = [seg]
-        block.create_many_to_one_relationship()
+        block.check_relationships()

         # test with resetting the time
         sliced = seg.time_slice(time_slice[0], time_slice[1], reset_time=True)
@@ -561,7 +561,7 @@ def test__time_slice(self):

         block = Block()
         block.segments = [seg]
-        block.create_many_to_one_relationship()
+        block.check_relationships()

         # test with proxy objects
         sliced = seg.time_slice(time_slice[0], time_slice[1])
@@ -607,7 +607,7 @@ def test_time_slice_None(self):

         block = Block()
         block.segments = [seg]
-        block.create_many_to_one_relationship()
+        block.check_relationships()

         # test without resetting the time
         for t_start, t_stop in time_slices:
diff --git a/neo/test/coretest/test_spiketrain.py b/neo/test/coretest/test_spiketrain.py
index 4dc30cc46..6a5a5b488 100644
--- a/neo/test/coretest/test_spiketrain.py
+++ b/neo/test/coretest/test_spiketrain.py
@@ -1177,8 +1177,6 @@ def setUp(self):
         self.segment = Segment()
         self.segment.spiketrains.extend([self.train1, self.train2])
-        self.train1.segment = self.segment
-        self.train2.segment = self.segment

     def test_compliant(self):
         assert_neo_object_is_compliant(self.train1)
@@ -1954,7 +1952,7 @@ def test__times(self):
     def test__children(self):
         segment = Segment(name='seg1')
         segment.spiketrains = [self.train1]
-        segment.create_many_to_one_relationship()
+        segment.check_relationships()

         self.assertEqual(self.train1._parent_objects, ('Segment',))

@@ -1973,6 +1971,12 @@ def test__pretty(self):
         res = pretty(self.train1)
         targ = ("SpikeTrain\n" +
                 "name: '%s'\ndescription: '%s'\nannotations: %s"
                 "" % (self.name1, self.description1, pretty(self.ann1)))
+        targ = (f"SpikeTrain containing {len(self.train1)} spikes with waveforms; "
+                f"units {self.train1.units.dimensionality.string}; datatype float64 "
+                f"\nname: '{self.name1}'\ndescription: '{self.description1}'"
+                f"\nannotations: {pretty(self.ann1)}"
+                f"\ntime: {self.train1.t_start} to {self.train1.t_stop}")
+
         self.assertEqual(res, targ)

diff --git a/neo/test/generate_datasets.py b/neo/test/generate_datasets.py
index b8af584b1..e44ed4c15 100644
--- a/neo/test/generate_datasets.py
+++ b/neo/test/generate_datasets.py
@@ -158,8 +158,6 @@ def random_segment():
         seg.spiketrains.append(random_spiketrain())

     # todo: add some ImageSequence and ROI objects

-    for child in seg.data_children:
-        child.segment = seg
     return seg

@@ -206,7 +204,6 @@ def random_block():
     for i in range(n_seg):
         seg = random_segment()
         block.segments.append(seg)
-        seg.block = block
     children = list(block.data_children_recur)
     views = []
     for child in children:
@@ -222,7 +219,6 @@ def random_block():
         group = random_group(children)
         if group:
             block.groups.append(group)
-            group.block = block
             children.append(group)  # this can give us nested groups
     return block

@@ -240,8 +236,6 @@ def simple_block():
                       cell_type="pyramidal", thing="amajig")
     ]
-    for segment in block.segments:
-        segment.block = block
     block.segments[0].analogsignals.extend((
         random_signal(name="signal #1 in segment #1", thing="wotsit"),
         random_signal(name="signal #2 in segment #1", thing="frooble"),
@@ -277,10 +271,7 @@ def generate_one_simple_block(block_name='block_0', nb_segment=3, supported_obje
                                           **kws)
         bl.segments.append(seg)

-    # if RecordingChannel in objects:
-    #     populate_RecordingChannel(bl)
-
-    bl.create_many_to_one_relationship()
+    bl.check_relationships()
     return bl

@@ -373,12 +364,11 @@ def generate_one_simple_segment(seg_name='segment 0', supported_objects=[], nb_a

     # TODO : Spike, Event

-    seg.create_many_to_one_relationship()
+    seg.check_relationships()
     return seg


 def generate_from_supported_objects(supported_objects):
-    # ~ create_many_to_one_relationship
     if not supported_objects:
         raise ValueError('No objects specified')
     objects = supported_objects
@@ -390,5 +380,5 @@ def generate_from_supported_objects(supported_objects):
         # TODO
         return None

-    higher.create_many_to_one_relationship()
+    higher.check_relationships()
     return higher
diff --git a/neo/test/iotest/test_asciisignalio.py b/neo/test/iotest/test_asciisignalio.py
index e29a5b941..4e6a03754 100644
--- a/neo/test/iotest/test_asciisignalio.py
+++ b/neo/test/iotest/test_asciisignalio.py
@@ -349,7 +349,6 @@ def test_write_without_timecolumn(self):
         seg1 = Segment()
         block1 = Block()
         seg1.analogsignals.append(signal1)
-        seg1.block = block1
         block1.segments.append(seg1)

         iow = AsciiSignalIO(filename,
@@ -393,7 +392,6 @@ def test_write_with_timecolumn(self):
         seg1 = Segment()
         block1 = Block()
         seg1.analogsignals.append(signal1)
-        seg1.block = block1
         block1.segments.append(seg1)

         iow = AsciiSignalIO(filename,
@@ -437,7 +435,6 @@ def test_write_with_timeunits_different_from_those_of_signal(self):
         seg1 = Segment()
         block1 = Block()
         seg1.analogsignals.append(signal1)
-        seg1.block = block1
         block1.segments.append(seg1)

         iow = AsciiSignalIO(filename,
@@ -515,7 +512,6 @@ def test_roundtrip_with_json_metadata(self):
         seg1 = Segment()
         block1 = Block()
         seg1.analogsignals.append(signal1)
-        seg1.block = block1
         block1.segments.append(seg1)

         iow = AsciiSignalIO(filename, metadata_filename=metadata_filename)
diff --git a/neo/test/iotest/test_brainwaredamio.py b/neo/test/iotest/test_brainwaredamio.py
index 551bf7da2..c05c8413c 100644
--- a/neo/test/iotest/test_brainwaredamio.py
+++ b/neo/test/iotest/test_brainwaredamio.py
@@ -72,7 +72,7 @@ def proc_dam(filename):
         gr.analogsignals.append(sig)
         sig.group = gr

-    block.create_many_to_one_relationship()
+    block.check_relationships()

     return block
diff --git a/neo/test/iotest/test_brainwaref32io.py b/neo/test/iotest/test_brainwaref32io.py
index 2ecf425fc..67b56c838 100644
--- a/neo/test/iotest/test_brainwaref32io.py
+++ b/neo/test/iotest/test_brainwaref32io.py
@@ -49,7 +49,7 @@ def proc_f32(filename):
         f32file = list(f32obj.items())[0][1].flatten()
     except OSError as exc:
         if 'as a pickle' in exc.message:
-            block.create_many_to_one_relationship()
+            block.check_relationships()
             return block
         else:
             raise
@@ -78,7 +78,7 @@ def proc_f32(filename):
         gr.spiketrains.append(train)

     block.segments.append(segment)
-    block.create_many_to_one_relationship()
+    block.check_relationships()

     return block
diff --git a/neo/test/iotest/test_brainwaresrcio.py b/neo/test/iotest/test_brainwaresrcio.py
index d378c42a2..d6814d88e 100644
--- a/neo/test/iotest/test_brainwaresrcio.py
+++ b/neo/test/iotest/test_brainwaresrcio.py
@@ -84,7 +84,7 @@ def proc_src(filename):
     for rep in srcfile['sets'][0, 0].flatten():
         proc_src_condition(rep, filename, ADperiod, side, block)

-    block.create_many_to_one_relationship()
+    block.check_relationships()

     return block
diff --git a/neo/test/iotest/test_cedio.py b/neo/test/iotest/test_cedio.py
index 3b5795207..00ba563d0 100644
--- a/neo/test/iotest/test_cedio.py
+++ b/neo/test/iotest/test_cedio.py
@@ -1,9 +1,27 @@
 import unittest
+from platform import system
+from sys import maxsize

-from neo.io import CedIO
-from neo.test.iotest.common_io_test import BaseTestIO
+try:
+    if system() == 'Windows':
+        if maxsize > 2**32:
+            import sonpy.amd64.sonpy
+        else:
+            import sonpy.win32.sonpy
+    elif system() == 'Darwin':
+        import sonpy.darwin.sonpy
+    elif system() == 'Linux':
+        import sonpy.linux.sonpy
+    from neo.io import CedIO
+except ImportError:
+    HAVE_SONPY = False
+    CedIO = None
+else:
+    HAVE_SONPY = True
+from neo.test.iotest.common_io_test import BaseTestIO


+@unittest.skipUnless(HAVE_SONPY, "sonpy")
 class TestCedIO(BaseTestIO, unittest.TestCase, ):
     ioclass = CedIO
     entities_to_test = [
diff --git a/neo/test/iotest/test_nixio.py b/neo/test/iotest/test_nixio.py
index b5507dbff..87db61a20 100644
--- a/neo/test/iotest/test_nixio.py
+++ b/neo/test/iotest/test_nixio.py
@@ -890,9 +890,6 @@ def test_group_write(self):
         seg.analogsignals.extend(signals)
         seg.spiketrains.extend(spiketrains)
         seg.epochs.extend(epochs)
-        for obj in chain(signals, spiketrains, epochs):
-            obj.segment = seg
-
         views = [ChannelView(index=np.array([0, 3, 4]), obj=signals[0], name="view_of_sig1")]
         groups = [
             Group(objects=(signals[0:1] + spiketrains[0:2] + epochs + views), name="group1"),
@@ -902,8 +899,6 @@ def test_group_write(self):
         block = Block(name="block1")
         block.segments.append(seg)
         block.groups.extend(groups)
-        for obj in chain([seg], groups):
-            obj.block = block

         self.write_and_compare([block])

@@ -930,8 +925,6 @@ def test_group_write_nested(self):
         seg.analogsignals.extend(signals)
         seg.spiketrains.extend(spiketrains)
         seg.epochs.extend(epochs)
-        for obj in chain(signals, spiketrains, epochs):
-            obj.segment = seg

         views = [ChannelView(index=np.array([0, 3, 4]), obj=signals[0], name="view_of_sig1")]

@@ -944,8 +937,6 @@ def test_group_write_nested(self):
         block = Block(name="block1")
         block.segments.append(seg)
         block.groups.extend(groups)
-        for obj in chain([seg], groups):
-            obj.block = block

         self.write_and_compare([block])

@@ -1514,7 +1505,7 @@ def generate_complete_block():
             unit.add(spiketrain)

     # make sure everything is linked properly
-    block.create_relationship()
+    block.check_relationships()

     return block
diff --git a/neo/test/iotest/test_nwbio.py b/neo/test/iotest/test_nwbio.py
index 39b50ab34..0a8b8cba0 100644
--- a/neo/test/iotest/test_nwbio.py
+++ b/neo/test/iotest/test_nwbio.py
@@ -58,7 +58,6 @@ def test_roundtrip(self):
         for ind in range(num_seg):  # number of Segments
             seg = Segment(index=ind)
-            seg.block = blk
             blk.segments.append(seg)

         for seg in blk.segments:  # AnalogSignal objects
@@ -110,15 +109,6 @@ def test_roundtrip(self):
             seg.analogsignals.append(c)
             seg.irregularlysampledsignals.append(d)
             seg.events.append(evt)
-            a.segment = seg
-            b.segment = seg
-            c.segment = seg
-            d.segment = seg
-            evt.segment = seg
-            train.segment = seg
-            train2.segment = seg
-            epc.segment = seg
-            epc2.segment = seg

         # write to file
         test_file_name = "test_round_trip.nwb"
@@ -187,7 +177,6 @@ def test_roundtrip_with_annotations(self):
         original_block = Block(name="experiment", session_start_time=datetime.now())
         segment = Segment(name="session 1")
         original_block.segments.append(segment)
-        segment.block = original_block

         electrode_annotations = {
             "name": "electrode #1",
@@ -224,7 +213,7 @@ def test_roundtrip_with_annotations(self):
                                 name="response", **response_annotations)

         segment.analogsignals = [stimulus, response]
-        stimulus.segment = response.segment = segment
+        assert stimulus.segment is response.segment is segment

         test_file_name = "test_round_trip_with_annotations.nwb"
         iow = NWBIO(filename=test_file_name, mode='w')
@@ -268,25 +257,21 @@ def test_write_proxy_objects(self):
         # create proxy objects
         proxy_anasig = AnalogSignalProxy(rawio=self.proxy_reader, stream_index=0,
                                          inner_stream_channels=None,
                                          block_index=0, seg_index=0,)
-        proxy_anasig.segment = seg
         seg.analogsignals.append(proxy_anasig)

         proxy_sptr = SpikeTrainProxy(rawio=self.proxy_reader, spike_channel_index=0,
                                      block_index=0, seg_index=0)
-        proxy_sptr.segment = seg
         seg.spiketrains.append(proxy_sptr)

         proxy_event = EventProxy(rawio=self.proxy_reader, event_channel_index=0,
                                  block_index=0, seg_index=0)
-        proxy_event.segment = seg
         seg.events.append(proxy_event)

         proxy_epoch = EpochProxy(rawio=self.proxy_reader, event_channel_index=1,
                                  block_index=0, seg_index=0)
-        proxy_epoch.segment = seg
         seg.epochs.append(proxy_epoch)

-        original_block.create_relationship()
+        original_block.check_relationships()

         iow = NWBIO(filename=test_file_name, mode='w')
diff --git a/neo/test/iotest/test_pickleio.py b/neo/test/iotest/test_pickleio.py
index 2e34ba661..ed7a7e614 100644
--- a/neo/test/iotest/test_pickleio.py
+++ b/neo/test/iotest/test_pickleio.py
@@ -39,7 +39,6 @@ def test__issue_285(self):
         seg = Segment()
         seg.spiketrains.append(train)
         seg.epochs.append(epoch)
-        epoch.segment = seg
         blk.segments.append(seg)

         reader = PickleIO(filename="blk.pkl")
@@ -56,7 +55,6 @@ def test__issue_285(self):
         epoch = Epoch(times=np.arange(0, 30, 10) * pq.s,
                       durations=[10, 5, 7] * pq.ms,
                       labels=np.array(['btn0', 'btn1', 'btn2'], dtype='U'))
-        epoch.segment = Segment()
         blk = Block()
         seg = Segment()
         seg.epochs.append(epoch)
@@ -74,8 +72,6 @@ def test__issue_285(self):
         # Event
         event = Event(np.arange(0, 30, 10) * pq.s,
                       labels=np.array(['trig0', 'trig1', 'trig2'], dtype='U'))
-        event.segment = Segment()
-
         blk = Block()
         seg = Segment()
         seg.events.append(event)
@@ -93,13 +89,10 @@ def test__issue_285(self):
         # IrregularlySampledSignal
         signal = IrregularlySampledSignal(
             [0.0, 1.23, 6.78], [1, 2, 3], units='mV', time_units='ms')
-        signal.segment = Segment()
-
         blk = Block()
         seg = Segment()
         seg.irregularlysampledsignals.append(signal)
         blk.segments.append(seg)
-        blk.segments[0].block = blk

         reader = PickleIO(filename="blk.pkl")
         reader.write(blk)
diff --git a/neo/test/tools.py b/neo/test/tools.py
index 70d9dd2ef..946a4b9ba 100644
--- a/neo/test/tools.py
+++ b/neo/test/tools.py
@@ -13,6 +13,7 @@
 from neo.core.baseneo import _reference_name, _container_name
 from neo.core.basesignal import BaseSignal
 from neo.core.container import Container
+from neo.core.objectlist import ObjectList
 from neo.core.spiketrainlist import SpikeTrainList
 from neo.io.basefromrawio import proxyobjectlist, EventProxy, EpochProxy
@@ -175,6 +176,17 @@ def assert_neo_object_is_compliant(ob, check_type=True):
         raise


+def types_match(ob1, ob2):
+    if type(ob1) == type(ob2):
+        return True
+    elif isinstance(ob1, ObjectList):
+        return isinstance(ob2, (list, ObjectList))
+    elif isinstance(ob2, ObjectList):
+        return isinstance(ob1, (list, ObjectList))
+    else:
+        return False
+
+
 def assert_same_sub_schema(ob1, ob2, equal_almost=True, threshold=1e-10, exclude=None):
     '''
     Test if ob1 and ob2 has the same sub schema.
@@ -193,13 +205,14 @@ def assert_same_sub_schema(ob1, ob2, equal_almost=True, threshold=1e-10, exclude
             # for debugging occasional test failure
             raise Exception("items={}\nspike_time_array={}\nlist length: {}".format(
                 str(ob1._items), str(ob1._spike_time_array), len(ob2)))
-    assert type(ob1) == type(ob2), 'type({}) != type({})'.format(type(ob1), type(ob2))
+    errmsg = 'type({}) != type({})'.format(type(ob1), type(ob2))
+    assert types_match(ob1, ob2), errmsg
     classname = ob1.__class__.__name__

     if exclude is None:
         exclude = []

-    if isinstance(ob1, (list, SpikeTrainList)):
+    if isinstance(ob1, (list, ObjectList)):
         assert len(ob1) == len(ob2), 'lens %s and %s not equal for %s and %s' \
                                      '' % (len(ob1), len(ob2), ob1, ob2)
         for i, (sub1, sub2) in enumerate(zip(ob1, ob2)):
diff --git a/neo/test/utils/test_misc.py b/neo/test/utils/test_misc.py
index f547cd159..29566efc7 100644
--- a/neo/test/utils/test_misc.py
+++ b/neo/test/utils/test_misc.py
@@ -373,7 +373,7 @@ def test__cut_block_by_epochs(self):

         original_block = Block()
         original_block.segments = [seg, seg2]
-        original_block.create_many_to_one_relationship()
+        original_block.check_relationships()

         with warnings.catch_warnings(record=True) as w:
             # This should raise a warning as one segment does not contain epochs
@@ -432,7 +432,7 @@ def test__cut_block_by_epochs(self):

         original_block = Block()
         original_block.segments = [seg, seg2]
-        original_block.create_many_to_one_relationship()
+        original_block.check_relationships()

         with warnings.catch_warnings(record=True) as w:
             # This should raise a warning as one segment does not contain epochs
@@ -604,7 +604,7 @@ def test__cut_block_by_epochs(self):

         original_block = Block()
         original_block.segments = [seg]
-        original_block.create_many_to_one_relationship()
+        original_block.check_relationships()

         block = cut_block_by_epochs(original_block, properties={'pick': 'me'})

@@ -651,7 +651,7 @@ def test__cut_block_by_epochs(self):
         epoch.annotate(pick='me instead')
         seg2.epochs = [proxy_epoch, epoch]
         block2.segments = [seg2]
-        block2.create_many_to_one_relationship()
+        block2.check_relationships()

         # test correct loading and slicing of EpochProxy objects
         # (not tested above since we used the EpochProxy to cut the block)
diff --git a/neo/utils/misc.py b/neo/utils/misc.py
index 12c37f82c..eb2937823 100644
--- a/neo/utils/misc.py
+++ b/neo/utils/misc.py
@@ -351,7 +351,7 @@ def add_epoch(

     if attach_result:
         segment.epochs.append(ep)
-        segment.create_relationship()
+        segment.check_relationships()

     return ep
@@ -495,7 +495,7 @@ def cut_block_by_epochs(block, properties=None, reset_time=False):
                                         seg, epoch=epoch, reset_time=reset_time)
         new_block.segments.extend(new_segments)

-    new_block.create_many_to_one_relationship(force=True)
+    new_block.check_relationships()

     return new_block