@@ -50,8 +50,21 @@ def _source_name(self):
     def _parse_header(self):
         all_streams, nb_block, nb_segment_per_block = explore_folder(self.dirname)

-        sig_stream_names = sorted(list(all_streams[0][0]['continuous'].keys()))
-        event_stream_names = sorted(list(all_streams[0][0]['events'].keys()))
+        # streams can be different across blocks. Gather all and assign a stream index
+        sig_stream_names = {}
+        event_stream_names = {}
+        sig_stream_index = 0
+        evt_stream_index = 0
+
+        for block_index in range(nb_block):
+            sig_stream_names[block_index] = {}
+            for stream_name in sorted(list(all_streams[block_index][0]['continuous'].keys())):
+                sig_stream_names[block_index][sig_stream_index] = stream_name
+                sig_stream_index += 1
+            event_stream_names[block_index] = {}
+            for stream_name in sorted(list(all_streams[block_index][0]['events'].keys())):
+                event_stream_names[block_index][evt_stream_index] = stream_name
+                evt_stream_index += 1

         # first loop to reassign stream by "stream_index" instead of "stream_name"
         self._sig_streams = {}
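
A minimal sketch of the numbering this hunk introduces, on a made-up two-block layout (the all_streams literal is only a stand-in for what explore_folder() returns): stream indices keep counting across blocks, so every (block, stream) pair gets a unique global index.

    # Stand-in for explore_folder() output; stream names are illustrative only.
    all_streams = {
        0: {0: {'continuous': {'Rhythm_FPGA-100.0': {}, 'NI-DAQmx-103.0': {}}, 'events': {}}},
        1: {0: {'continuous': {'Rhythm_FPGA-100.0': {}}, 'events': {}}},
    }
    sig_stream_names = {}
    sig_stream_index = 0
    for block_index in range(2):
        sig_stream_names[block_index] = {}
        for stream_name in sorted(all_streams[block_index][0]['continuous']):
            sig_stream_names[block_index][sig_stream_index] = stream_name
            sig_stream_index += 1
    print(sig_stream_names)
    # {0: {0: 'NI-DAQmx-103.0', 1: 'Rhythm_FPGA-100.0'}, 1: {2: 'Rhythm_FPGA-100.0'}}
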
@@ -62,40 +75,42 @@ def _parse_header(self):
             for seg_index in range(nb_segment_per_block[block_index]):
                 self._sig_streams[block_index][seg_index] = {}
                 self._evt_streams[block_index][seg_index] = {}
-                for stream_index, stream_name in enumerate(sig_stream_names):
+                for stream_index, stream_name in sig_stream_names[block_index].items():
                     d = all_streams[block_index][seg_index]['continuous'][stream_name]
                     d['stream_name'] = stream_name
                     self._sig_streams[block_index][seg_index][stream_index] = d
-                for i, stream_name in enumerate(event_stream_names):
+                for i, stream_name in event_stream_names[block_index].items():
                     d = all_streams[block_index][seg_index]['events'][stream_name]
                     d['stream_name'] = stream_name
                     self._evt_streams[block_index][seg_index][i] = d

         # signals zone
         # create signals channel map: several channel per stream
         signal_channels = []
-        for stream_index, stream_name in enumerate(sig_stream_names):
-            # stream_index is the index in vector sytream names
-            stream_id = str(stream_index)
-            d = self._sig_streams[0][0][stream_index]
-            new_channels = []
-            for chan_info in d['channels']:
-                chan_id = chan_info['channel_name']
-                if chan_info["units"] == "":
-                    # in some cases for some OE version the unit is "", but the gain is to "uV"
-                    units = "uV"
-                else:
-                    units = chan_info["units"]
-                new_channels.append((chan_info['channel_name'],
-                    chan_id, float(d['sample_rate']), d['dtype'], units,
-                    chan_info['bit_volts'], 0., stream_id))
-            signal_channels.extend(new_channels)
+        for block_index in range(nb_block):
+            for stream_index, stream_name in sig_stream_names[block_index].items():
+                # stream_index is the index in vector stream names
+                stream_id = str(stream_index)
+                d = self._sig_streams[block_index][0][stream_index]
+                new_channels = []
+                for chan_info in d['channels']:
+                    chan_id = chan_info['channel_name']
+                    if chan_info["units"] == "":
+                        # in some cases for some OE version the unit is "", but the gain is to "uV"
+                        units = "uV"
+                    else:
+                        units = chan_info["units"]
+                    new_channels.append((chan_info['channel_name'],
+                        chan_id, float(d['sample_rate']), d['dtype'], units,
+                        chan_info['bit_volts'], 0., stream_id))
+                signal_channels.extend(new_channels)
         signal_channels = np.array(signal_channels, dtype=_signal_channel_dtype)

         signal_streams = []
-        for stream_index, stream_name in enumerate(sig_stream_names):
-            stream_id = str(stream_index)
-            signal_streams.append((stream_name, stream_id))
+        for block_index in range(nb_block):
+            for stream_index, stream_name in sig_stream_names[block_index].items():
+                stream_id = str(stream_index)
+                signal_streams.append((stream_name, stream_id))
         signal_streams = np.array(signal_streams, dtype=_signal_stream_dtype)

         # create memmap for signals
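
A quick sketch of what the flattened signal_streams table ends up holding, assuming a simple (name, id) record dtype in place of neo's _signal_stream_dtype:

    import numpy as np

    # Stand-in dtype and stream map (the real ones come from _signal_stream_dtype
    # and the sig_stream_names dict built above).
    stream_dtype = [('name', 'U64'), ('id', 'U64')]
    sig_stream_names = {0: {0: 'streamA', 1: 'streamB'}, 1: {2: 'streamA'}}

    signal_streams = []
    for block_index, streams in sig_stream_names.items():
        for stream_index, stream_name in streams.items():
            signal_streams.append((stream_name, str(stream_index)))
    signal_streams = np.array(signal_streams, dtype=stream_dtype)
    # The same stream name can appear twice, but with distinct ids, because each
    # block contributes its own globally numbered streams.
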
@@ -110,42 +125,44 @@ def _parse_header(self):
         # events zone
         # channel map: one channel one stream
         event_channels = []
-        for stream_ind, stream_name in enumerate(event_stream_names):
-            d = self._evt_streams[0][0][stream_ind]
-            event_channels.append((d['channel_name'], stream_ind, 'event'))
+        for block_index in range(nb_block):
+            for stream_ind, stream_name in event_stream_names[block_index].items():
+                d = self._evt_streams[block_index][0][stream_ind]
+                event_channels.append((d['channel_name'], stream_ind, 'event'))
         event_channels = np.array(event_channels, dtype=_event_channel_dtype)

         # create memmap
-        for stream_ind, stream_name in enumerate(event_stream_names):
-            # inject memmap loaded into main dict structure
-            d = self._evt_streams[0][0][stream_ind]
-
-            for name in _possible_event_stream_names:
-                if name + '_npy' in d:
-                    data = np.load(d[name + '_npy'], mmap_mode='r')
-                    d[name] = data
-
-            # check that events have timestamps
-            assert 'timestamps' in d
-
-            # for event the neo "label" will change depending the nature
-            # of event (ttl, text, binary)
-            # and this is transform into unicode
-            # all theses data are put in event array annotations
-            if 'text' in d:
-                # text case
-                d['labels'] = d['text'].astype('U')
-            elif 'metadata' in d:
-                # binary case
-                d['labels'] = d['channels'].astype('U')
-            elif 'channels' in d:
-                # ttl case use channels
-                d['labels'] = d['channels'].astype('U')
-            elif 'states' in d:
-                # ttl case use states
-                d['labels'] = d['states'].astype('U')
-            else:
-                raise ValueError(f'There is no possible labels for this event: {stream_name}')
+        for block_index in range(nb_block):
+            for stream_ind, stream_name in event_stream_names[block_index].items():
+                # inject memmap loaded into main dict structure
+                d = self._evt_streams[block_index][0][stream_ind]
+
+                for name in _possible_event_stream_names:
+                    if name + '_npy' in d:
+                        data = np.load(d[name + '_npy'], mmap_mode='r')
+                        d[name] = data
+
+                # check that events have timestamps
+                assert 'timestamps' in d
+
+                # for event the neo "label" will change depending the nature
+                # of event (ttl, text, binary)
+                # and this is transform into unicode
+                # all theses data are put in event array annotations
+                if 'text' in d:
+                    # text case
+                    d['labels'] = d['text'].astype('U')
+                elif 'metadata' in d:
+                    # binary case
+                    d['labels'] = d['channels'].astype('U')
+                elif 'channels' in d:
+                    # ttl case use channels
+                    d['labels'] = d['channels'].astype('U')
+                elif 'states' in d:
+                    # ttl case use states
+                    d['labels'] = d['states'].astype('U')
+                else:
+                    raise ValueError(f'There is no possible labels for this event: {stream_name}')

         # no spike read yet
         # can be implemented on user demand
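
The label choice above follows a fixed priority: text events, then binary events (metadata), then TTL events via 'channels' or 'states'. A small illustrative helper, not part of the neo API, that captures the same priority:

    def pick_labels(d):
        # d is one event-stream dict after the _npy arrays have been loaded
        if 'text' in d:            # text events
            return d['text'].astype('U')
        elif 'metadata' in d:      # binary events, labelled by channel
            return d['channels'].astype('U')
        elif 'channels' in d:      # TTL events carrying a 'channels' array
            return d['channels'].astype('U')
        elif 'states' in d:        # TTL events carrying a 'states' array
            return d['states'].astype('U')
        raise ValueError('no possible labels for this event stream')
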
@@ -172,8 +189,8 @@ def _parse_header(self):
                         global_t_stop = t_stop

                 # loop over events
-                for stream_index, stream_name in enumerate(event_stream_names):
-                    d = self._evt_streams[0][0][stream_index]
+                for stream_ind, stream_name in event_stream_names[block_index].items():
+                    d = self._evt_streams[block_index][0][stream_ind]
                     if d['timestamps'].size == 0:
                         continue
                     t_start = d['timestamps'][0] / d['sample_rate']
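
Event timestamps are stored as sample counts, so the segment limits derived from them are divided by the stream's sample rate; a tiny sketch with made-up numbers:

    import numpy as np

    d = {'timestamps': np.array([640, 1280, 64000]), 'sample_rate': 32000.}
    t_start = d['timestamps'][0] / d['sample_rate']    # 0.02 s
    t_stop = d['timestamps'][-1] / d['sample_rate']    # 2.0 s
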
@@ -203,9 +220,9 @@ def _parse_header(self):
                 seg_ann = bl_ann['segments'][seg_index]

                 # array annotations for signal channels
-                for stream_index, stream_name in enumerate(sig_stream_names):
+                for stream_index, stream_name in sig_stream_names[block_index].items():
                     sig_ann = seg_ann['signals'][stream_index]
-                    d = self._sig_streams[0][0][stream_index]
+                    d = self._sig_streams[block_index][0][stream_index]
                     for k in ('identifier', 'history', 'source_processor_index',
                               'recorded_processor_index'):
                         if k in d['channels'][0]:
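
The keys checked here become per-channel array annotations; a hedged sketch of the idea (the keys and values are made up, and the line that actually collects them sits just below this hunk):

    import numpy as np

    channels = [{'identifier': 'CH1'}, {'identifier': 'CH2'}]
    values = np.array([chan_info['identifier'] for chan_info in channels])
    # stored under sig_ann['__array_annotations__']['identifier']
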
@@ -214,9 +231,9 @@ def _parse_header(self):

                 # array annotations for event channels
                 # use other possible data in _possible_event_stream_names
-                for stream_index, stream_name in enumerate(event_stream_names):
+                for stream_index, stream_name in event_stream_names[block_index].items():
                     ev_ann = seg_ann['events'][stream_index]
-                    d = self._evt_streams[0][0][stream_index]
+                    d = self._evt_streams[block_index][0][stream_index]
                     for k in _possible_event_stream_names:
                         if k in ('timestamps', ):
                             continue
@@ -329,7 +346,7 @@ def explore_folder(dirname):
         nested dictionaries containing structure and stream information
     """
     nb_block = 0
-    nb_segment_per_block = []
+    nb_segment_per_block = {}
     # nested dictionary: block_index > seg_index > data_type > stream_name
     all_streams = {}
     for root, dirs, files in os.walk(dirname):
@@ -347,9 +364,8 @@ def explore_folder(dirname):
             block_index = int(root.parents[0].stem.lower().replace('experiment', '')) - 1
             if block_index not in all_streams:
                 all_streams[block_index] = {}
-                if block_index >= nb_block:
-                    nb_block = block_index + 1
-                    nb_segment_per_block.append(0)
+                nb_block += 1
+                nb_segment_per_block[block_index] = 0

             seg_index = int(root.stem.replace('recording', '')) - 1
             if seg_index not in all_streams[block_index]:
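
A short sketch of how those indices fall out of the Open Ephys folder names (the path is made up; the expressions mirror the ones in the hunk):

    from pathlib import Path

    # root is the directory that holds structure.oebin,
    # e.g. <node>/experiment1/recording2
    root = Path('Record Node 101/experiment1/recording2')
    block_index = int(root.parents[0].stem.lower().replace('experiment', '')) - 1  # 0
    seg_index = int(root.stem.replace('recording', '')) - 1                        # 1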