f20b1219eba9d3330d5d7cd7ce8d8924a255b8c2/spikeextractors/extractors/phyextractors/phyextractors.py

- ExampleRawIO is a class of a fake example.
- This is to be used when coding a new RawIO.
-
-
- Rules for creating a new class:
-     1. Step 1: Create the main class
-         * Create a file in **neo/rawio/** that ends with "rawio.py"
-         * Create the class that inherits BaseRawIO
-         * copy/paste all methods that need to be implemented.
-           See the end of neo.rawio.baserawio.BaseRawIO
-         * code hard! The main difficulty **is _parse_header()**.
-           In short you have to create a mandatory dict that
-           contains channel information::
-
-             self.header = {}
-             self.header['nb_block'] = 2
-             self.header['nb_segment'] = [2, 3]
-             self.header['signal_channels'] = sig_channels
-             self.header['unit_channels'] = unit_channels
-             self.header['event_channels'] = event_channels
-
-     2. Step 2: RawIO test:
-         * create a file in neo/rawio/tests with the same name with "test_" prefix
-         * copy/paste neo/rawio/tests/test_examplerawio.py and do the same
-
-     3. Step 3: Create the neo.io class with the wrapper
-         * Create a file in neo/io/ that ends with "io.py"
-         * Create a class that inherits both your RawIO class and BaseFromRaw class
-         * copy/paste from neo/io/exampleio.py
-
-     4. Step 4: IO test
-         * create a file in neo/test/iotest with the same previous name with "test_" prefix
-         * copy/paste from neo/test/iotest/test_exampleio.py
-
-
-
+ Author: Regimantas Jurkus
"""

from .baserawio import (BaseRawIO, _signal_channel_dtype, _unit_channel_dtype,
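
The docstring removed above is the boilerplate from neo's ExampleRawIO template; the one non-trivial requirement it describes is the mandatory self.header dict that every _parse_header() implementation must fill. Below is a minimal sketch of that dict, assuming the structured dtypes come from neo.rawio.baserawio as the import above suggests (_event_channel_dtype is assumed to belong to the same, truncated, import) and leaving all channel tables empty for brevity:

    # Minimal sketch (not taken from this diff) of the header that
    # _parse_header() must fill, following the removed docstring above.
    import numpy as np
    from neo.rawio.baserawio import (_signal_channel_dtype, _unit_channel_dtype,
                                     _event_channel_dtype)

    header = {}
    header['nb_block'] = 2                 # number of blocks in the fake example
    header['nb_segment'] = [2, 3]          # segments per block
    header['signal_channels'] = np.array([], dtype=_signal_channel_dtype)
    header['unit_channels'] = np.array([], dtype=_unit_channel_dtype)
    header['event_channels'] = np.array([], dtype=_event_channel_dtype)
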
@@ -163,21 +128,23 @@ def _parse_header(self):
            for seg_index in range([1][block_index]):
                seg_ann = bl_ann['segments'][seg_index]
                seg_ann['name'] = f'Seg #{seg_index} Block #{block_index}'
-                 seg_ann['seg_extra_info'] = f'This is the seg {seg_index} of ' \
-                                             f'block {block_index}'
+                 seg_ann['seg_extra_info'] = f'This is the seg {seg_index} ' \
+                                             f'of block {block_index}'
                for index, clust_id in enumerate(clust_ids):
                    spiketrain_an = seg_ann['units'][index]

                    # Loop over list of list of dict and annotate each st
                    for annotation_list in annotation_lists:
-                         clust_key, property_name = tuple(annotation_list[0].keys())
+                         clust_key, property_name = tuple(annotation_list[0].
+                                                          keys())
                        if property_name == 'KSLabel':
                            annotation_name = 'quality'
                        else:
                            annotation_name = property_name.lower()
                        for annotation_dict in annotation_list:
                            if int(annotation_dict[clust_key]) == clust_id:
-                                 spiketrain_an[annotation_name] = annotation_dict[property_name]
+                                 spiketrain_an[annotation_name] = \
+                                     annotation_dict[property_name]
                                break

    def _segment_t_start(self, block_index, seg_index):
@@ -194,7 +161,8 @@ def _get_signal_size(self, block_index, seg_index, channel_indexes=None):
    def _get_signal_t_start(self, block_index, seg_index, channel_indexes):
        return None

-     def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, channel_indexes):
+     def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop,
+                                 channel_indexes):
        return None

    def _spike_count(self, block_index, seg_index, unit_index):
@@ -205,7 +173,8 @@ def _spike_count(self, block_index, seg_index, unit_index):
        nb_spikes = np.sum(mask)
        return nb_spikes

-     def _get_spike_timestamps(self, block_index, seg_index, unit_index, t_start, t_stop):
+     def _get_spike_timestamps(self, block_index, seg_index, unit_index,
+                               t_start, t_stop):
        assert block_index == 0
        assert seg_index == 0

@@ -215,7 +184,8 @@ def _get_spike_timestamps(self, block_index, seg_index, unit_index, t_start, t_s

        if t_start is not None:
            start_frame = int(t_start * self._sampling_frequency)
-             spike_timestamps = spike_timestamps[spike_timestamps >= start_frame]
+             spike_timestamps = spike_timestamps[spike_timestamps >=
+                                                 start_frame]
        if t_stop is not None:
            end_frame = int(t_stop * self._sampling_frequency)
            spike_timestamps = spike_timestamps[spike_timestamps < end_frame]
@@ -227,13 +197,15 @@ def _rescale_spike_timestamp(self, spike_timestamps, dtype):
        spike_times /= self._sampling_frequency
        return spike_times

-     def _get_spike_raw_waveforms(self, block_index, seg_index, unit_index, t_start, t_stop):
+     def _get_spike_raw_waveforms(self, block_index, seg_index, unit_index,
+                                  t_start, t_stop):
        return None

    def _event_count(self, block_index, seg_index, event_channel_index):
        return None

-     def _get_event_timestamps(self, block_index, seg_index, event_channel_index, t_start, t_stop):
+     def _get_event_timestamps(self, block_index, seg_index,
+                               event_channel_index, t_start, t_stop):
        return None

    def _rescale_event_timestamp(self, event_timestamps, dtype):
@@ -256,4 +228,3 @@ def _parse_tsv_or_csv_to_list_of_dict(filename):
            list_of_dict.append(row)

        return list_of_dict
-
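
The tail of _parse_tsv_or_csv_to_list_of_dict above yields one dict per row of a Phy annotation file, and the annotation loop in _parse_header consumes those dicts. A hedged usage sketch follows, assuming a Kilosort-style file with cluster_id and KSLabel columns (the file layout and column names are illustrative, not taken from this diff); it mirrors how the loop maps the KSLabel column onto a 'quality' annotation of the matching spike train:

    # Illustrative row such as _parse_tsv_or_csv_to_list_of_dict might return
    # for an assumed cluster_KSLabel.tsv with columns cluster_id and KSLabel.
    row = {'cluster_id': '7', 'KSLabel': 'good'}

    # Same key unpacking and renaming as the annotation loop above.
    clust_key, property_name = tuple(row.keys())      # ('cluster_id', 'KSLabel')
    annotation_name = 'quality' if property_name == 'KSLabel' \
        else property_name.lower()

    spiketrain_annotations = {}
    clust_id = 7                                       # cluster being annotated
    if int(row[clust_key]) == clust_id:
        spiketrain_annotations[annotation_name] = row[property_name]

    print(spiketrain_annotations)                      # {'quality': 'good'}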