@@ -93,6 +93,9 @@ def generate_input_spike_trains(
     logger.info(f"{comm.size} ranks have been allocated")
 
     population_name = population.name
+    start_gid = 0
+    if hasattr(population, "start_gid"):
+        start_gid = population.start_gid
 
     soma_positions_dict = None
     if coords_path is not None:
@@ -186,6 +189,8 @@ def generate_input_spike_trains(
     feature_items = list(population.features.items())
     n_iter = comm.allreduce(len(feature_items), op=MPI.MAX)
 
+    logger.info(f"n_iter = {n_iter} feature_items = {feature_items}")
+
     if not dry_run and rank == 0:
         if output_path is None:
             raise RuntimeError("generate_input_spike_trains: missing output_path")
@@ -198,6 +203,7 @@ def generate_input_spike_trains(
     for iter_count in range(n_iter):
         if iter_count < len(feature_items):
             gid, input_feature = feature_items[iter_count]
+            gid += start_gid
         else:
             gid, input_feature = None, None
         if gid is not None:
@@ -218,13 +224,11 @@ def generate_input_spike_trains(
 
             # Get spike response
             response = input_feature.get_response(processed_signal)
+            if isinstance(response, list):
+                response = np.concatenate(np.concatenate(response, dtype=np.float32))
 
             if len(response) > 0:
-                spikes_attr_dict[gid] = {
-                    output_spike_train_attr_name: np.concatenate(
-                        response, dtype=np.float32
-                    )
-                }
+                spikes_attr_dict[gid] = {output_spike_train_attr_name: response}
 
             gid_count += 1
             if (iter_count > 0 and iter_count % write_every == 0) or (
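For illustration only (not part of the commit): a minimal sketch of what the added start_gid offset does, assuming population.features maps population-local gids to feature objects and start_gid is the first global gid assigned to this population. The names and values below are made up.

# Hypothetical illustration of the gid offset applied in the diff above.
features = {0: "feature_a", 1: "feature_b", 2: "feature_c"}  # local gid -> feature
start_gid = 1000  # assumed first global gid of this population

# Local feature keys are shifted so spike trains are keyed by global gids.
global_gids = [local_gid + start_gid for local_gid in features]
print(global_gids)  # [1000, 1001, 1002]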