@@ -184,7 +184,6 @@ def turn_events_into_actions(events_list, fees: typing.Dict, denorms: pd.DataFra
     # Get basic info from first event log, no matter how many there actually are
     first_event_log = events.iloc[0]
     ts = first_event_log["block_timestamp"]
-    tx_hash = first_event_log["transaction_hash"]
     block_number = first_event_log.name
 
     # Invariant data that exists parallel to these actions. Merge them
@@ -193,9 +192,15 @@ def turn_events_into_actions(events_list, fees: typing.Dict, denorms: pd.DataFra
     denorm = format_denorms(denorms.loc[block_number].to_dict(orient="records"))
     # convert block_number and swap_fee to string to painlessly
     # convert to JSON later (numpy.int64 can't be JSON serialized)
-    a = Action(timestamp=ts.to_pydatetime(), tx_hash=tx_hash, block_number=str(block_number), swap_fee=str(fee),
-               denorms=denorm, action_type=first_event_log["type"], action=events.to_dict(orient="records"))
-    actions.append(a)
+    if first_event_log["type"] == "swap" and len(events) > 1:
+        print(txhash, "might be an aggregate swap")
+        for _, e in events.iterrows():
+            actions.append(Action(timestamp=ts.to_pydatetime(), tx_hash=txhash, block_number=str(block_number), swap_fee=str(fee),
+                                  denorms=denorm, action_type=first_event_log["type"], action=[e.to_dict()]))
+    else:
+        a = Action(timestamp=ts.to_pydatetime(), tx_hash=txhash, block_number=str(block_number), swap_fee=str(fee),
+                   denorms=denorm, action_type=first_event_log["type"], action=events.to_dict(orient="records"))
+        actions.append(a)
 
     return actions
 
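The change above splits a transaction that emits several swap event logs (an aggregate swap routed through multiple pools) into one Action per log, rather than bundling them into a single Action. Below is a minimal, runnable sketch of that grouping-and-splitting pattern, assuming a simplified stand-in `Action` dataclass and illustrative column names (`transaction_hash`, `type`); it is not the repository's actual schema or class.

```python
# Sketch only: a simplified stand-in for the repo's Action class and
# event-log schema, showing the aggregate-swap splitting idea.
from dataclasses import dataclass
from typing import List
import pandas as pd


@dataclass
class Action:
    tx_hash: str
    action_type: str
    action: List[dict]


def split_events(events_df: pd.DataFrame) -> List[Action]:
    actions = []
    # Group event logs that belong to the same transaction.
    for txhash, events in events_df.groupby("transaction_hash"):
        first = events.iloc[0]
        if first["type"] == "swap" and len(events) > 1:
            # Aggregate swap: emit one Action per individual swap log.
            for _, e in events.iterrows():
                actions.append(Action(tx_hash=txhash, action_type="swap",
                                      action=[e.to_dict()]))
        else:
            # Single-event (or non-swap) transaction: keep all logs in one Action.
            actions.append(Action(tx_hash=txhash, action_type=first["type"],
                                  action=events.to_dict(orient="records")))
    return actions


if __name__ == "__main__":
    df = pd.DataFrame([
        {"transaction_hash": "0xaaa", "type": "swap", "token_in": "WETH", "token_out": "DAI"},
        {"transaction_hash": "0xaaa", "type": "swap", "token_in": "DAI", "token_out": "BAL"},
        {"transaction_hash": "0xbbb", "type": "join", "token_in": "WETH", "token_out": None},
    ])
    for a in split_events(df):
        print(a.tx_hash, a.action_type, len(a.action))
```

Emitting one Action per log presumably lets downstream consumers treat each leg of an aggregate swap independently instead of as one opaque multi-event record.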