@@ -310,13 +310,13 @@ def stage3_merge_actions(pool_address, grouped_actions):
     return actions_final

 def produce_actions():
-    new_results, join_results, swap_results, exit_results, transfer_results, fees_results, denorms_results = stage1_load_sql_data(args.pool_address)
+    new_events, join_events, swap_events, exit_events, transfer_events, fees_results, denorms_results = stage1_load_sql_data(args.pool_address)

-    new_results["type"] = "new"
-    join_results["type"] = "join"
-    swap_results["type"] = "swap"
-    exit_results["type"] = "exit"
-    transfer_results["type"] = "transfer"
+    new_events["type"] = "new"
+    join_events["type"] = "join"
+    swap_events["type"] = "swap"
+    exit_events["type"] = "exit"
+    transfer_events["type"] = "transfer"

     # Later we will drop the column "address" from denorms, because it is
     # just the pool address - it never changes.
@@ -328,28 +328,34 @@ def produce_actions():
     # Pandas, please don't truncate columns when I print them out
     pd.set_option('display.max_colwidth', None)

-    initial_state = stage2_produce_initial_state(new_results, fees_results, transfer_results)
+    initial_state = stage2_produce_initial_state(new_events, fees_results, transfer_events)
     # save_pickle(initial_state, f"{args.pool_address}/initial_state.pickle")

-    actions = []
-    actions.extend(turn_events_into_actions(new_results, fees_dict, denorms_results))
-    actions.extend(turn_events_into_actions(join_results, fees_dict, denorms_results))
-    actions.extend(turn_events_into_actions(swap_results, fees_dict, denorms_results))
-    actions.extend(turn_events_into_actions(exit_results, fees_dict, denorms_results))
-    actions.extend(turn_events_into_actions(transfer_results, fees_dict, denorms_results))
-
-    grouped_by_tx_actions = {}
-    for i, action in enumerate(actions):
-        tx_hash = actions[i].tx_hash
-        if grouped_by_tx_actions.get(tx_hash) is None:
-            grouped_by_tx_actions[tx_hash] = []
-        grouped_by_tx_actions[tx_hash].append(action)
-    grouped_actions = list(map(lambda key: grouped_by_tx_actions[key], grouped_by_tx_actions))
-
-    # Filter out pool share transfer
-    grouped_actions = list(filter(lambda acts: not (len(acts) == 1 and acts[0].action_type == 'transfer'), grouped_actions))
-
-    actions = stage3_merge_actions(args.pool_address, grouped_actions)
+    events = []
+    events.extend(turn_events_into_actions(new_events, fees_dict, denorms_results))
+    events.extend(turn_events_into_actions(join_events, fees_dict, denorms_results))
+    events.extend(turn_events_into_actions(swap_events, fees_dict, denorms_results))
+    events.extend(turn_events_into_actions(exit_events, fees_dict, denorms_results))
+    events.extend(turn_events_into_actions(transfer_events, fees_dict, denorms_results))
+
+    # save_pickle(events, f"{args.pool_address}/events.pickle")
+    # events = load_pickle(f"{args.pool_address}/events.pickle")
+
+    events_grouped_by_txhash = {}
+    for i, action in enumerate(events):
+        tx_hash = events[i].tx_hash
+        if events_grouped_by_txhash.get(tx_hash) is None:
+            events_grouped_by_txhash[tx_hash] = []
+        events_grouped_by_txhash[tx_hash].append(action)
+    save_pickle(events_grouped_by_txhash, f'{args.pool_address}/events_grouped_by_txhash.pickle')
+    events_grouped_by_txhash = load_pickle(f'{args.pool_address}/events_grouped_by_txhash.pickle')
+
+    grouped_events = list(map(lambda key: events_grouped_by_txhash[key], events_grouped_by_txhash))
+
+    # Remove pool share transfers
+    grouped_events = list(filter(lambda acts: not (len(acts) == 1 and acts[0].action_type == 'transfer'), grouped_events))
+
+    actions = stage3_merge_actions(args.pool_address, grouped_events)

     # save_pickle(actions, f"{args.pool_address}/actions.pickle")
     # actions = load_pickle(f"{args.pool_address}/actions.pickle")
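For reference, the grouping step added in this commit builds one list of events per transaction hash. A minimal sketch of the same idea using `collections.defaultdict`, assuming each event object exposes a `tx_hash` attribute as in the diff (the `save_pickle`/`load_pickle` helpers used above are defined elsewhere in this script and are not reproduced here):

```python
from collections import defaultdict

def group_events_by_txhash(events):
    """Group event objects by transaction hash, mirroring the loop in the diff above."""
    grouped = defaultdict(list)
    for event in events:
        grouped[event.tx_hash].append(event)
    # One list of events per transaction, in first-seen order (dicts preserve insertion order).
    return list(grouped.values())

# Transactions whose only event is a pool share transfer are then dropped, as in the diff:
# grouped = [g for g in grouped if not (len(g) == 1 and g[0].action_type == 'transfer')]
```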