@@ -12,6 +12,41 @@ def safe_rate(success, fail):
def avg(times):
    """Return the mean of *times* rounded to 2 decimal places, or 0.0 if empty."""
    if not times:
        return 0.0
    return round(sum(times) / len(times), 2)
1414
def create_aggregated_error_file():
    """Aggregate per-node error files into a single ``error_stats.json``.

    Reads each ``errors_<Node_XX>.json`` file under ``ERROR_DIR``, collects
    the non-empty ones into one mapping keyed by node name, writes that
    mapping to ``error_stats.json`` in the same directory, and returns it.

    Returns:
        dict: ``{node_name: errors}`` for every node whose file exists and
        contains at least one error entry.
    """
    aggregated_errors = {}

    # Known node names for both networks.
    # NOTE(review): this list is duplicated in print_all_errors() — keep in sync.
    testnet_nodes = [
        "Node 01", "Node 04", "Node 05", "Node 06", "Node 07", "Node 08",
        "Node 09", "Node 10", "Node 13", "Node 14", "Node 21", "Node 23", "Node 37",
    ]
    mainnet_nodes = [
        "Node 25", "Node 26", "Node 27", "Node 28", "Node 29", "Node 30",
    ]
    all_nodes = testnet_nodes + mainnet_nodes

    # Best-effort read of each per-node file: a missing or corrupt file for
    # one node must not abort aggregation for the rest. Catch only the
    # failures a file read can actually produce instead of a bare Exception,
    # so genuine programming errors still surface.
    for node_name in all_nodes:
        node_file = os.path.join(ERROR_DIR, f"errors_{node_name.replace(' ', '_')}.json")
        if os.path.exists(node_file):
            try:
                with open(node_file, 'r') as f:
                    node_errors = json.load(f)
                if node_errors:  # only record nodes that actually have errors
                    aggregated_errors[node_name] = node_errors
            except (OSError, json.JSONDecodeError) as e:
                print(f"⚠️ Warning: Could not read {node_file}: {e}")

    # Persist the combined view for downstream consumers.
    aggregated_file = os.path.join(ERROR_DIR, "error_stats.json")
    with open(aggregated_file, 'w') as f:
        json.dump(aggregated_errors, f, indent=2)

    return aggregated_errors
49+
1550def get_all_errors_for_node (node_name ):
1651 """Get all errors for a specific node from multiple sources"""
1752 all_errors = {}
@@ -56,6 +91,9 @@ def get_all_errors_for_node(node_name):
5691def print_all_errors ():
5792 print ("\n 📊 Error Breakdown by Node:\n " )
5893
94+ # Create aggregated error file from individual files
95+ aggregated_errors = create_aggregated_error_file ()
96+
5997 # Define all possible nodes for both testnet and mainnet
6098 testnet_nodes = [
6199 "Node 01" , "Node 04" , "Node 05" , "Node 06" , "Node 07" , "Node 08" ,
@@ -68,19 +106,10 @@ def print_all_errors():
68106
69107 all_nodes = testnet_nodes + mainnet_nodes
70108
71- # Check for aggregated error file first
72- aggregated_file = os .path .join (ERROR_DIR , "error_stats.json" )
73- nodes_with_errors = []
74-
75- if os .path .exists (aggregated_file ):
76- try :
77- with open (aggregated_file , "r" ) as f :
78- error_data = json .load (f )
79- nodes_with_errors = list (error_data .keys ())
80- except Exception :
81- pass
109+ # Get nodes that have errors
110+ nodes_with_errors = list (aggregated_errors .keys ())
82111
83- # If no aggregated file or no errors, check individual files
112+ # If no aggregated errors, check individual files
84113 if not nodes_with_errors :
85114 for node_name in all_nodes :
86115 node_file = os .path .join (ERROR_DIR , f"errors_{ node_name .replace (' ' , '_' )} .json" )
0 commit comments