Skip to content

Commit 2394b35

Browse files
Bojan131usebeforefree
authored and committed
Update aggregate and fix for publish
1 parent 77c47af commit 2394b35

File tree

7 files changed

+132
-157
lines changed

7 files changed

+132
-157
lines changed

scripts/print_aggregated_errors.py

Lines changed: 41 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,41 @@ def safe_rate(success, fail):
1212
def avg(times):
1313
return round(sum(times) / len(times), 2) if times else 0.0
1414

15+
def create_aggregated_error_file():
16+
"""Create aggregated error_stats.json from individual node files"""
17+
aggregated_errors = {}
18+
19+
# Define all possible nodes
20+
testnet_nodes = [
21+
"Node 01", "Node 04", "Node 05", "Node 06", "Node 07", "Node 08",
22+
"Node 09", "Node 10", "Node 13", "Node 14", "Node 21", "Node 23", "Node 37"
23+
]
24+
25+
mainnet_nodes = [
26+
"Node 25", "Node 26", "Node 27", "Node 28", "Node 29", "Node 30"
27+
]
28+
29+
all_nodes = testnet_nodes + mainnet_nodes
30+
31+
# Read each individual node error file
32+
for node_name in all_nodes:
33+
node_file = os.path.join(ERROR_DIR, f"errors_{node_name.replace(' ', '_')}.json")
34+
if os.path.exists(node_file):
35+
try:
36+
with open(node_file, 'r') as f:
37+
node_errors = json.load(f)
38+
if node_errors: # Only add if there are errors
39+
aggregated_errors[node_name] = node_errors
40+
except Exception as e:
41+
print(f"⚠️ Warning: Could not read {node_file}: {e}")
42+
43+
# Write aggregated file
44+
aggregated_file = os.path.join(ERROR_DIR, "error_stats.json")
45+
with open(aggregated_file, 'w') as f:
46+
json.dump(aggregated_errors, f, indent=2)
47+
48+
return aggregated_errors
49+
1550
def get_all_errors_for_node(node_name):
1651
"""Get all errors for a specific node from multiple sources"""
1752
all_errors = {}
@@ -56,6 +91,9 @@ def get_all_errors_for_node(node_name):
5691
def print_all_errors():
5792
print("\n📊 Error Breakdown by Node:\n")
5893

94+
# Create aggregated error file from individual files
95+
aggregated_errors = create_aggregated_error_file()
96+
5997
# Define all possible nodes for both testnet and mainnet
6098
testnet_nodes = [
6199
"Node 01", "Node 04", "Node 05", "Node 06", "Node 07", "Node 08",
@@ -68,19 +106,10 @@ def print_all_errors():
68106

69107
all_nodes = testnet_nodes + mainnet_nodes
70108

71-
# Check for aggregated error file first
72-
aggregated_file = os.path.join(ERROR_DIR, "error_stats.json")
73-
nodes_with_errors = []
74-
75-
if os.path.exists(aggregated_file):
76-
try:
77-
with open(aggregated_file, "r") as f:
78-
error_data = json.load(f)
79-
nodes_with_errors = list(error_data.keys())
80-
except Exception:
81-
pass
109+
# Get nodes that have errors
110+
nodes_with_errors = list(aggregated_errors.keys())
82111

83-
# If no aggregated file or no errors, check individual files
112+
# If no aggregated errors, check individual files
84113
if not nodes_with_errors:
85114
for node_name in all_nodes:
86115
node_file = os.path.join(ERROR_DIR, f"errors_{node_name.replace(' ', '_')}.json")

tests/mainnet/Base_Mainnet.py

Lines changed: 15 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -116,38 +116,29 @@ def log_error(error, node_name, step='unknown', remote_node=None):
116116
else:
117117
error_stats[node_name][key] = 1
118118

119-
# Also store in a temporary file to ensure persistence across test session
120-
error_file = "test_output/error_stats.json"
119+
# Write to individual node error file (parallel-safe)
120+
node_error_file = f"test_output/errors_{node_name.replace(' ', '_')}.json"
121121
os.makedirs("test_output", exist_ok=True)
122122

123-
# Load existing errors - merge instead of overwrite
124-
if os.path.exists(error_file):
125-
with open(error_file, 'r') as f:
123+
# Load existing errors for this node
124+
if os.path.exists(node_error_file):
125+
with open(node_error_file, 'r') as f:
126126
try:
127-
error_data = json.load(f)
127+
node_errors = json.load(f)
128128
except:
129-
error_data = {}
129+
node_errors = {}
130130
else:
131-
error_data = {}
132-
133-
# Update errors - merge instead of overwrite
134-
if node_name not in error_data:
135-
error_data[node_name] = {}
131+
node_errors = {}
136132

137-
if key in error_data[node_name]:
138-
error_data[node_name][key] += 1
133+
# Update errors for this node
134+
if key in node_errors:
135+
node_errors[key] += 1
139136
else:
140-
error_data[node_name][key] = 1
137+
node_errors[key] = 1
141138

142-
# Save back to file immediately
143-
f = open(error_file, 'w')
144-
try:
145-
json.dump(error_data, f, indent=2)
146-
# Force flush to ensure file is written
147-
f.flush()
148-
os.fsync(f.fileno())
149-
finally:
150-
f.close()
139+
# Save back to individual node file
140+
with open(node_error_file, 'w') as f:
141+
json.dump(node_errors, f, indent=2)
151142

152143
def safe_rate(success, fail):
153144
total = success + fail

tests/mainnet/Gnosis_Mainnet.py

Lines changed: 15 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -116,38 +116,29 @@ def log_error(error, node_name, step='unknown', remote_node=None):
116116
else:
117117
error_stats[node_name][key] = 1
118118

119-
# Also store in a temporary file to ensure persistence across test session
120-
error_file = "test_output/error_stats.json"
119+
# Write to individual node error file (parallel-safe)
120+
node_error_file = f"test_output/errors_{node_name.replace(' ', '_')}.json"
121121
os.makedirs("test_output", exist_ok=True)
122122

123-
# Load existing errors - merge instead of overwrite
124-
if os.path.exists(error_file):
125-
with open(error_file, 'r') as f:
123+
# Load existing errors for this node
124+
if os.path.exists(node_error_file):
125+
with open(node_error_file, 'r') as f:
126126
try:
127-
error_data = json.load(f)
127+
node_errors = json.load(f)
128128
except:
129-
error_data = {}
129+
node_errors = {}
130130
else:
131-
error_data = {}
132-
133-
# Update errors - merge instead of overwrite
134-
if node_name not in error_data:
135-
error_data[node_name] = {}
131+
node_errors = {}
136132

137-
if key in error_data[node_name]:
138-
error_data[node_name][key] += 1
133+
# Update errors for this node
134+
if key in node_errors:
135+
node_errors[key] += 1
139136
else:
140-
error_data[node_name][key] = 1
137+
node_errors[key] = 1
141138

142-
# Save back to file immediately
143-
f = open(error_file, 'w')
144-
try:
145-
json.dump(error_data, f, indent=2)
146-
# Force flush to ensure file is written
147-
f.flush()
148-
os.fsync(f.fileno())
149-
finally:
150-
f.close()
139+
# Save back to individual node file
140+
with open(node_error_file, 'w') as f:
141+
json.dump(node_errors, f, indent=2)
151142

152143
def safe_rate(success, fail):
153144
total = success + fail

tests/mainnet/Neuroweb_Mainnet.py

Lines changed: 16 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@
2525
# https://positron.origin-trail.network - public node
2626

2727
nodes = [
28-
{"name": "Node 25", "hostname": "https://v6-pegasus-node-25.origin-trail.network"},
28+
{"name": "Node 25", "hostname": "https://positron.origin-trail.network"},
2929
{"name": "Node 26", "hostname": "https://v6-pegasus-node-26.origin-trail.network"},
3030
{"name": "Node 27", "hostname": "https://v6-pegasus-node-27.origin-trail.network"},
3131
{"name": "Node 28", "hostname": "https://v6-pegasus-node-28.origin-trail.network"},
@@ -116,38 +116,29 @@ def log_error(error, node_name, step='unknown', remote_node=None):
116116
else:
117117
error_stats[node_name][key] = 1
118118

119-
# Also store in a temporary file to ensure persistence across test session
120-
error_file = "test_output/error_stats.json"
119+
# Write to individual node error file (parallel-safe)
120+
node_error_file = f"test_output/errors_{node_name.replace(' ', '_')}.json"
121121
os.makedirs("test_output", exist_ok=True)
122122

123-
# Load existing errors - merge instead of overwrite
124-
if os.path.exists(error_file):
125-
with open(error_file, 'r') as f:
123+
# Load existing errors for this node
124+
if os.path.exists(node_error_file):
125+
with open(node_error_file, 'r') as f:
126126
try:
127-
error_data = json.load(f)
127+
node_errors = json.load(f)
128128
except:
129-
error_data = {}
129+
node_errors = {}
130130
else:
131-
error_data = {}
132-
133-
# Update errors - merge instead of overwrite
134-
if node_name not in error_data:
135-
error_data[node_name] = {}
131+
node_errors = {}
136132

137-
if key in error_data[node_name]:
138-
error_data[node_name][key] += 1
133+
# Update errors for this node
134+
if key in node_errors:
135+
node_errors[key] += 1
139136
else:
140-
error_data[node_name][key] = 1
137+
node_errors[key] = 1
141138

142-
# Save back to file immediately
143-
f = open(error_file, 'w')
144-
try:
145-
json.dump(error_data, f, indent=2)
146-
# Force flush to ensure file is written
147-
f.flush()
148-
os.fsync(f.fileno())
149-
finally:
150-
f.close()
139+
# Save back to individual node file
140+
with open(node_error_file, 'w') as f:
141+
json.dump(node_errors, f, indent=2)
151142

152143
def safe_rate(success, fail):
153144
total = success + fail

tests/testnet/Base_Testnet.py

Lines changed: 15 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -121,38 +121,29 @@ def log_error(error, node_name, step='unknown', remote_node=None):
121121
else:
122122
error_stats[node_name][key] = 1
123123

124-
# Also store in a temporary file to ensure persistence across test session
125-
error_file = "test_output/error_stats.json"
124+
# Write to individual node error file (parallel-safe)
125+
node_error_file = f"test_output/errors_{node_name.replace(' ', '_')}.json"
126126
os.makedirs("test_output", exist_ok=True)
127127

128-
# Load existing errors - merge instead of overwrite
129-
if os.path.exists(error_file):
130-
with open(error_file, 'r') as f:
128+
# Load existing errors for this node
129+
if os.path.exists(node_error_file):
130+
with open(node_error_file, 'r') as f:
131131
try:
132-
error_data = json.load(f)
132+
node_errors = json.load(f)
133133
except:
134-
error_data = {}
134+
node_errors = {}
135135
else:
136-
error_data = {}
137-
138-
# Update errors - merge instead of overwrite
139-
if node_name not in error_data:
140-
error_data[node_name] = {}
136+
node_errors = {}
141137

142-
if key in error_data[node_name]:
143-
error_data[node_name][key] += 1
138+
# Update errors for this node
139+
if key in node_errors:
140+
node_errors[key] += 1
144141
else:
145-
error_data[node_name][key] = 1
142+
node_errors[key] = 1
146143

147-
# Save back to file immediately
148-
f = open(error_file, 'w')
149-
try:
150-
json.dump(error_data, f, indent=2)
151-
# Force flush to ensure file is written
152-
f.flush()
153-
os.fsync(f.fileno())
154-
finally:
155-
f.close()
144+
# Save back to individual node file
145+
with open(node_error_file, 'w') as f:
146+
json.dump(node_errors, f, indent=2)
156147

157148
def safe_rate(success, fail):
158149
total = success + fail

tests/testnet/Gnosis_Testnet.py

Lines changed: 15 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -121,38 +121,29 @@ def log_error(error, node_name, step='unknown', remote_node=None):
121121
else:
122122
error_stats[node_name][key] = 1
123123

124-
# Also store in a temporary file to ensure persistence across test session
125-
error_file = "test_output/error_stats.json"
124+
# Write to individual node error file (parallel-safe)
125+
node_error_file = f"test_output/errors_{node_name.replace(' ', '_')}.json"
126126
os.makedirs("test_output", exist_ok=True)
127127

128-
# Load existing errors - merge instead of overwrite
129-
if os.path.exists(error_file):
130-
with open(error_file, 'r') as f:
128+
# Load existing errors for this node
129+
if os.path.exists(node_error_file):
130+
with open(node_error_file, 'r') as f:
131131
try:
132-
error_data = json.load(f)
132+
node_errors = json.load(f)
133133
except:
134-
error_data = {}
134+
node_errors = {}
135135
else:
136-
error_data = {}
137-
138-
# Update errors - merge instead of overwrite
139-
if node_name not in error_data:
140-
error_data[node_name] = {}
136+
node_errors = {}
141137

142-
if key in error_data[node_name]:
143-
error_data[node_name][key] += 1
138+
# Update errors for this node
139+
if key in node_errors:
140+
node_errors[key] += 1
144141
else:
145-
error_data[node_name][key] = 1
142+
node_errors[key] = 1
146143

147-
# Save back to file immediately
148-
f = open(error_file, 'w')
149-
try:
150-
json.dump(error_data, f, indent=2)
151-
# Force flush to ensure file is written
152-
f.flush()
153-
os.fsync(f.fileno())
154-
finally:
155-
f.close()
144+
# Save back to individual node file
145+
with open(node_error_file, 'w') as f:
146+
json.dump(node_errors, f, indent=2)
156147

157148
def safe_rate(success, fail):
158149
total = success + fail

0 commit comments

Comments
 (0)