|
1 | 1 | import os |
2 | 2 | import json |
3 | 3 |
|
4 | | -ERROR_DIR = "test_output" # or "." if your files are in the current folder |
| 4 | +ERROR_DIR = "test_output" # Directory where error files are stored |
5 | 5 |
|
def load_error_files():
    """Return the names of the per-node error files found in ERROR_DIR.

    A per-node file is named like ``errors_Node_<id>.json``. When the
    directory does not exist, a warning is printed and an empty list
    is returned so callers can fall back to the aggregated file.
    """
    if not os.path.exists(ERROR_DIR):
        print(f"⚠️ Error directory not found: {ERROR_DIR}")
        return []

    matching = []
    for entry in os.listdir(ERROR_DIR):
        if entry.startswith("errors_Node_") and entry.endswith(".json"):
            matching.append(entry)
    return matching
11 | 16 |
|
def load_aggregated_error_file():
    """Return the parsed contents of ERROR_DIR/error_stats.json.

    Returns an empty dict when the file is absent or cannot be read
    or parsed; a read/parse failure is reported on stdout.
    """
    path = os.path.join(ERROR_DIR, "error_stats.json")

    # Guard clause instead of nesting: missing file -> empty result.
    if not os.path.exists(path):
        return {}

    try:
        with open(path, "r") as f:
            return json.load(f)
    except Exception as e:
        print(f"⚠️ Failed to read aggregated error file: {str(e)}")
        return {}
| 29 | + |
def print_error_summary():
    """Print a per-node summary of collected errors to stdout.

    Prefers the individual per-node error files (errors_Node_*.json);
    when none exist, falls back to the aggregated error_stats.json.
    Each node is listed with its error messages and occurrence counts.
    """
    print("\n\n📊 Global Error Summary:\n")

    # Try individual files first (preferred method)
    error_files = load_error_files()

    if error_files:
        print("📁 Using individual node error files:")
        for filename in sorted(error_files):
            # e.g. "errors_Node_3.json" -> "Node 3"
            node_name = filename.replace("errors_", "").replace(".json", "").replace("_", " ")
            print(f"🔧 {node_name}")

            try:
                with open(os.path.join(ERROR_DIR, filename), "r") as f:
                    errors = json.load(f)

                if not errors:
                    print(" ✅ No errors")
                else:
                    for message, count in errors.items():
                        print(f" • {count}x {message}")

            except Exception as e:
                # Fix: name the offending file — the previous message said
                # "(unknown)" even though `filename` is in scope here.
                print(f" ⚠️ Failed to read or parse {filename}: {str(e)}")

            print()

    else:
        # Fallback to aggregated file
        print("📁 Using aggregated error file:")
        error_data = load_aggregated_error_file()

        if not error_data:
            print("✅ No error data found")
            return

        for node_name in sorted(error_data.keys()):
            print(f"🔧 {node_name}")

            node_errors = error_data[node_name]

            if not node_errors:
                print(" ✅ No errors")
            else:
                for error_key, count in node_errors.items():
                    print(f" • {count}x {error_key}")

            print()
35 | 78 |
|
# Script entry point: print the error summary when run directly.
if __name__ == "__main__":
    print_error_summary()
0 commit comments