Skip to content

Commit 0a5a890

Browse files
committed
MX-17306 Header V3 fixes
1 parent 574c585 commit 0a5a890

File tree

9 files changed

+380
-139
lines changed

9 files changed

+380
-139
lines changed

README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,4 +43,5 @@ pip install -r ./requirements-dev.txt --upgrade
4343
EXAMPLE USAGE
4444
```
4545
python -m multiversx_cross_shard_analysis.gather_data --path /home/mihaela/Downloads/cross-shard-execution-anal-9afe696daf.zip
46+
python -m multiversx_cross_shard_analysis.headers_timeline_report --run-name cross-shard-execution-anal-6cc663f7af
4647
```

multiversx_cross_shard_analysis/gather_data.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ def gather_data():
2626
# Generate reports
2727
mb_data = MiniblockData(handler.shard_data.miniblocks).get_data_for_round_report()
2828
out_folder = os.path.join(handler.run_name, "MiniblocksShardTimeline")
29+
out_folder = os.path.join('Reports', out_folder)
2930
os.makedirs(out_folder, exist_ok=True)
3031

3132
# generate PDFs per epoch
@@ -38,6 +39,7 @@ def gather_data():
3839

3940
mb_data = MiniblockData(handler.shard_data.miniblocks).get_data_for_detail_report()
4041
out_folder = os.path.join(handler.run_name, "MiniblocksTimelineDetail")
42+
out_folder = os.path.join('Reports', out_folder)
4143
os.makedirs(out_folder, exist_ok=True)
4244

4345
for epoch in sorted(mb_data.keys()):
@@ -48,6 +50,7 @@ def gather_data():
4850

4951
input_data = handler.shard_data.get_data_for_header_horizontal_report()
5052
out_folder = os.path.join(handler.run_name, "NonceTimeline")
53+
out_folder = os.path.join('Reports', out_folder)
5154
os.makedirs(out_folder, exist_ok=True)
5255

5356
for epoch in sorted(input_data.keys()):

multiversx_cross_shard_analysis/header_structures.py

Lines changed: 16 additions & 95 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,8 @@ def __init__(self):
5858
self.seen_miniblock_hashes = set()
5959

6060
def add_node(self, node_data: HeaderData):
61+
if node_data.header_dictionary['commited_headers'] == []:
62+
node_data.header_dictionary['commited_headers'] = node_data.header_dictionary['proposed_headers'].copy()
6163
for header_status in node_data.header_dictionary.keys():
6264
for header in node_data.header_dictionary[header_status]:
6365
shard_id = get_shard_id(header)
@@ -74,102 +76,16 @@ def add_node(self, node_data: HeaderData):
7476
def add_miniblocks(self, header: dict[str, Any], status: str):
7577
header_struct = Header(header, status)
7678

77-
for mention_type, mb in header_struct.miniblocks:
79+
for mention_type, mb, metadata in header_struct.miniblocks:
7880
mb_hash = mb.get('hash')
7981
if mb_hash not in self.seen_miniblock_hashes:
8082
self.seen_miniblock_hashes.add(mb_hash)
8183
self.miniblocks[mb_hash] = mb.copy()
8284
self.miniblocks[mb_hash]['mentioned'] = []
83-
metadata = header_struct.metadata.copy()
85+
# metadata = header_struct.metadata.copy()
8486
metadata["reserved"] = decode_reserved_field(mb.get("reserved", ""), mb.get("txCount", 0))
8587
self.miniblocks[mb_hash]['mentioned'].append((mention_type, metadata))
8688

87-
def get_data_for_header_vertical_report(self) -> dict[str, dict[int, Any]]:
88-
miniblocks = MiniblockData(self.miniblocks)
89-
report: dict[str, dict[int, Any]] = {}
90-
last_epoch = None
91-
92-
for shard_id, header_data in self.parsed_headers.items():
93-
header_group_count = 0
94-
header_count = 0
95-
header_group_name = ""
96-
last_epoch = None
97-
98-
for header in sorted(header_data.header_dictionary['commited_headers'],
99-
key=lambda x: get_value('nonce', x)):
100-
101-
epoch = get_value('epoch', header)
102-
103-
# reset counters when epoch changes
104-
if epoch != last_epoch:
105-
header_group_count = 0
106-
header_count = 0
107-
header_group_name = ""
108-
last_epoch = epoch
109-
110-
# ensure epoch entry exists and contains all shards as keys
111-
if epoch not in report:
112-
report[epoch] = {sid: {} for sid in self.parsed_headers.keys()}
113-
114-
if get_value('miniBlockHeaders', header) == []:
115-
continue
116-
117-
nonce = get_value('nonce', header)
118-
round_num = get_value('round', header)
119-
print(f"Processing header: epoch={epoch}, shard={shard_id}, nonce={nonce}, round={round_num}")
120-
121-
# build result for this header (only cross-shard miniblocks)
122-
result: dict[int, list] = {}
123-
for miniblock in [mb for mb in get_value('miniBlockHeaders', header) if mb.get('senderShardID') == shard_id and mb.get('receiverShardID') != mb.get('senderShardID')]:
124-
print(f" Processing miniblock: hash={miniblock.get('hash')}, senderShardID={miniblock.get('senderShardID')}, receiverShardID={miniblock.get('receiverShardID')}")
125-
mb_hash = miniblock.get('hash')
126-
for mention_type, metadata in self.miniblocks[mb_hash]['mentioned']:
127-
# skip proposed mentions
128-
if 'proposed' in mention_type:
129-
continue
130-
131-
rn = metadata['round']
132-
color = miniblocks.get_color_for_state(mention_type, miniblock['txCount'], metadata)
133-
shard_name = f'Shard {metadata["shard_id"]}' if metadata["shard_id"] != 4294967295 else "MetaShard"
134-
# append tuple (label, info, color)
135-
result.setdefault(rn, []).append((shard_name, mb_hash[:15] + '...', COLORS_MAPPING[color]))
136-
137-
# if result empty -> we don't include this nonce at all, don't count it
138-
if not result:
139-
continue
140-
141-
# --- Add this nonce to the report and handle grouping ---
142-
# if group start (every 5 actual added nonces)
143-
group_size = 5
144-
if header_count % group_size == 0:
145-
header_group_count += 1
146-
header_group_name = f"Nonces {nonce}"
147-
# initialize structure for this group
148-
report[epoch][shard_id][header_group_count] = {
149-
'group_name': header_group_name,
150-
'rounds': (round_num, round_num),
151-
'nonces': {}
152-
}
153-
print(f"Creating new header group: {header_group_name}")
154-
else:
155-
# extend existing group's name
156-
header_group_name += f" - {nonce}"
157-
report[epoch][shard_id][header_group_count]['group_name'] = header_group_name
158-
159-
# store the nonce's data
160-
report[epoch][shard_id][header_group_count]['nonces'][nonce] = result
161-
162-
# update group's rounds min/max based on result keys
163-
min_r, max_r = report[epoch][shard_id][header_group_count]['rounds']
164-
actual_min = min(result.keys())
165-
actual_max = max(result.keys())
166-
report[epoch][shard_id][header_group_count]['rounds'] = (min(min_r, actual_min), max(max_r, actual_max))
167-
168-
# increment header_count because we added this nonce
169-
header_count += 1
170-
171-
return report
172-
17389
def get_data_for_header_horizontal_report(self) -> dict[str, dict[int, Any]]:
17490
miniblocks = MiniblockData(self.miniblocks)
17591
report: dict[str, dict[int, Any]] = {}
@@ -193,7 +109,7 @@ def get_data_for_header_horizontal_report(self) -> dict[str, dict[int, Any]]:
193109

194110
# build result for this header (only cross-shard miniblocks)
195111
result: dict[int, list] = {}
196-
for miniblock in [mb for mb in get_value('miniBlockHeaders', header) if mb.get('senderShardID') == shard_id and mb.get('receiverShardID') != mb.get('senderShardID')]:
112+
for miniblock in [mb for mb in get_value('miniBlockHeaders', header) if mb.get('senderShardID') == shard_id]:
197113
mb_hash = miniblock.get('hash')
198114
for mention_type, metadata in self.miniblocks[mb_hash]['mentioned']:
199115
# skip proposed mentions
@@ -228,7 +144,7 @@ def get_data_for_header_horizontal_report(self) -> dict[str, dict[int, Any]]:
228144
class Header:
229145
def __init__(self, header: dict[str, Any], status: str):
230146
self.metadata: dict[str, Any] = self.get_header_metadata(header)
231-
self.miniblocks: list[tuple[str, dict[str, Any]]] = self.get_miniblocks(header, status)
147+
self.miniblocks: list[tuple[str, dict[str, Any], dict[str, Any]]] = self.get_miniblocks(header, status)
232148

233149
# returns 'origin' or 'dest' based on miniblock senderShardID
234150
def get_miniblock_shard_type(self, miniblock_shard_id: int) -> str:
@@ -244,29 +160,34 @@ def get_header_metadata(self, header: dict[str, Any]) -> dict[str, Any]:
244160
"shard_id": header.get('shardID', 4294967295),
245161
}
246162

247-
def get_miniblocks(self, header: dict[str, Any], status: str) -> list[tuple[str, dict[str, Any]]]:
163+
def get_miniblocks(self, header: dict[str, Any], status: str) -> list[tuple[str, dict[str, Any], dict[str, Any]]]:
248164
miniblocks = []
249165
if Header.isHeaderV2(header):
250166
header = header['header']
251167
for miniblock in header.get('miniBlockHeaders', []):
252168
miniblock_mention = self.get_miniblock_shard_type(miniblock["senderShardID"]) + f'_{status}'
253-
miniblocks.append((miniblock_mention, miniblock))
169+
miniblocks.append((miniblock_mention, miniblock, self.metadata.copy()))
254170
if Header.isMetaHeader(header):
255171
for shard_header in header['shardInfo']:
256172
shard_metadata = self.get_header_metadata(shard_header)
257173
for miniblock in shard_header.get('shardMiniBlockHeaders', []):
258174
miniblock_mention = f'{meta}_{origin_shard if shard_metadata['shard_id'] == miniblock['senderShardID'] else dest_shard}_{status}'
259-
miniblocks.append((miniblock_mention, miniblock))
175+
miniblocks.append((miniblock_mention, miniblock, self.metadata.copy()))
260176
if Header.isMetaHeaderV3(header):
261177
for exec_result in shard_header.get('executionResults', []):
262178
for miniblock in exec_result.get('miniBlockHeaders', []):
263179
miniblock_mention = f'{meta}_{origin_shard if shard_metadata["shard_id"] == miniblock["senderShardID"] else dest_shard}_exec_{status}'
264-
miniblocks.append((miniblock_mention, miniblock))
180+
miniblocks.append((miniblock_mention, miniblock, self.metadata.copy()))
265181
if Header.isHeaderV3(header):
266182
for exec_result in header['executionResults']:
183+
base_exec_result = exec_result.get('baseExecutionResult', {})
184+
exec_result_metadata = self.metadata.copy()
185+
exec_result_metadata['nonce'] = base_exec_result.get('headerNonce', 0)
186+
267187
for miniblock in exec_result.get('miniBlockHeaders', []):
268188
miniblock_mention = self.get_miniblock_shard_type(miniblock["senderShardID"]) + f'_{status}_exec'
269-
miniblocks.append((miniblock_mention, miniblock))
189+
exec_result_metadata['exec_result_hash'] = miniblock.get('hash', '')
190+
miniblocks.append((miniblock_mention, miniblock, exec_result_metadata.copy()))
270191

271192
return miniblocks
272193

multiversx_cross_shard_analysis/headers_timeline_report.py

Lines changed: 55 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -9,12 +9,14 @@
99
from reportlab.lib.pagesizes import A4
1010
from reportlab.lib.styles import getSampleStyleSheet
1111
from reportlab.platypus import (Flowable, PageBreak, Paragraph,
12-
SimpleDocTemplate, Spacer, Table, TableStyle)
12+
SimpleDocTemplate, Spacer, LongTable, TableStyle)
1313

1414
from multiversx_cross_shard_analysis.constants import COLORS_MAPPING, Colors
1515
from multiversx_cross_shard_analysis.header_structures import (HeaderData,
1616
ShardData)
1717

18+
from multiversx_cross_shard_analysis.miniblock_data import MiniblockData
19+
1820
# -----------------------------
1921
# CONFIG (mirrors miniblock report)
2022
# -----------------------------
@@ -38,32 +40,35 @@
3840
# build stacked rectangles (same as miniblock version)
3941
# -----------------------------
4042

41-
def build_stack_for_round(items: list[tuple[str, str, colors.Color]], col_width: float) -> Drawing:
42-
rows = max(1, len(items))
43-
total_h = rows * RECT_H
44-
d = Drawing(col_width, total_h)
43+
def build_stack_rows(items: list[tuple[str, str, colors.Color]], col_width: float) -> list[Drawing]:
44+
"""
45+
Instead of one giant Drawing, we return a list of small ones.
46+
Each drawing represents one row in the vertical stack.
47+
"""
48+
row_drawings = []
49+
50+
if len(items) == 0:
51+
# Create a single "no data" row
52+
d = Drawing(col_width, RECT_H)
53+
rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4
54+
d.add(Rect(0, 2, rect_w, 12, fillColor=colors.whitesmoke, strokeColor=colors.grey)) # type: ignore
55+
d.add(String(RECT_PADDING_X + 2, 6, "no data", fontSize=RECT_LABEL_FONT))
56+
row_drawings.append(d)
57+
return row_drawings
4558

46-
y = total_h - RECT_H
4759
for label, info, col in items:
60+
# Create a small drawing for just this one item
61+
d = Drawing(col_width, RECT_H)
4862
rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4
4963

50-
d.add(Rect(0, y + 2, rect_w, RECT_H - 4, fillColor=col, strokeColor=colors.black)) # type: ignore
64+
d.add(Rect(0, 2, rect_w, RECT_H - 4, fillColor=col, strokeColor=colors.black)) # type: ignore
5165

5266
text_x = RECT_PADDING_X + 3
53-
base_y = y + 4
67+
d.add(String(text_x, 12, label, fontSize=RECT_LABEL_FONT))
68+
d.add(String(text_x, 4, info, fontSize=RECT_INFO_FONT))
69+
row_drawings.append(d)
5470

55-
d.add(String(text_x, base_y + 8, label, fontSize=RECT_LABEL_FONT))
56-
d.add(String(text_x, base_y, info, fontSize=RECT_INFO_FONT))
57-
58-
y -= RECT_H
59-
60-
if len(items) == 0:
61-
rect_w = max(2, col_width - RECT_PADDING_X * 2) - 4
62-
mid = total_h / 2
63-
d.add(Rect(0, mid - 6, rect_w, 12, fillColor=colors.whitesmoke, strokeColor=colors.grey)) # type: ignore
64-
d.add(String(RECT_PADDING_X + 2, mid - 2, "no data", fontSize=RECT_LABEL_FONT))
65-
66-
return d
71+
return row_drawings
6772

6873
# -----------------------------
6974
# check for round gaps
@@ -96,29 +101,43 @@ def build_nonce_section(shard_id: int, nonce: int, rounds: list[int], data: dict
96101
num_cols = len(rounds)
97102
col_width = usable_width / max(1, num_cols)
98103

104+
# 1. Build the Header Row
99105
header = [Paragraph(f"<b>{r}</b>", styles["BodyText"]) for r in rounds]
100106

101-
cells = []
102-
for r in rounds:
103-
items = data.get(r, [])
104-
drawing = build_stack_for_round(items, col_width)
105-
cells.append(drawing)
106-
107-
tbl = Table(
108-
[header, cells],
107+
# 2. Transpose the stacks into rows
108+
# We need to find the max height among all columns to normalize the row count
109+
column_stacks = [build_stack_rows(data.get(r, []), col_width) for r in rounds]
110+
max_rows = max(len(stack) for stack in column_stacks)
111+
112+
table_data = [header]
113+
114+
# Fill the table row by row
115+
for i in range(max_rows):
116+
row = []
117+
for stack in column_stacks:
118+
if i < len(stack):
119+
row.append(stack[i])
120+
else:
121+
row.append("") # Empty cell if this column has fewer items
122+
table_data.append(row)
123+
124+
tbl = LongTable(
125+
table_data,
109126
colWidths=[col_width] * num_cols,
110127
hAlign="LEFT",
128+
splitByRow=True, # This allows the table to break across pages between rows
111129
)
112130

113131
tbl_style = [
114132
("GRID", (0, 0), (-1, -1), 0.25, colors.grey),
115133
("BACKGROUND", (0, 0), (-1, 0), colors.whitesmoke),
116134
("ALIGN", (0, 0), (-1, 0), "CENTER"),
117135
("VALIGN", (0, 1), (-1, -1), "TOP"),
136+
("TOPPADDING", (0, 0), (-1, -1), 0), # Tighten padding for large lists
137+
("BOTTOMPADDING", (0, 0), (-1, -1), 0),
118138
("FONTSIZE", (0, 0), (-1, 0), ROUND_HEADER_FONT),
119139
]
120140

121-
# add red border if highlighted
122141
if highlight:
123142
tbl_style.append(("BOX", (0, 0), (-1, -1), 2, colors.red))
124143

@@ -245,7 +264,8 @@ def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any
245264
}
246265

247266

248-
if __name__ == "__main__":
267+
def main():
268+
249269
parser = argparse.ArgumentParser(description="Nonce timeline report generator")
250270

251271
group = parser.add_mutually_exclusive_group(required=True)
@@ -298,7 +318,7 @@ def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any
298318
headers.miniblocks = data["miniblocks"]
299319

300320
# process
301-
input_data = headers.get_data_for_header_horizontal_report()
321+
input_data = MiniblockData(headers.miniblocks).get_data_for_header_report()
302322

303323
# output path
304324
out_folder = os.path.join(base_path, "NonceTimeline")
@@ -308,3 +328,7 @@ def build_nonce_timeline_pdf(shards_data: dict[int, dict[int, dict[int, list[Any
308328
outfile = os.path.join(out_folder, f"nonce_timeline_report_{epoch}.pdf")
309329
build_nonce_timeline_pdf(input_data[epoch], outname=outfile)
310330
print(f"Nonce timeline report for Epoch {epoch} generated: {outfile}")
331+
332+
333+
if __name__ == "__main__":
334+
main()

0 commit comments

Comments (0)