@@ -81,7 +81,10 @@ def handler():

     information = darshanll.log_get_job(log)

-    log_version = information['metadata']['lib_ver']
+    if 'log_ver' in information:
+        log_version = information['log_ver']
+    else:
+        log_version = information['metadata']['lib_ver']
     library_version = darshanll.get_lib_version()

     # Make sure log format is of the same version
@@ -144,6 +147,100 @@ def handler():
         df_mpiio = None

         total_size_mpiio = 0
+
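+    # DXT trace data placeholders; populated below when the backtrace option is enabled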
+    dxt_posix = None
+    dxt_posix_read_data = None
+    dxt_posix_write_data = None
+    dxt_mpiio = None
+
+    df_lustre = None
+    if "LUSTRE" in report.records:
+        df_lustre = report.records['LUSTRE'].to_df()
+
+    if args.backtrace:
+        if "DXT_POSIX" in report.records:
+            dxt_posix = report.records["DXT_POSIX"].to_df()
+            dxt_posix = pd.DataFrame(dxt_posix)
+            if "address_line_mapping" not in dxt_posix:
+                args.backtrace = False
+            else:
+                read_id = []
+                read_rank = []
+                read_length = []
+                read_offsets = []
+                read_end_time = []
+                read_start_time = []
+                read_operation = []
+
+                write_id = []
+                write_rank = []
+                write_length = []
+                write_offsets = []
+                write_end_time = []
+                write_start_time = []
+                write_operation = []
+
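+                # Collect the file id, rank, length, offset, and timestamps of every read and write segment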
+                for r in zip(dxt_posix['rank'], dxt_posix['read_segments'], dxt_posix['write_segments'], dxt_posix['id']):
+                    if not r[1].empty:
+                        read_id.append([r[3]] * len((r[1]['length'].to_list())))
+                        read_rank.append([r[0]] * len((r[1]['length'].to_list())))
+                        read_length.append(r[1]['length'].to_list())
+                        read_end_time.append(r[1]['end_time'].to_list())
+                        read_start_time.append(r[1]['start_time'].to_list())
+                        read_operation.append(['read'] * len((r[1]['length'].to_list())))
+                        read_offsets.append(r[1]['offset'].to_list())
+
+                    if not r[2].empty:
+                        write_id.append([r[3]] * len((r[2]['length'].to_list())))
+                        write_rank.append([r[0]] * len((r[2]['length'].to_list())))
+                        write_length.append(r[2]['length'].to_list())
+                        write_end_time.append(r[2]['end_time'].to_list())
+                        write_start_time.append(r[2]['start_time'].to_list())
+                        write_operation.append(['write'] * len((r[2]['length'].to_list())))
+                        write_offsets.append(r[2]['offset'].to_list())
+
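+                # Flatten the nested per-record lists into flat per-segment lists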
+                read_id = [element for nestedlist in read_id for element in nestedlist]
+                read_rank = [element for nestedlist in read_rank for element in nestedlist]
+                read_length = [element for nestedlist in read_length for element in nestedlist]
+                read_offsets = [element for nestedlist in read_offsets for element in nestedlist]
+                read_end_time = [element for nestedlist in read_end_time for element in nestedlist]
+                read_operation = [element for nestedlist in read_operation for element in nestedlist]
+                read_start_time = [element for nestedlist in read_start_time for element in nestedlist]
+
+                write_id = [element for nestedlist in write_id for element in nestedlist]
+                write_rank = [element for nestedlist in write_rank for element in nestedlist]
+                write_length = [element for nestedlist in write_length for element in nestedlist]
+                write_offsets = [element for nestedlist in write_offsets for element in nestedlist]
+                write_end_time = [element for nestedlist in write_end_time for element in nestedlist]
+                write_operation = [element for nestedlist in write_operation for element in nestedlist]
+                write_start_time = [element for nestedlist in write_start_time for element in nestedlist]
+
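+                # Assemble the flattened segments into per-operation DataFrames used by the checks below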
+                dxt_posix_read_data = pd.DataFrame(
+                    {
+                        'id': read_id,
+                        'rank': read_rank,
+                        'length': read_length,
+                        'end_time': read_end_time,
+                        'start_time': read_start_time,
+                        'operation': read_operation,
+                        'offsets': read_offsets,
+                    })
+
+                dxt_posix_write_data = pd.DataFrame(
+                    {
+                        'id': write_id,
+                        'rank': write_rank,
+                        'length': write_length,
+                        'end_time': write_end_time,
+                        'start_time': write_start_time,
+                        'operation': write_operation,
+                        'offsets': write_offsets,
+                    })
+
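+        # DXT_MPIIO traces are passed to the MPI-IO collective operation checks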
+        if "DXT_MPIIO" in report.records:
+            dxt_mpiio = report.records["DXT_MPIIO"].to_df()
+            dxt_mpiio = pd.DataFrame(dxt_mpiio)
+

     # Since POSIX will capture both POSIX-only accesses and those comming from MPI-IO, we can subtract those
     if total_size_posix > 0 and total_size_posix >= total_size_mpiio:
@@ -262,7 +359,7 @@ def handler():
     detected_files.columns = ['id', 'total_reads', 'total_writes']
     detected_files.loc[:, 'id'] = detected_files.loc[:, 'id'].astype(str)

-    check_small_operation(total_reads, total_reads_small, total_writes, total_writes_small, detected_files, modules, file_map, df_posix)
+    check_small_operation(total_reads, total_reads_small, total_writes, total_writes_small, detected_files, modules, file_map, dxt_posix, dxt_posix_read_data, dxt_posix_write_data)

     #########################################################################################################################################################################

@@ -271,7 +368,7 @@ def handler():
     total_mem_not_aligned = df['counters']['POSIX_MEM_NOT_ALIGNED'].sum()
     total_file_not_aligned = df['counters']['POSIX_FILE_NOT_ALIGNED'].sum()

-    check_misaligned(total_operations, total_mem_not_aligned, total_file_not_aligned, modules)
+    check_misaligned(total_operations, total_mem_not_aligned, total_file_not_aligned, modules, file_map, df_lustre, dxt_posix, dxt_posix_read_data)

     #########################################################################################################################################################################

@@ -280,7 +377,7 @@ def handler():
     max_read_offset = df['counters']['POSIX_MAX_BYTE_READ'].max()
     max_write_offset = df['counters']['POSIX_MAX_BYTE_WRITTEN'].max()

-    check_traffic(max_read_offset, total_read_size, max_write_offset, total_written_size)
+    check_traffic(max_read_offset, total_read_size, max_write_offset, total_written_size, dxt_posix, dxt_posix_read_data, dxt_posix_write_data)

     #########################################################################################################################################################################

@@ -305,7 +402,7 @@ def handler():
     write_random = total_writes - write_consecutive - write_sequential
     #print('WRITE Random: {} ({:.2f}%)'.format(write_random, write_random / total_writes * 100))

-    check_random_operation(read_consecutive, read_sequential, read_random, total_reads, write_consecutive, write_sequential, write_random, total_writes)
+    check_random_operation(read_consecutive, read_sequential, read_random, total_reads, write_consecutive, write_sequential, write_random, total_writes, dxt_posix, dxt_posix_read_data, dxt_posix_write_data)

     #########################################################################################################################################################################

@@ -385,7 +482,7 @@ def handler():

     column_names = ['id', 'data_imbalance']
     detected_files = pd.DataFrame(detected_files, columns=column_names)
-    check_shared_data_imblance(stragglers_count, detected_files, file_map)
+    check_shared_data_imblance(stragglers_count, detected_files, file_map, dxt_posix, dxt_posix_read_data, dxt_posix_write_data)

     # POSIX_F_FASTEST_RANK_TIME
     # POSIX_F_SLOWEST_RANK_TIME
@@ -442,7 +539,7 @@ def handler():

     column_names = ['id', 'write_imbalance']
     detected_files = pd.DataFrame(detected_files, columns=column_names)
-    check_individual_write_imbalance(imbalance_count, detected_files, file_map)
+    check_individual_write_imbalance(imbalance_count, detected_files, file_map, dxt_posix, dxt_posix_write_data)

     imbalance_count = 0

@@ -458,7 +555,7 @@ def handler():

     column_names = ['id', 'read_imbalance']
     detected_files = pd.DataFrame(detected_files, columns=column_names)
-    check_individual_read_imbalance(imbalance_count, detected_files, file_map)
+    check_individual_read_imbalance(imbalance_count, detected_files, file_map, dxt_posix, dxt_posix_read_data)

     #########################################################################################################################################################################

@@ -493,7 +590,7 @@ def handler():
     column_names = ['id', 'absolute_indep_reads', 'percent_indep_reads']
     detected_files = pd.DataFrame(detected_files, columns=column_names)

-    check_mpi_collective_read_operation(mpiio_coll_reads, mpiio_indep_reads, total_mpiio_read_operations, detected_files, file_map)
+    check_mpi_collective_read_operation(mpiio_coll_reads, mpiio_indep_reads, total_mpiio_read_operations, detected_files, file_map, dxt_mpiio)

     df_mpiio_collective_writes = df_mpiio['counters'] #.loc[(df_mpiio['counters']['MPIIO_COLL_WRITES'] > 0)]

@@ -518,7 +615,7 @@ def handler():
     column_names = ['id', 'absolute_indep_writes', 'percent_indep_writes']
     detected_files = pd.DataFrame(detected_files, columns=column_names)

-    check_mpi_collective_write_operation(mpiio_coll_writes, mpiio_indep_writes, total_mpiio_write_operations, detected_files, file_map)
+    check_mpi_collective_write_operation(mpiio_coll_writes, mpiio_indep_writes, total_mpiio_write_operations, detected_files, file_map, dxt_mpiio)

     #########################################################################################################################################################################
