 import re
 import os
 
-def main():
+def filter(filters, record):
+    # Always filter non-records:
+    if not "\"ph\":" in record: return False
+    # Handle the case where there are no filters:
+    if len(filters) == 0: return True
+    # Always keep metadata records:
+    if "\"ph\":\"M\"" in record: return True
+    # Otherwise, check against all filters:
+    for filter in filters:
+        if filter in record: return True
+    return False
+
+def fixup(record):
+    if not record.strip().endswith(','):
+        record = record.strip() + ",\n"
+    return record
 
+def main():
     printHelp = False
 
     if (len(sys.argv) < 4 or sys.argv[1] == '-h' or sys.argv[1] == '-?'):
         printHelp = True
 
     # Help message
     if (printHelp):
-        print("")
-        print("    A script to combine multiple Chrome traces captured by the opencl-intercept-layer.")
-        print("    The combined trace can be viewed on a common timeline in the Chrome browser.")
+        print(r"")
+        print(r"    A script to combine multiple Chrome traces captured by the opencl-intercept-layer.")
+        print(r"    The combined trace can be viewed on a common timeline in the Chrome browser.")
         print()
-        print("    This is useful for analyzing multi-process execution.")
-        print("    Set CLI_AppendPid=1 when collecting Chrome traces to obtain separate per-process traces.")
-        print("    Can also be useful to compare two or more single process executions on a common timeline.")
+        print(r"    This is useful for analyzing multi-process execution.")
+        print(r"    Set CLI_AppendPid=1 when collecting Chrome traces to obtain separate per-process traces.")
+        print(r"    Can also be useful to compare two or more single process executions on a common timeline.")
         print()
-        print("    Use as:")
-        print("    combine_chrome_traces.py <number of traces> <space-separated paths to all json traces> [space-separated event filters]")
+        print(r"    Use as:")
+        print(r"    combine_chrome_traces.py <number of traces> <space-separated paths to all json traces> [space-separated event filters]")
         print()
-        print("    Optional arguments: event-filters are names of OpenCL kernels or OpenCL API calls")
-        print("    that should be retained in the filtered output.")
+        print(r"    Optional arguments: event-filters are names of OpenCL kernels or OpenCL API calls")
+        print(r"    that should be retained in the filtered output.")
         print()
-        print("    Example:")
+        print(r"    Example:")
         print()
-        print("    combine_chrome_traces.py 4 \ # specifies 4 traces to combine")
-        print("    CLIntercept_Dump.45682/clintercept_trace.json \ # paths to the four traces follow")
-        print("    CLIntercept_Dump.45683/clintercept_trace.json \ ")
-        print("    CLIntercept_Dump.45684/clintercept_trace.json \ ")
-        print("    CLIntercept_Dump.45685/clintercept_trace.json \ ")
-        print("    kernelA kernelB clEnqueueWriteBuffer clEnqueueReadBuffer # specifies kernel/API names as filters [optional]")
+        print(r"    combine_chrome_traces.py 4 \ # specifies 4 traces to combine")
+        print(r"    CLIntercept_Dump.45682/clintercept_trace.json \ # paths to the four traces follow")
+        print(r"    CLIntercept_Dump.45683/clintercept_trace.json \ ")
+        print(r"    CLIntercept_Dump.45684/clintercept_trace.json \ ")
+        print(r"    CLIntercept_Dump.45685/clintercept_trace.json \ ")
+        print(r"    kernelA kernelB clEnqueueWriteBuffer clEnqueueReadBuffer # specifies kernel/API names as filters [optional]")
         print()
-        print("    Note: This script modifies event records so that all traces have a common epoch.")
+        print(r"    Note: This script modifies event records so that all traces have a common epoch.")
         print()
         sys.exit(0)
 
     # Get input arguments
-    numFiles = int(sys.argv[1]);
-    numStrings = len(sys.argv) - numFiles - 2;
+    files = sys.argv[2:2+int(sys.argv[1])]
+    filters = sys.argv[2+int(sys.argv[1]):]
 
     # Sanity checks
-    if (numFiles < 2):
+    if len(files) < 2:
         print("ERROR: you must specify at least two traces to combine.")
         sys.exit(1)
-    for j in range(numFiles):
-        if (not os.path.isfile(sys.argv[j+2])):
-            print("ERROR: specified file " + sys.argv[j+2] + " cannot be found.")
+    for filename in files:
+        if not os.path.isfile(filename):
+            print("ERROR: specified file {} cannot be found.".format(filename))
             sys.exit(1)
 
     # Read input files
     inputFiles = []
-    for j in range(numFiles):
-        f = open(sys.argv[j+2],'r')
+    for filename in files:
+        f = open(filename,'r')
         currentFile = f.readlines()
         f.close()
         inputFiles.append(currentFile)
 
     # Figure out epoch (earliest start_time across all records)
     start_times = []
-    for j in range(numFiles):
+    for j in range(len(files)):
         for k in range(len(inputFiles[j])):
             if (inputFiles[j][k].find("start_time") != -1):
                 start_times.append(int(inputFiles[j][2].split(":")[-1].split("}")[0].strip('"')))
@@ -79,39 +95,43 @@ def main():
             print("ERROR: start_time not found in trace file " + sys.argv[j+2] + ". Please check if the trace is valid.")
             sys.exit(1)
     epoch = min(start_times)
+    print("Found minimum start time {}".format(epoch))
 
-    # Perform filtering if necessary
+    # Perform filtering
     filteredFiles = []
-    if (numStrings == 0):
-        filteredFiles = inputFiles
-    else:
-        for j in range(numFiles):
-            flt = [i for i in inputFiles[j] if "\"ph\":\"M\"" in i] # copy metadata
-            for k in range(numStrings):
-                flt = flt + [i for i in inputFiles[j] if sys.argv[2+numFiles+k] in i]
-            filteredFiles.append(flt)
+    for j in range(len(files)):
+        flt = [fixup(i) for i in inputFiles[j] if filter(filters, i)]
+        filteredFiles.append(flt)
 
     # Perform epoch normalization
-    for j in range(numFiles):
+    for j in range(len(files)):
         offset = start_times[j] - epoch
+        print("Processing file {} with offset {}".format(files[j], offset))
         for k in range(len(filteredFiles[j])):
             if (filteredFiles[j][k].find("\"ts\"") != -1):
-                ts = int(filteredFiles[j][k].split("\"ts\":")[-1].split(",")[0]) + offset
-                filteredFiles[j][k] = re.sub("\"ts\":\d+", "\"ts\":" + str(ts), filteredFiles[j][k])
+                ts = float(filteredFiles[j][k].split("\"ts\":")[-1].split(",")[0]) + offset
+                #print('old record was: {}'.format(filteredFiles[j][k].strip()))
+                filteredFiles[j][k] = re.sub("\"ts\":[\\d.]+", "\"ts\":" + str(ts), filteredFiles[j][k])
+                #print('new record is: {}'.format(filteredFiles[j][k].strip()))
             elif (filteredFiles[j][k].find("start_time") != -1):
-                filteredFiles[j][k] = re.sub('\"start_time\":["]?\d+["]?', "\"start_time\":" + str(epoch), filteredFiles[j][k])
-
+                #print('old record was: {}'.format(filteredFiles[j][k].strip()))
+                filteredFiles[j][k] = re.sub('\"start_time\":["]?\\d+["]?', "\"start_time\":" + str(epoch), filteredFiles[j][k])
+                #print('new record is: {}'.format(filteredFiles[j][k].strip()))
+
     # Write to output file
     tstamp = datetime.datetime.now()
     fName = "merged_" + str(tstamp.year) + '-' + str(tstamp.month) + '-' + str(tstamp.day) \
             + '-' + str(tstamp.hour) + '-' + str(tstamp.minute) + '-' + str(tstamp.second) + ".json"
-    print("Combining in " + fName)
+    print("Writing to combined file " + fName)
     fo = open(fName, 'w')
     fo.write("[\n")
-    for j in range(numFiles):
+    for j in range(len(files)):
         for k in range(1,len(filteredFiles[j])):
             fo.write("%s" % filteredFiles[j][k])
+    fo.write("{\"ph\":\"M\",\"name\":\"clintercept_merged_eof\",\"pid\":0,\"tid\":0}\n")
+    fo.write("]\n")
     fo.close()
+    print("Done.")
 
 if __name__ == "__main__":
     main()
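
For reference, a minimal standalone sketch (not part of the change) of the keep/drop rule that the new filter() helper implements; the record strings and filter names below are made up for illustration:

# Illustrative sketch only: mirrors the keep/drop logic of filter() above.
def keep(filters, record):
    if not "\"ph\":" in record: return False      # not an event record
    if len(filters) == 0: return True             # no filters given: keep everything
    if "\"ph\":\"M\"" in record: return True      # metadata records are always kept
    return any(f in record for f in filters)      # otherwise keep only matching events

filters = ["kernelA", "clEnqueueReadBuffer"]       # hypothetical filter arguments
meta = '{"ph":"M","name":"process_name","pid":1,"tid":0},\n'
hit  = '{"ph":"X","name":"kernelA","pid":1,"tid":2,"ts":100,"dur":5},\n'
miss = '{"ph":"X","name":"kernelB","pid":1,"tid":2,"ts":200,"dur":5},\n'
junk = '{"traceEvents": [\n'

assert keep(filters, meta)          # metadata: kept
assert keep(filters, hit)           # matches "kernelA": kept
assert not keep(filters, miss)      # matches no filter: dropped
assert not keep(filters, junk)      # no "ph" key, not an event record: dropped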