import glob
import yt
import numpy as np
+import pandas as pd
import argparse
from concurrent.futures import ProcessPoolExecutor, as_completed
from yt.frontends.boxlib.api import CastroDataset
@@ -95,21 +96,22 @@ def track_flame_front(ds, metric):
    theta_loc = thetas[max_index:][loc_index][0]

-    # Returns 5 quantities
-    # 1) time in ms
-    # 2) theta that corresponds to the maximum averaged value
-    # 3) maximum averaged value
-    # 4) theta that corresponds to the maximum global value
-    # 5) maximum global value
-    timeTheta = [time, theta_loc, max(averaged_field), max_theta_loc, max_val]
+    # Returns 7 quantities
+    # 1) file name of the dataset
+    # 2) time in ms
+    # 3) theta that corresponds to the flame front
+    # 4) theta that corresponds to the maximum averaged value
+    # 5) maximum averaged value
+    # 6) theta that corresponds to the maximum global value
+    # 7) maximum global value
+    tracking_data = [str(ds), time, theta_loc, thetas[max_index],
+                     max(averaged_field), max_theta_loc, max_val]

-    return timeTheta
+    return tracking_data


def process_dataset(fname, metric):
    ds = CastroDataset(fname)
-
-    # Returns a list [time, theta, max averaged value, theta_max, max value]
-    return track_flame_front(ds, args)
+    return track_flame_front(ds, metric)

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="""
@@ -126,12 +128,12 @@ def process_dataset(fname, metric):
                        help="""Float number between (0, 1]. Representing the percent of
                        the averaged maximum of the field quantity used to track the flame.""")
    parser.add_argument('--threshold', '-t', default=1.e-6, type=float,
-                        help="""Float number between (0, 1]. Representine the percent of
+                        help="""Float number between (0, 1]. Representing the percent of
                        the global maximum of the field quantity used to select valid zones
                        for averaging""")
    parser.add_argument('--jobs', '-j', default=1, type=int,
                        help="""Number of workers to process plot files in parallel""")
-    parser.add_argument('--out', '-o', default="front_tracking.dat", type=str,
+    parser.add_argument('--out', '-o', default="front_tracking.csv", type=str,
                        help="""Output filename for the tracking information""")

    args = parser.parse_args()
@@ -153,7 +155,7 @@ def process_dataset(fname, metric):
        percent=args.percent,
    )

-    timeThetaArray = []
+    tracking_data_array = []

    ###
    ### Parallelize the loop. Copied from flame_wave/analysis/front_tracker.py
@@ -167,7 +169,7 @@ def process_dataset(fname, metric):
            for future in as_completed(future_to_index):
                i = future_to_index.pop(future)
                try:
-                    timeThetaArray.append(future.result())
+                    tracking_data_array.append(future.result())
                except Exception as exc:
                    print(f"{args.fnames[i]} generated an exception: {exc}", file=sys.stderr, flush=True)
        except KeyboardInterrupt:
@@ -178,8 +180,12 @@ def process_dataset(fname, metric):
            executor.shutdown(wait=True, cancel_futures=True)
            sys.exit(1)

-    # Sort array by time and write to file
-    timeThetaArray.sort(key=lambda x: x[0])
-    timeThetaArray = np.array(timeThetaArray)
+    # Sort array by time
+    tracking_data_array.sort(key=lambda x: x[1])
+
+    # Write to file
+    columns = ["fname", "time", "front_theta", "theta_max_avg",
+               "max_avg_" + args.field, "theta_max", "max_global_" + args.field]

-    np.savetxt(args.out, timeThetaArray, delimiter=',')
+    df = pd.DataFrame(tracking_data_array, columns=columns)
+    df.to_csv(args.out, index=False)
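
With this change the tracking output is a plain CSV with a header row, so downstream analysis can select columns by name instead of by index. A minimal sketch of reading it back, assuming the default output name front_tracking.csv and the column names added above (the plot itself is only illustrative):

    import pandas as pd
    import matplotlib.pyplot as plt

    # Load the tracking data written by the script above
    df = pd.read_csv("front_tracking.csv")

    # Flame-front angle vs. time (time is stored in ms per the comments above)
    plt.plot(df["time"], df["front_theta"])
    plt.xlabel("time [ms]")
    plt.ylabel("front_theta")
    plt.savefig("front_tracking.png")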