Skip to content

Commit 799313f

Browse files
committed
add a slice_sequence.py script to plot all slice plots
1 parent fe2f74e commit 799313f

File tree

3 files changed

+79
-20
lines changed

3 files changed

+79
-20
lines changed

Exec/science/xrb_spherical/analysis/front_tracker.py

Lines changed: 24 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import glob
55
import yt
66
import numpy as np
7+
import pandas as pd
78
import argparse
89
from concurrent.futures import ProcessPoolExecutor, as_completed
910
from yt.frontends.boxlib.api import CastroDataset
@@ -95,21 +96,22 @@ def track_flame_front(ds, metric):
9596
theta_loc = thetas[max_index:][loc_index][0]
9697

9798
# Returns 5 quantities
98-
# 1) time in ms
99-
# 2) theta that corresponds to the maximum averaged value
100-
# 3) maximum averaged value
101-
# 4) theta that corresponds to the maximum global value
102-
# 5) maximum global value
103-
timeTheta = [time, theta_loc, max(averaged_field), max_theta_loc, max_val]
99+
# 1) file name of the dataset
100+
# 2) time in ms
101+
# 3) theta that corresponds to the flame front
102+
# 4) theta that corresponds to the maximum averaged value
103+
# 5) maximum averaged value
104+
# 6) theta that corresponds to the maximum global value
105+
# 7) maximum global value
106+
tracking_data = [str(ds), time, theta_loc, thetas[max_index],
107+
max(averaged_field), max_theta_loc, max_val]
104108

105-
return timeTheta
109+
return tracking_data
106110

107111

108112
def process_dataset(fname, metric):
109113
ds = CastroDataset(fname)
110-
111-
# Returns a list [time, theta, max averaged value, theta_max, max value]
112-
return track_flame_front(ds, args)
114+
return track_flame_front(ds, metric)
113115

114116
if __name__ == "__main__":
115117
parser = argparse.ArgumentParser(description="""
@@ -126,12 +128,12 @@ def process_dataset(fname, metric):
126128
help="""Float number between (0, 1]. Representing the percent of
127129
the averaged maximum of the field quantity used to track the flame.""")
128130
parser.add_argument('--threshold', '-t', default=1.e-6, type=float,
129-
help="""Float number between (0, 1]. Representine the percent of
131+
help="""Float number between (0, 1]. Representing the percent of
130132
the global maximum of the field quantity used to select valid zones
131133
for averaging""")
132134
parser.add_argument('--jobs', '-j', default=1, type=int,
133135
help="""Number of workers to process plot files in parallel""")
134-
parser.add_argument('--out', '-o', default="front_tracking.dat", type=str,
136+
parser.add_argument('--out', '-o', default="front_tracking.csv", type=str,
135137
help="""Output filename for the tracking information""")
136138

137139
args = parser.parse_args()
@@ -153,7 +155,7 @@ def process_dataset(fname, metric):
153155
percent=args.percent,
154156
)
155157

156-
timeThetaArray = []
158+
tracking_data_array = []
157159

158160
###
159161
### Parallelize the loop. Copied from flame_wave/analysis/front_tracker.py
@@ -167,7 +169,7 @@ def process_dataset(fname, metric):
167169
for future in as_completed(future_to_index):
168170
i = future_to_index.pop(future)
169171
try:
170-
timeThetaArray.append(future.result())
172+
tracking_data_array.append(future.result())
171173
except Exception as exc:
172174
print(f"{args.fnames[i]} generated an exception: {exc}", file=sys.stderr, flush=True)
173175
except KeyboardInterrupt:
@@ -178,8 +180,12 @@ def process_dataset(fname, metric):
178180
executor.shutdown(wait=True, cancel_futures=True)
179181
sys.exit(1)
180182

181-
# Sort array by time and write to file
182-
timeThetaArray.sort(key=lambda x: x[0])
183-
timeThetaArray = np.array(timeThetaArray)
183+
# Sort array by time
184+
tracking_data_array.sort(key=lambda x: x[1])
185+
186+
# Write to file
187+
columns = ["fname", "time", "front_theta", "theta_max_avg",
188+
"max_avg_" + args.field, "theta_max", "max_global_" + args.field]
184189

185-
np.savetxt(args.out, timeThetaArray, delimiter=',')
190+
df = pd.DataFrame(tracking_data_array, columns=columns)
191+
df.to_csv(args.out, index=False)

Exec/science/xrb_spherical/analysis/slice.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ def slice(fnames:List[str], fields:List[str],
164164
If multiple file names are given, a grid of slice plots of different
165165
files will be plotted for a given field parameter.
166166
Note that either fnames or field must be single valued.""")
167-
parser.add_argument('--fields', nargs='+', type=str,
167+
parser.add_argument('-f', '--fields', nargs='+', type=str,
168168
help="""field parameters for plotting. Accepts one or more datasets.
169169
If multiple parameters are given, a grid of slice plots of different
170170
field parameters will be plotted for a given fname.
@@ -179,7 +179,6 @@ def slice(fnames:List[str], fields:List[str],
179179
parser.add_argument('-w', '--width', default=4.0, type=float,
180180
help="scaling for the domain width of the slice plot")
181181

182-
183182
args = parser.parse_args()
184183

185184
if len(args.fnames) > 1 and len(args.fields) > 1:
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
#!/usr/bin/env python3
2+
3+
import sys
4+
import argparse
5+
import numpy as np
6+
import pandas as pd
7+
from slice import slice
8+
from concurrent.futures import ProcessPoolExecutor, as_completed
9+
10+
parser = argparse.ArgumentParser(description="""
11+
This script uses the front_tracking.csv from front_tracker.py
12+
and slice.py to create a sequence of slice plots along with
13+
flame front position.""")
14+
15+
parser.add_argument('tracking_fname', type=str,
16+
help="csv file generated from front_tracker.py to track flame front position.")
17+
parser.add_argument('-f', '--fields', nargs='+', type=str,
18+
help="field parameters for plotting, e.g. enuc abar.")
19+
parser.add_argument('-w', '--width', default=4.0, type=float,
20+
help="scaling for the domain width of the slice plot")
21+
parser.add_argument('--jobs', '-j', default=1, type=int,
22+
help="""Number of workers to plot in parallel""")
23+
24+
args = parser.parse_args()
25+
26+
# data has columns: fname, time, front_theta, theta_max_avg, max_avg, theta_max, max_val.
27+
# See front_tracker.py for more info
28+
tracking_data = pd.read_csv(args.tracking_fname)
29+
30+
# Get file name and theta of flame front
31+
fnames = tracking_data["fname"]
32+
front_thetas = tracking_data["front_theta"]
33+
34+
# Parallelize the plotting
35+
with ProcessPoolExecutor(max_workers=args.jobs) as executor:
36+
future_to_index = {
37+
executor.submit(slice, fname, args.fields,
38+
theta=front_thetas[i], widthScale=args.width): i
39+
for i, fname in enumerate(fnames)
40+
}
41+
try:
42+
for future in as_completed(future_to_index):
43+
i = future_to_index.pop(future)
44+
try:
45+
future.result()
46+
except Exception as exc:
47+
print(f"{fnames[i]} generated an exception: {exc}", file=sys.stderr, flush=True)
48+
except KeyboardInterrupt:
49+
print(
50+
"\n*** got ctrl-c, cancelling remaining tasks and waiting for existing ones to finish...\n",
51+
flush=True,
52+
)
53+
executor.shutdown(wait=True, cancel_futures=True)
54+
sys.exit(1)

0 commit comments

Comments
 (0)