-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgrid_code.py
More file actions
190 lines (161 loc) · 6.22 KB
/
grid_code.py
File metadata and controls
190 lines (161 loc) · 6.22 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 28 15:33:31 2025
@author: Elina
"""
print('Grid script started')
import scipy.interpolate as spint
from concurrent.futures import ProcessPoolExecutor, as_completed
from joblib import Parallel, delayed
import threading
from multiprocessing import shared_memory
import os
import numpy as np
import pickle
import h5py
import matplotlib.pyplot as plt
from scipy.interpolate import RegularGridInterpolator as RGI
from tqdm import tqdm
from kuibit import simdir as sd
#%% Load in paths, get iterations.
# NOTE(review): the script indexes paths[-1] below, so leaving this list empty
# raises IndexError immediately — populate before running.
paths = [] #Enter list of file paths to simulation data here (chronologically from earliest to latest).
#%% Define functions that save our data into H5 files and dictionaries
def WriteDictionaryVar(h5f, data):
    """Recursively write a (possibly nested) dictionary into an HDF5 group.

    Parameters
    ----------
    h5f : h5py.File or h5py.Group
        Open HDF5 handle to write into.
    data : dict
        Mapping of variable name -> value. Nested dicts become sub-groups;
        everything else is coerced to a numpy array and stored as a dataset.

    Keys are written in sorted order for reproducible file layout. Dataset
    creation failures (e.g. name already exists) are reported and skipped so
    the remaining variables are still written (best-effort behaviour).
    """
    for varname in sorted(data):
        x_data = data[varname]
        # isinstance (not type ==) so dict subclasses such as OrderedDict recurse too.
        if isinstance(x_data, dict):
            grp = h5f.create_group(varname)
            WriteDictionaryVar(grp, x_data)
        else:
            values = x_data if isinstance(x_data, np.ndarray) else np.array(x_data)
            try:
                h5f.create_dataset(varname, data=values)
            except Exception as e:
                # Narrowed from a bare except: keep best-effort writing, but
                # surface the actual failure and never swallow KeyboardInterrupt.
                print("Error creating dataset ", varname, ":", e)
def WriteScalarHDF5(file, data, group='', mode='a'):
    """Write a dictionary of scalars/arrays into an HDF5 file.

    Parameters
    ----------
    file : str
        Path of the HDF5 file to open.
    data : dict
        Variables to write (see WriteDictionaryVar for the layout rules).
    group : str, optional
        Name of the group to write under; '' writes at the file root.
    mode : str, optional
        h5py file mode (default 'a' = create/append).

    Returns
    -------
    int
        1 on completion (kept for backward compatibility with callers).
    """
    # Context manager guarantees the file is closed even if writing raises —
    # the previous version leaked the handle on any exception.
    with h5py.File(file, mode) as f:
        # require_group creates the group if missing and returns the existing
        # one otherwise, replacing the old create_group/except/lookup dance.
        grp = f.require_group(group) if group else f
        WriteDictionaryVar(grp, data)
    return 1
def save_grids(grids, filename):
    """Serialize *grids* to *filename* with pickle (binary mode)."""
    payload = pickle.dumps(grids)
    with open(filename, "wb") as out_file:
        out_file.write(payload)
def load_grid(filename):
    """Read back an object previously written by save_grids.

    NOTE: pickle is only safe on trusted files — use solely on grids
    produced by this pipeline.
    """
    with open(filename, "rb") as src:
        payload = src.read()
    return pickle.loads(payload)
#%% Finds the refinement of grids of different Refinement Levels (RLs)
# Use the final output directory: post-merger data should open without patch errors.
sim = sd.SimDir(paths[-1])
its = sim.gf.xyz["vx"].available_iterations
lastit = its[-1]
n_j = 20
levels = [5, 4, 3, 2, 1, 0]

def get_spacings(L):
    """Return a one-entry dict mapping str(L) to the grid spacing at level L of the last frame."""
    return {f"{L}": sim.gf.xyz["vx"][lastit].dx_at_level(L)}

spacing_dicts = Parallel(n_jobs=n_j, backend='loky', verbose=n_j)(
    delayed(get_spacings)(lvl) for lvl in levels
)

# Merge the per-level one-entry dicts into a single {level-string: dx} lookup.
spacings = {}
for partial in spacing_dicts:
    spacings.update(partial)
print("spacing at rl=5 at last frame", spacings['5'])
#%% Resamples grid data where we cannot immediately open it with Kuibit's get_level (due to a patch error)
n_j = 20  # number of parallel joblib workers used for the resampling pass below
# Map of Kuibit grid-function name -> short prefix used in the pickled grid filenames.
vars_to_grid = {'vx':'vx','vy':'vy', 'vz':'vz', 'igm_temperature':'T', 'rho_b':'rho', 'igm_Ye':'Ye'}
def make_grids(path):
    """Check every iteration at *path* and resample variables where get_level fails.

    For each available iteration, each refinement level 5..1 is probed with
    Kuibit's get_level. Where a level cannot be opened directly (patch error),
    the data is read from a coarser level (L-1, falling back to level 0) and
    resampled onto a uniform grid at level-L spacing, then pickled to disk via
    save_grids.

    Parameters
    ----------
    path : str
        Simulation output directory, opened with kuibit.simdir.SimDir.

    Returns
    -------
    dict
        {"path": path, "iterations": usable iterations (deduplicated),
         "times": times of all iterations, "log": list of status messages}.
    """
    messages = []
    working_its = []
    sim = sd.SimDir(path)
    try:
        # Probe only: if 'vx' is absent entirely there is nothing to resample.
        sim.gf.xyz["vx"]
    except KeyError:
        msg = f"'vx' variable not present in simulation at {path} — skipping to next path"
        print(msg)
        messages.append(msg)
        return {
            "path": path,
            "iterations": [],
            "times": [],
            "log": messages,
        }
    print(f"Loaded simulation at {path}")
    its = sim.gf.xyz["vx"].available_iterations
    times = sim.gf.xyz["vx"].times
    print("iterations:", its)
    print("times", times)
    for it in its:
        # RL=0 omitted as the coarsest level is guaranteed to exist if data is
        # not corrupted/absent. Checking all of 5..1 (not just 5 and 4) is more robust.
        for L in [5, 4, 3, 2, 1]:
            try:
                sim.gf.xyz['vx'][it].get_level(L)
                # Record each usable iteration once (the old code appended it
                # once per readable level, polluting its_dict with duplicates).
                if it not in working_its:
                    working_its.append(it)
                continue
            except Exception:
                # Level L unreadable: pull data from one level coarser, falling
                # back to level 0, and resample it at level-L spacing.
                try:
                    v_x = sim.gf.xyz['vx'][it].get_level(L - 1)
                except Exception:
                    try:
                        v_x = sim.gf.xyz['vx'][it].get_level(0)
                    except Exception:
                        messages.append(f"Iteration {it} in file {path} is probably corrupted/empty - skipping to next iteration")
                        continue
                try:
                    x, y, z = v_x.coordinates_meshgrid()
                    gridshape = tuple(x.shape)
                    dxx = tuple(float(v) for v in spacings[f'{L}'])
                    # NOTE(review): x0 is set to dx * shape, which looks like the
                    # grid *extent* rather than its lower corner — confirm this is
                    # the intended origin for to_UniformGridData.
                    origin = (dxx[0]*gridshape[0], dxx[1]*gridshape[1], dxx[2]*gridshape[2])
                    for var, pref in vars_to_grid.items():
                        grd = sim.gf.xyz[var][it].to_UniformGridData(
                            shape=gridshape,
                            x0=origin,
                            dx=dxx,
                            resample=True
                        )
                        save_grids(grd, f"{pref}{L}_grid_{it}.pkl")
                        print("Created and saved grid.")
                        del grd  # free the resampled grid before the next variable
                    messages.append(f"[{it}] grid saved")
                    if it not in working_its:
                        working_its.append(it)
                except Exception as e:
                    # Flagged in the log/printout, but later grids can still be made.
                    messages.append(f"Failed to make fallback grid for it={it}: {e}")
                    print(f"Failed to make fallback grid for it={it}: {e}")
                    continue
    return {
        "path": path,
        "iterations": working_its,
        "times": times,
        "log": messages,
    }
# Fan the per-path resampling out over joblib workers.
results = Parallel(n_jobs=n_j, backend='loky', verbose=n_j)(
    delayed(make_grids)(p) for p in paths
)
print("grids made!")

#Make a dictionary of iterations to use later
its_dict = {}
for res in results:
    its_dict[res["path"]] = {
        "iterations": res["iterations"],
        "times": res["times"],
    }
with open("its_dict.pkl", "wb") as out:
    pickle.dump(its_dict, out)
print("End of script.")