Skip to content

Commit 12a025f

Browse files
authored
New sandbox experiments (#38)
* add three different glacier states per sandbox geometry * typo * fix tests * adapted dashboard for different glacier states * added initial_flux to data_logger * added tests for glacier_state mb_models * small experiment adaptations
1 parent 672e7bb commit 12a025f

11 files changed

+930
-503
lines changed

agile1d/core/cost_function.py

Lines changed: 34 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,6 @@ def get_indices_for_unknown_parameters(data_logger):
138138

139139

140140
def define_scaling_terms(data_logger):
141-
142141
# define scaling terms for observation stuff
143142
if 'scale' in data_logger.obs_scaling_parameters.keys():
144143
observations = data_logger.observations
@@ -260,10 +259,7 @@ def cost_fct(unknown_parameters, data_logger):
260259

261260
mb_models = initialise_mb_models(unknown_parameters_descaled, data_logger)
262261

263-
if 'smoothed_flux' in data_logger.regularisation_terms.keys():
264-
initial_flux = dynamic_model(flowline).flux_stag[0]
265-
else:
266-
initial_flux = None
262+
initial_flux = dynamic_model(flowline).flux_stag[0]
267263

268264
if data_logger.spinup_type == 'height_shift_spinup':
269265
try:
@@ -318,7 +314,7 @@ def cost_fct(unknown_parameters, data_logger):
318314
initial_flux, # for regularisation term 'smoothed_flux'
319315
unknown_parameters, # for distance from fg regularisation
320316
data_logger # for reg_parameters and observations
321-
)
317+
)
322318

323319
# sum up cost function terms using cost lambda
324320
cost_lambda = torch.tensor(data_logger.cost_lambda,
@@ -344,6 +340,9 @@ def cost_fct(unknown_parameters, data_logger):
344340
data_logger.save_data_in_datalogger('sfc_h_start', sfc_h_start)
345341
data_logger.save_data_in_datalogger('section_start', section_start)
346342
data_logger.save_data_in_datalogger('flowlines', detach_flowline(final_fl))
343+
# exclude last grid point because initial flux is defined on staggered grid
344+
data_logger.save_data_in_datalogger('initial_flux',
345+
initial_flux.detach().clone()[:-1])
347346
data_logger.save_data_in_datalogger('costs', cost)
348347
data_logger.save_data_in_datalogger('grads', grad)
349348
data_logger.save_data_in_datalogger('c_terms', c_terms)
@@ -625,20 +624,36 @@ def initialise_mb_models(unknown_parameters,
625624
# -1 because period defined as [y0 - halfsize, y0 + halfsize + 1]
626625
y0 = (y_start + y_end - 1) / 2
627626
halfsize = (y_end - y_start - 1) / 2
628-
mb_models[mb_mdl_set] = {'mb_model':
629-
ConstantMassBalanceTorch(
630-
gdir, y0=y0, halfsize=halfsize,
631-
torch_type=torch_type, device=device),
632-
'years':
633-
mb_models_settings[mb_mdl_set]['years']}
627+
mb_models[mb_mdl_set] = {
628+
'mb_model': ConstantMassBalanceTorch(
629+
gdir, y0=y0, halfsize=halfsize,
630+
torch_type=torch_type, device=device),
631+
'years':
632+
mb_models_settings[mb_mdl_set]['years']}
634633
elif mb_models_settings[mb_mdl_set]['type'] == 'TIModel':
635-
mb_models[mb_mdl_set] = {'mb_model':
636-
MBModelTorchWrapper(
637-
gdir=gdir,
638-
mb_model=MonthlyTIModel(gdir)),
639-
'years':
640-
mb_models_settings[mb_mdl_set]['years']
641-
}
634+
mb_model_args = mb_models_settings[mb_mdl_set]['model_args']
635+
mb_models[mb_mdl_set] = {
636+
'mb_model': MBModelTorchWrapper(
637+
gdir=gdir,
638+
mb_model=MonthlyTIModel(
639+
gdir,
640+
**mb_model_args)
641+
),
642+
'years':
643+
mb_models_settings[mb_mdl_set]['years']
644+
}
645+
elif mb_models_settings[mb_mdl_set]['type'] == 'ConstantModel':
646+
mb_model_args = mb_models_settings[mb_mdl_set]['model_args']
647+
mb_models[mb_mdl_set] = {
648+
'mb_model': MBModelTorchWrapper(
649+
gdir=gdir,
650+
mb_model=ConstantMassBalance(
651+
gdir,
652+
**mb_model_args)
653+
),
654+
'years':
655+
mb_models_settings[mb_mdl_set]['years']
656+
}
642657
else:
643658
raise NotImplementedError("The MassBalance type "
644659
f"{mb_models_settings[mb_mdl_set]['type']} "

agile1d/core/data_logging.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -140,6 +140,7 @@ def __init__(self, gdir, fls_init, inversion_input, climate_filename,
140140
self.time_needed = None
141141
self.grads = None
142142
self.flowlines = None
143+
self.initial_flux = None
143144
self.sfc_h_start = None
144145
self.section_start = None
145146
self.observations_mdl = None
@@ -157,7 +158,7 @@ def __init__(self, gdir, fls_init, inversion_input, climate_filename,
157158
self.first_guess = None
158159

159160
self.filename = gdir.name + '_' + \
160-
inversion_input['experiment_description']
161+
inversion_input['experiment_description']
161162

162163
# create info Text for callback_fct TODO: think about showing the evolution of the c_terms
163164
self.info_text = '''
@@ -248,6 +249,7 @@ def filter_data_from_optimization(self):
248249
self.reg_terms = self.reg_terms[index]
249250
self.time_needed = self.squeeze_generic(self.time_needed[index])
250251
self.flowlines = self.squeeze_generic(self.flowlines[index])
252+
self.initial_flux = self.squeeze_generic(self.initial_flux[index])
251253
self.sfc_h_start = self.squeeze_generic(self.sfc_h_start[index])
252254
self.section_start = self.squeeze_generic(self.section_start[index])
253255
self.observations_mdl = self.squeeze_generic(self.observations_mdl[index])
@@ -269,6 +271,7 @@ def create_and_save_dataset(self):
269271
ds['costs'] = (['iteration'], self.costs)
270272
ds['grads'] = (['iteration', 'nr_unknown_parameters'], self.grads)
271273
ds['flowlines'] = (['iteration'], self.flowlines)
274+
ds['initial_flux'] = (['iteration', 'x'], self.initial_flux)
272275
ds['sfc_h_start'] = (['iteration', 'x'], self.sfc_h_start)
273276
ds['section_start'] = (['iteration', 'x'], self.section_start)
274277
ds['observations_mdl'] = (['iteration'], self.observations_mdl)

agile1d/core/inversion.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -60,9 +60,11 @@ def add_setting():
6060
"MassBalanceModel must start at least " \
6161
"one year before first given observation year!" \
6262
"Default: {'MB': {'type': 'TIModel'," \
63-
" 'years': np.array([1980, 2020])}}"
63+
" 'years': np.array([1980, 2020])," \
64+
" 'model_args': {}}}"
6465
_default = {'MB': {'type': 'TIModel',
65-
'years': np.array([1980, 2020])}}
66+
'years': np.array([1980, 2020]),
67+
'model_args': {}}}
6668

6769
add_setting()
6870

@@ -103,10 +105,10 @@ def add_setting():
103105

104106
_key = "bed_h_bounds"
105107
_doc = "Define how large the boundaries for the bed_h are, in relation to " \
106-
"first guess thickness. (e.g. (0.2, 1.4) means the bed height can " \
108+
"first guess thickness. (e.g. (0.4, 1.6) means the bed height can " \
107109
"be between 1.6*fg_thick and 0.4*fg_thick). " \
108-
"Default: (0.2, 1.4)"
109-
_default = (0.2, 1.4)
110+
"Default: (0.4, 1.6)"
111+
_default = (0.4, 1.6)
110112
add_setting()
111113

112114
_key = "max_deviation_surface_h"
@@ -245,7 +247,7 @@ def add_setting():
245247
"glacier at the initial state." \
246248
"e.g. {'section':" \
247249
" {'extra_grid_points': 10," \
248-
" 'limits': (0.75, 1.25)," \
250+
" 'limits': (0.6, 1.4)," \
249251
" 'fg_years': 1" \
250252
" }" \
251253
" }" \

agile1d/core/utils.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -144,7 +144,9 @@ def inversion_settings(self):
144144

145145
def write_inversion_settings(self,
146146
control_vars=['bed_h'],
147-
mb_models_settings={'MB1': {'type': 'constant', 'years': np.array([1950, 2016])}},
147+
mb_models_settings={'MB1': {
148+
'type': 'constant',
149+
'years': np.array([1950, 2016])}},
148150
min_w0_m=10.,
149151
observations=None, # {'Area', {'2010': np.array([23])}}
150152
reg_parameters=None, # [0, 0.1, 10]

agile1d/sandbox/calculate_statistics.py

Lines changed: 38 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,8 @@ def add_0d_stats(x, y):
3030
'abs_diff': float(np.abs(x - y))}
3131

3232

33-
def calculate_result_statistics(gdir, data_logger, print_statistic=False):
33+
def calculate_result_statistics(gdir, glacier_state, data_logger,
34+
print_statistic=False):
3435
"""calculate some statistics of the result for analysis"""
3536

3637
# open the dataset of the run to add our calculated statistics
@@ -69,7 +70,8 @@ def calculate_result_statistics(gdir, data_logger, print_statistic=False):
6970
ds.unknown_parameters[-1][control_indices[control_var]].values
7071
controls_true = {}
7172
fls_true = gdir.read_pickle('model_flowlines',
72-
filesuffix='_agile_true_init')[0]
73+
filesuffix='_agile_true_init_'
74+
f'{glacier_state}')[0]
7375
for control_var in controls_mdl:
7476
if control_var in ['bed_h', 'area_bed_h']:
7577
controls_true[control_var] = \
@@ -81,7 +83,8 @@ def calculate_result_statistics(gdir, data_logger, print_statistic=False):
8183
controls_true[control_var] = controls_mdl[control_var]
8284
elif control_var in ['section']:
8385
fls_1980_true = gdir.read_pickle('model_flowlines',
84-
filesuffix='_creation_spinup')[0]
86+
filesuffix='_creation_spinup_'
87+
f'{glacier_state}')[0]
8588
controls_true[control_var] = \
8689
fls_1980_true.section[:len(controls_mdl['section'])]
8790
else:
@@ -117,7 +120,8 @@ def calculate_result_statistics(gdir, data_logger, print_statistic=False):
117120
sfc_h_start = copy.deepcopy(data_logger.sfc_h_start[-1])
118121
fls_start_mdl.surface_h = sfc_h_start
119122
fls_start_true = gdir.read_pickle('model_flowlines',
120-
filesuffix='_creation_spinup')[0]
123+
filesuffix='_creation_spinup_'
124+
f'{glacier_state}')[0]
121125

122126
past_state_stats = {}
123127
for var in ['thick', 'area_m2', 'volume_m3']:
@@ -150,7 +154,8 @@ def get_volume(fl):
150154
with xr.open_dataset(fp) as ds_diag:
151155
past_evol_mdl = ds_diag.load()
152156
fp = gdir.get_filepath('model_diagnostics',
153-
filesuffix='_agile_true_total_run')
157+
filesuffix='_agile_true_total_run_'
158+
f'{glacier_state}')
154159
with xr.open_dataset(fp) as ds_diag:
155160
past_evol_true = ds_diag.load()
156161

@@ -165,7 +170,8 @@ def get_volume(fl):
165170
# how well do we match today's glacier state ------------------------------
166171
fls_end_mdl = copy.deepcopy(data_logger.flowlines[-1])
167172
fls_end_true = gdir.read_pickle('model_flowlines',
168-
filesuffix='_agile_true_end')[0]
173+
filesuffix='_agile_true_end_'
174+
f'{glacier_state}')[0]
169175

170176
today_state_stats = {}
171177
for var in ['thick', 'area_m2', 'volume_m3']:
@@ -196,7 +202,8 @@ def get_volume(fl):
196202
with xr.open_dataset(fp) as ds_diag:
197203
future_evol_mdl = ds_diag.load()
198204
fp = gdir.get_filepath('model_diagnostics',
199-
filesuffix='_agile_true_future')
205+
filesuffix='_agile_true_future_'
206+
f'{glacier_state}')
200207
with xr.open_dataset(fp) as ds_diag:
201208
future_evol_true = ds_diag.load()
202209

@@ -218,7 +225,8 @@ def get_volume(fl):
218225
# Here print the statistics in comparison to the default run
219226

220227
# open default statistics
221-
fp_default = os.path.join(gdir.dir, 'default_oggm_statistics.pkl')
228+
fp_default = os.path.join(gdir.dir, f'default_oggm_statistics_'
229+
f'{glacier_state}.pkl')
222230
with open(fp_default, 'rb') as handle:
223231
default_stats = pickle.load(handle)
224232

@@ -271,28 +279,32 @@ def get_volume(fl):
271279
raise NotImplementedError(f'{stat_clean}')
272280

273281

274-
def calculate_default_oggm_statistics(gdir):
282+
def calculate_default_oggm_statistics(gdir, glacier_state):
275283

276284
default_oggm_statistics = {}
277285

278286
for reali in ['dynamic', 'static']:
279287
# open the run files
280288
with xr.open_dataset(
281289
gdir.get_filepath('model_diagnostics',
282-
filesuffix=f'_oggm_{reali}_past')) as ds:
290+
filesuffix=f'_oggm_{reali}_past_'
291+
f'{glacier_state}')) as ds:
283292
diag_past = ds.load()
284293
f = gdir.get_filepath('fl_diagnostics',
285-
filesuffix=f'_oggm_{reali}_past')
294+
filesuffix=f'_oggm_{reali}_past_'
295+
f'{glacier_state}')
286296
with xr.open_dataset(f, group=f'fl_0') as ds:
287297
fl_diag_past = ds.load()
288298
with xr.open_dataset(
289299
gdir.get_filepath('model_diagnostics',
290-
filesuffix=f'_oggm_{reali}_future')) as ds:
300+
filesuffix=f'_oggm_{reali}_future_'
301+
f'{glacier_state}')) as ds:
291302
diag_future = ds.load()
292303

293304
# how well do we match the observations -------------------------------
294305
obs_given = gdir.read_pickle('inversion_input',
295-
filesuffix='_agile_measurements')
306+
filesuffix='_agile_measurements_'
307+
f'{glacier_state}')
296308
obs_stats = {}
297309
for obs_key in obs_given.keys():
298310
obs_stats[obs_key] = {}
@@ -374,9 +386,11 @@ def calculate_default_oggm_statistics(gdir):
374386
controls_mdl = {}
375387
controls_true = {}
376388
fls_mdl = gdir.read_pickle('model_flowlines',
377-
filesuffix='_oggm_first_guess')[0]
389+
filesuffix='_oggm_first_guess_'
390+
f'{glacier_state}')[0]
378391
fls_true = gdir.read_pickle('model_flowlines',
379-
filesuffix='_agile_true_init')[0]
392+
filesuffix='_agile_true_init_'
393+
f'{glacier_state}')[0]
380394

381395
for control_var in all_control_vars:
382396
if control_var in ['bed_h', 'area_bed_h']:
@@ -405,7 +419,8 @@ def calculate_default_oggm_statistics(gdir):
405419
# how well do we match the past glacier state -----------------------------
406420
fls_start_mdl = fl_diag_past.sel(time=fl_diag_past.time[0])
407421
fls_start_true = gdir.read_pickle('model_flowlines',
408-
filesuffix='_creation_spinup')[0]
422+
filesuffix='_creation_spinup_'
423+
f'{glacier_state}')[0]
409424

410425
past_state_stats = {}
411426
for var in ['thick', 'area_m2', 'volume_m3']:
@@ -435,7 +450,8 @@ def get_volume(fl):
435450
# how well do we match the past glacier evolution ---------------------
436451
past_evol_mdl = diag_past
437452
fp = gdir.get_filepath('model_diagnostics',
438-
filesuffix='_agile_true_total_run')
453+
filesuffix='_agile_true_total_run_'
454+
f'{glacier_state}')
439455
with xr.open_dataset(fp) as ds_diag:
440456
past_evol_true = ds_diag.load()
441457

@@ -450,7 +466,8 @@ def get_volume(fl):
450466
# how well do we match today's glacier state --------------------------
451467
fls_end_mdl = fl_diag_past.sel(time=fl_diag_past.time[-1])
452468
fls_end_true = gdir.read_pickle('model_flowlines',
453-
filesuffix='_agile_true_end')[0]
469+
filesuffix='_agile_true_end_'
470+
f'{glacier_state}')[0]
454471

455472
today_state_stats = {}
456473
for var in ['thick', 'area_m2', 'volume_m3']:
@@ -480,7 +497,8 @@ def get_volume(fl):
480497
# how well do we match the future glacier evolution -------------------
481498
future_evol_mdl = diag_future
482499
fp = gdir.get_filepath('model_diagnostics',
483-
filesuffix='_agile_true_future')
500+
filesuffix='_agile_true_future_'
501+
f'{glacier_state}')
484502
with xr.open_dataset(fp) as ds_diag:
485503
future_evol_true = ds_diag.load()
486504

@@ -494,7 +512,7 @@ def get_volume(fl):
494512

495513
# save final default statistics as pickle
496514
out = os.path.join(gdir.dir,
497-
'default_oggm_statistics.pkl')
515+
f'default_oggm_statistics_{glacier_state}.pkl')
498516
with open(out, 'wb') as handle:
499517
pickle.dump(default_oggm_statistics, handle,
500518
protocol=pickle.HIGHEST_PROTOCOL)

0 commit comments

Comments
 (0)