
Commit 26b6ace

Merge pull request #1107 from CLIMADA-project/feature/update_unsequa_tuto
Update unsequa tutos
2 parents: ecee22b + 1fc1055

3 files changed: +65 / -67 lines changed

climada/engine/unsequa/input_var.py

Lines changed: 16 additions & 20 deletions
@@ -246,9 +246,8 @@ def haz(haz_list, n_ev=None, bounds_int=None, bounds_frac=None, bounds_freq=None
 The frequency of all events is multiplied by a number
 sampled uniformly from a distribution with (min, max) = bounds_freq
 HL: sample uniformly from hazard list
-From the provided list of hazard is elements are uniformly
-sampled. For example, Hazards outputs from dynamical models
-for different input factors.
+For each sample, one element is drawn uniformly from the provided list of hazards.
+For example, Hazards outputs from dynamical models for different input factors.

 If a bounds is None, this parameter is assumed to have no uncertainty.

@@ -310,8 +309,8 @@ def exp(exp_list, bounds_totval=None, bounds_noise=None):
 with (min, max) = bounds_noise. EN is the value of the seed
 for the uniform random number generator.
 EL: sample uniformly from exposure list
-From the provided list of exposure is elements are uniformly
-sampled. For example, LitPop instances with different exponents.
+For each sample, one element is drawn uniformly from the provided list of exposures.
+For example, LitPop instances with different exponents.

 If a bounds is None, this parameter is assumed to have no uncertainty.

@@ -376,9 +375,8 @@ def impfset(
 sampled uniformly from a distribution with
 (min, max) = bounds_int
 IL: sample uniformly from impact function set list
-From the provided list of impact function sets elements are uniformly
-sampled. For example, impact functions obtained from different
-calibration methods.
+For each sample, one element is drawn uniformly from the provided list of impact function sets.
+For example, impact functions obtained from different calibration methods.


 If a bounds is None, this parameter is assumed to have no uncertainty.
@@ -468,8 +466,8 @@ def ent(
 with (min, max) = bounds_noise. EN is the value of the seed
 for the uniform random number generator.
 EL: sample uniformly from exposure list
-From the provided list of exposure is elements are uniformly
-sampled. For example, LitPop instances with different exponents.
+For each sample, one element is drawn uniformly from the provided list of exposures.
+For example, LitPop instances with different exponents.
 MDD: scale the mdd (homogeneously)
 The value of mdd at each intensity is multiplied by a number
 sampled uniformly from a distribution with
@@ -483,9 +481,8 @@ def ent(
 sampled uniformly from a distribution with
 (min, max) = bounds_int
 IL: sample uniformly from impact function set list
-From the provided list of impact function sets elements are uniformly
-sampled. For example, impact functions obtained from different
-calibration methods.
+For each sample, one element is drawn uniformly from the provided list of impact function sets.
+For example, impact functions obtained from different calibration methods.

 If a bounds is None, this parameter is assumed to have no uncertainty.

@@ -566,7 +563,7 @@ def ent(
 bounds_noise=bounds_noise,
 exp_list=exp_list,
 meas_set=meas_set,
-**kwargs
+**kwargs,
 ),
 _ent_unc_dict(
 bounds_totval=bounds_totval,
@@ -616,8 +613,8 @@ def entfut(
 with (min, max) = bounds_noise. EN is the value of the seed
 for the uniform random number generator.
 EL: sample uniformly from exposure list
-From the provided list of exposure is elements are uniformly
-sampled. For example, LitPop instances with different exponents.
+For each sample, one element is drawn uniformly from the provided list of exposures.
+For example, LitPop instances with different exponents.
 MDD: scale the mdd (homogeneously)
 The value of mdd at each intensity is multiplied by a number
 sampled uniformly from a distribution with
@@ -631,9 +628,8 @@ def entfut(
 sampled uniformly from a distribution with
 (min, max) = bounds_impfi
 IL: sample uniformly from impact function set list
-From the provided list of impact function sets elements are uniformly
-sampled. For example, impact functions obtained from different
-calibration methods.
+For each sample, one element is drawn uniformly from the provided list of impact function sets.
+For example, impact functions obtained from different calibration methods.

 If a bounds is None, this parameter is assumed to have no uncertainty.

@@ -706,7 +702,7 @@ def entfut(
 impf_set_list=impf_set_list,
 exp_list=exp_list,
 meas_set=meas_set,
-**kwargs
+**kwargs,
 ),
 _entfut_unc_dict(
 bounds_eg=bounds_eg,
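The rewritten docstrings all describe the same list-sampling behaviour (HL, EL, IL): for each sample, one element of the supplied list is drawn uniformly. As a minimal, hedged sketch of how these helper constructors are typically used, assuming the helpers are exposed as InputVar.haz and InputVar.exp with the signatures shown above, that the hazard and exposures objects are already loaded, and with purely illustrative bound values:

from climada.engine.unsequa import InputVar

# HL: one hazard drawn uniformly from the list per sample,
# with intensity additionally scaled within bounds_int (illustrative bounds)
haz_iv = InputVar.haz([haz_model_a, haz_model_b], bounds_int=[0.9, 1.1])

# EL: one exposures object drawn uniformly from the list per sample,
# with the total value additionally scaled within bounds_totval (illustrative bounds)
exp_iv = InputVar.exp([exp_litpop_m1, exp_litpop_m2], bounds_totval=[0.8, 1.2])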

doc/user-guide/climada_engine_unsequa.ipynb

Lines changed: 36 additions & 33 deletions
@@ -12,7 +12,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"This is a tutorial for the unsequa module in CLIMADA. A detailled description can be found in [Kropf (2021)](https://eartharxiv.org/repository/view/3123/)."
+"This is a tutorial for the unsequa module in CLIMADA. A detailled description can be found in [Kropf et al. (2022)](https://doi.org/10.5194/gmd-15-7177-2022)."
 ]
 },
 {
@@ -31,7 +31,7 @@
 "\n",
 "In this module, it is possible to perform global uncertainty analysis, as well as a sensitivity analysis. The word global is meant as opposition to the 'one-factor-at-a-time' (OAT) strategy. The OAT strategy, which consists in analyzing the effect of varying one model input factor at a time while keeping all other fixed, is popular among modellers, but has major shortcomings [Saltelli (2010)](https://www.sciencedirect.com/science/article/abs/pii/S1364815210001180), [Saltelli(2019)](http://www.sciencedirect.com/science/article/pii/S1364815218302822) and should not be used.\n",
 "\n",
-"A rough schemata of how to perform uncertainty and sensitivity analysis (taken from [Kropf(2021)](https://eartharxiv.org/repository/view/3123/))"
+"A rough schemata of how to perform uncertainty and sensitivity analysis (taken from [Kropf et al. (2022)](https://doi.org/10.5194/gmd-15-7177-2022)."
 ]
 },
 {
@@ -50,7 +50,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"1. [Kropf, C.M. et al. Uncertainty and sensitivity analysis for global probabilistic weather and climate risk modelling: an implementation in the CLIMADA platform (2021)](https://eartharxiv.org/repository/view/3123/)\n",
+"1. [Kropf, C.M. et al. Uncertainty and sensitivity analysis for probabilistic weather and climate-risk modelling: an implementation in CLIMADA v.3.1.0. Geoscientific Model Development, 15, 7177–7201 (2022)](https://doi.org/10.5194/gmd-15-7177-2022).\n",
 "2. [Pianosi, F. et al. Sensitivity analysis of environmental models: A systematic review with practical workflow. Environmental Modelling & Software 79, 214–232 (2016)](https://www.sciencedirect.com/science/article/pii/S1364815216300287).\n",
 "3.[Douglas-Smith, D., Iwanaga, T., Croke, B. F. W. & Jakeman, A. J. Certain trends in uncertainty and sensitivity analysis: An overview of software tools and techniques. Environmental Modelling & Software 124, 104588 (2020)](https://doi.org/10.1007/978-1-4899-7547-8_5)\n",
 "4. [Knüsel, B. Epistemological Issues in Data-Driven Modeling in Climate Research. (ETH Zurich, 2020)](https://www.research-collection.ethz.ch/handle/20.500.11850/399735)\n",
@@ -542,12 +542,12 @@
 "source": [
 "| Attribute | Type | Description |\n",
 "| --- | --- | --- |\n",
-"| sampling_method | str | The sampling method as defined in [SALib](https://salib.readthedocs.io/en/latest/api.html). Possible choices: 'saltelli', 'fast_sampler', 'latin', 'morris', 'dgsm', 'ff'|\n",
+"| sampling_method | str | The sampling method as defined in [SALib](https://salib.readthedocs.io/en/latest/api.html). Possible choices: 'saltelli', 'fast_sampler', 'latin', 'morris', 'dgsm', 'ff', 'finite_diff'|\n",
 "| sampling_kwargs | dict | Keyword arguments for the sampling_method. |\n",
 "| n_samples | int | Effective number of samples (number of rows of samples_df)|\n",
 "| param_labels | list(str) | Name of all the uncertainty input parameters|\n",
 "| problem_sa | dict | The description of the uncertainty variables and their distribution as used in [SALib](https://salib.readthedocs.io/en/latest/basics.html). |\n",
-"| sensitivity_method | str | Sensitivity analysis method from [SALib.analyse](https://salib.readthedocs.io/en/latest/api.html) Possible choices: 'fast', 'rbd_fact', 'morris', 'sobol', 'delta', 'ff'. Note that in Salib, sampling methods and sensitivity analysis methods should be used in specific pairs.|\n",
+"| sensitivity_method | str | Sensitivity analysis method from [SALib.analyse](https://salib.readthedocs.io/en/latest/api.html) Possible choices: 'sobol', 'fast', 'rbd_fast', 'morris', 'dgsm', 'ff', 'pawn', 'rhdm', 'rsa', 'discrepancy', 'hdmr'. Note that in Salib, sampling methods and sensitivity analysis methods should be used in specific pairs.|\n",
 "| sensitivity_kwargs | dict | Keyword arguments for sensitivity_method. |\n",
 "| unit | str | Unit of the exposures value |"
 ]
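Because the table stresses that SALib sampling and sensitivity methods must be used in matching pairs, here is a hedged sketch of one such pair, Latin hypercube sampling with the RBD-FAST analysis. It assumes calc_imp is the CalcImpact object built from the input variables earlier in the tutorial and follows the make_sample/uncertainty/sensitivity flow used there:

# 'latin' sampling is paired with the 'rbd_fast' sensitivity method
unc_out = calc_imp.make_sample(N=2**10, sampling_method="latin")
unc_out = calc_imp.uncertainty(unc_out)
unc_out = calc_imp.sensitivity(unc_out, sensitivity_method="rbd_fast")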
@@ -2466,7 +2466,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 51,
+"execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -2475,10 +2475,10 @@
 "haz.basin = [\"NA\"] * haz.size\n",
 "\n",
 "# apply climate change factors\n",
-"haz_26 = haz.apply_climate_scenario_knu(ref_year=2050, rcp_scenario=26)\n",
-"haz_45 = haz.apply_climate_scenario_knu(ref_year=2050, rcp_scenario=45)\n",
-"haz_60 = haz.apply_climate_scenario_knu(ref_year=2050, rcp_scenario=60)\n",
-"haz_85 = haz.apply_climate_scenario_knu(ref_year=2050, rcp_scenario=85)\n",
+"haz_26 = haz.apply_climate_scenario_knu(target_year=2050, scenario=\"2.6\")\n",
+"haz_45 = haz.apply_climate_scenario_knu(target_year=2050, scenario=\"4.5\")\n",
+"haz_60 = haz.apply_climate_scenario_knu(target_year=2050, scenario=\"6.0\")\n",
+"haz_85 = haz.apply_climate_scenario_knu(target_year=2050, scenario=\"8.5\")\n",
 "\n",
 "# pack future hazard sets into dictionary - we want to sample from this dictionary later\n",
 "haz_fut_list = [haz_26, haz_45, haz_60, haz_85]\n",
@@ -2489,7 +2489,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 52,
+"execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -2501,7 +2501,7 @@
 "\n",
 "def exp_base_func(x_exp, exp_base):\n",
 " exp = exp_base.copy()\n",
-" exp.gdf[\"value\"] *= x_exp\n",
+" exp.data[\"value\"] *= x_exp\n",
 " return exp\n",
 "\n",
 "\n",
@@ -2821,7 +2821,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 61,
+"execution_count": null,
 "metadata": {
 "ExecuteTime": {
 "end_time": "2023-08-03T12:00:12.180767Z",
@@ -2844,7 +2844,7 @@
 "\n",
 " entity = Entity.from_excel(ENT_DEMO_TODAY)\n",
 " entity.exposures.ref_year = 2018\n",
-" entity.exposures.gdf[\"value\"] *= x_ent\n",
+" entity.exposures.data[\"value\"] *= x_ent\n",
 " return entity\n",
 "\n",
 "\n",
@@ -2954,7 +2954,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 64,
+"execution_count": null,
 "metadata": {
 "ExecuteTime": {
 "end_time": "2023-08-03T12:00:12.959984Z",
@@ -3070,7 +3070,7 @@
 ],
 "source": [
 "ent_avg = ent_today_iv.evaluate()\n",
-"ent_avg.exposures.gdf.head()"
+"ent_avg.exposures.data.head()"
 ]
 },
 {
@@ -5320,7 +5320,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 77,
+"execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -5335,7 +5335,7 @@
 "\n",
 "def exp_func(cnt, x_exp, exp_list=exp_list):\n",
 " exp = exp_list[int(cnt)].copy()\n",
-" exp.gdf[\"value\"] *= x_exp\n",
+" exp.data[\"value\"] *= x_exp\n",
 " return exp\n",
 "\n",
 "\n",
@@ -5523,7 +5523,7 @@
 "source": [
 "Loading Hazards or Exposures from file is a rather lengthy operation. Thus, we want to minimize the reading operations, ideally reading each file only once. Simultaneously, Hazard and Exposures can be large in memory, and thus we would like to have at most one of each loaded at a time. Thus, we do not want to use the list capacity from the helper method InputVar.exposures and InputVar.hazard.\n",
 "\n",
-"For demonstration purposes, we will use below as exposures files the litpop for three countries, and for tha hazard files the winter storms for the same three countries. Note that this does not make a lot of sense for an uncertainty analysis. For your use case, please replace the set of exposures and/or hazard files with meaningful sets, for instance sets of exposures for different resolutions or hazards for different model runs.\n"
+"For demonstration purposes, we will use below as exposures files the litpop for three countries, and for the hazard files the winter storms for the same three countries. Note that this does not make a lot of sense for an uncertainty analysis. For your use case, please replace the set of exposures and/or hazard files with meaningful sets, for instance sets of exposures for different resolutions or hazards for different model runs.\n"
 ]
 },
 {
@@ -5600,17 +5600,18 @@
 "def exp_func(f_exp, x_exp, filename_list=f_exp_list):\n",
 " filename = filename_list[int(f_exp)]\n",
 " global exp_base\n",
-" if \"exp_base\" in globals():\n",
-" if isinstance(exp_base, Exposures):\n",
-" if exp_base.gdf[\"filename\"] != str(filename):\n",
-" exp_base = Exposures.from_hdf5(filename)\n",
-" exp_base.gdf[\"filename\"] = str(filename)\n",
+" if (\n",
+" \"exp_base\" in globals()\n",
+" and isinstance(exp_base, Exposures)\n",
+" and exp_base.description == str(filename)\n",
+" ):\n",
+" pass # if correct file is already loaded in memory, we do not need to reload it\n",
 " else:\n",
 " exp_base = Exposures.from_hdf5(filename)\n",
-" exp_base.gdf[\"filename\"] = str(filename)\n",
+" exp_base.description = str(filename)\n",
 "\n",
 " exp = exp_base.copy()\n",
-" exp.gdf[\"value\"] *= x_exp\n",
+" exp.data[\"value\"] *= x_exp\n",
 " return exp\n",
 "\n",
 "\n",
@@ -5624,14 +5625,16 @@
 "def haz_func(f_haz, i_haz, filename_list=f_haz_list):\n",
 " filename = filename_list[int(f_haz)]\n",
 " global haz_base\n",
-" if \"haz_base\" in globals():\n",
-" if isinstance(haz_base, Hazard):\n",
-" if haz_base.filename != str(filename):\n",
-" haz_base = Hazard.from_hdf5(filename)\n",
-" haz_base.filename = str(filename)\n",
+" if (\n",
+" \"haz_base\" in globals()\n",
+" and isinstance(haz_base, Hazard)\n",
+" and hasattr(haz_base, \"description\")\n",
+" and haz_base.description == str(filename)\n",
+" ):\n",
+" pass\n",
 " else:\n",
 " haz_base = Hazard.from_hdf5(filename)\n",
-" haz_base.filename = str(filename)\n",
+" setattr(haz_base, \"description\", str(filename))\n",
 "\n",
 " haz = copy.deepcopy(haz_base)\n",
 " haz.intensity *= i_haz\n",
@@ -5707,7 +5710,7 @@
 "source": [
 "# Ordering of the samples by hazard first and exposures second\n",
 "output_imp = calc_imp.make_sample(N=2**2, sampling_kwargs={\"skip_values\": 2**3})\n",
-"output_imp.order_samples(by=[\"f_haz\", \"f_exp\"])"
+"output_imp.order_samples(by_parameters=[\"f_haz\", \"f_exp\"])"
 ]
 },
 {
