Skip to content

Commit 4c27340

Browse files
authored
Merge pull request #9 from ClimateImpactLab/main_recipe_changes
Main recipe changes
2 parents 584583a + 578bd91 commit 4c27340

File tree

3 files changed

+135
-131
lines changed

3 files changed

+135
-131
lines changed

dscim/menu/main_recipe.py

Lines changed: 42 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -161,6 +161,10 @@ def __init__(
161161
"discount_factors",
162162
"uncollapsed_sccs",
163163
"scc",
164+
"uncollapsed_discount_factors",
165+
"uncollapsed_marginal_damages",
166+
"global_consumption",
167+
"global_consumption_no_pulse",
164168
],
165169
**kwargs,
166170
):
@@ -285,6 +289,7 @@ def damage_function():
285289
def scc():
286290
damage_function()
287291
self.global_consumption
292+
self.global_consumption_no_pulse
288293
self.logger.info("Processing SCC calculation ...")
289294
if self.fit_type == "quantreg":
290295
self.full_uncertainty_iqr
@@ -293,6 +298,8 @@ def scc():
293298
self.stream_discount_factors
294299
self.calculate_scc
295300
self.uncollapsed_sccs
301+
self.uncollapsed_marginal_damages
302+
self.uncollapsed_discount_factors
296303

297304
course_dict = {"damage_function": damage_function, "scc": scc}
298305

@@ -1173,7 +1180,7 @@ def calculate_discount_factors(self, cons_pc):
11731180
rhos = xr.DataArray(self.rho, coords=[cons_pc.year])
11741181
else:
11751182
# plug the unique rho in an array, and compute e^rho - 1
1176-
rhos = xr.ufuncs.expm1(xr.DataArray(self.rho, coords=[cons_pc.year]))
1183+
rhos = np.expm1(xr.DataArray(self.rho, coords=[cons_pc.year]))
11771184

11781185
stream_rhos = np.divide(
11791186
1, np.multiply.accumulate((rhos.values + 1), rhos.dims.index("year"))
@@ -1309,6 +1316,40 @@ def stream_discount_factors(self):
13091316
fair_aggregation=self.fair_aggregation,
13101317
)
13111318

1319+
@cachedproperty
1320+
@save("uncollapsed_discount_factors")
1321+
def uncollapsed_discount_factors(self):
1322+
pop = self.collapsed_pop.sum("region")
1323+
pop = pop.reindex(
1324+
year=range(pop.year.min().values, self.ext_end_year + 1),
1325+
method="ffill",
1326+
)
1327+
f = self.calculate_discount_factors(
1328+
self.global_consumption_no_pulse / pop
1329+
).to_dataset(name="discount_factor")
1330+
for var in f.variables:
1331+
f[var].encoding.clear()
1332+
1333+
return f
1334+
1335+
@cachedproperty
1336+
@save("uncollapsed_marginal_damages")
1337+
def uncollapsed_marginal_damages(self):
1338+
1339+
md = (
1340+
(
1341+
(self.global_consumption_no_pulse - self.global_consumption_pulse)
1342+
* self.climate.conversion
1343+
)
1344+
.rename("marginal_damages")
1345+
.to_dataset()
1346+
)
1347+
1348+
for var in md.variables:
1349+
md[var].encoding.clear()
1350+
1351+
return md
1352+
13121353
def ce(self, obj, dims):
13131354
"""Rechunk data appropriately and apply the certainty equivalence
13141355
calculation. This is done in a loop to avoid memory crashes.

dscim/preprocessing/input_damages.py

Lines changed: 93 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from pathlib import Path
1515
from itertools import product
1616
from functools import partial
17-
from p_tqdm import p_map
17+
from p_tqdm import p_map, p_umap
1818
from dscim.menu.simple_storage import EconVars
1919
from zarr.errors import GroupNotFoundError
2020

@@ -220,13 +220,50 @@ def concatenate_labor_damages(
220220

221221
# save out
222222
if format_file == "zarr":
223-
concat_ds.to_zarr(f"{path_to_file}.zarr", consolidated=True, mode="w")
223+
to_store = concat_ds.copy()
224+
for var in to_store.variables:
225+
to_store[var].encoding.clear()
226+
227+
to_store.to_zarr(f"{path_to_file}.zarr", mode="w", consolidated=True)
224228
elif format_file == "netcdf":
225229
concat_ds.to_netcdf(f"{path_to_file}.nc4")
226230

227231
return concat_ds
228232

229233

234+
def calculate_labor_batch_damages(batch, ec, input_path, save_path):
235+
print(f"Processing batch={batch} damages in {os.getpid()}")
236+
concatenate_labor_damages(
237+
input_path=input_path,
238+
save_path=save_path,
239+
ec_cls=ec,
240+
variable="rebased",
241+
val_type="wage-levels",
242+
format_file="zarr",
243+
query=f"exists==True&batch=='batch{batch}'",
244+
)
245+
print("Saved!")
246+
247+
248+
def calculate_labor_damages(
249+
path_econ="/shares/gcp/integration/float32/dscim_input_data/econvars/zarrs/integration-econ-bc39.zarr",
250+
input_path="/shares/gcp/outputs/labor/impacts-woodwork/mc_correct_rebasing_for_integration",
251+
save_path="/shares/gcp/integration/float32/input_data_histclim/labor_data/replication/",
252+
):
253+
ec = EconVars(path_econ)
254+
# process in 3 rounds to limit memory usage
255+
for i in range(0, 3):
256+
partial_func = partial(
257+
calculate_labor_batch_damages,
258+
input_path=input_path,
259+
save_path=save_path,
260+
ec=ec,
261+
)
262+
print("Processing batches:")
263+
print(list(range(i * 5, i * 5 + 5)))
264+
p_umap(partial_func, list(range(i * 5, i * 5 + 5)))
265+
266+
230267
def compute_ag_damages(
231268
input_path,
232269
save_path,
@@ -561,13 +598,66 @@ def concatenate_energy_damages(
561598
logger.info(f"Concatenating and processing {i}")
562599

563600
if format_file == "zarr":
564-
concat_ds.to_zarr(f"{path_to_file}.zarr", consolidated=True, mode="w")
601+
to_store = concat_ds.copy()
602+
for var in to_store.variables:
603+
to_store[var].encoding.clear()
604+
605+
to_store.to_zarr(f"{path_to_file}.zarr", mode="w", consolidated=True)
565606
elif format_file == "netcdf":
566607
concat_ds.to_netcdf(f"{path_to_file}.nc4")
567608

568609
return concat_ds
569610

570611

612+
def calculate_energy_batch_damages(batch, ec, input_path, save_path):
613+
print(f"Processing batch={batch} damages in {os.getpid()}")
614+
concatenate_energy_damages(
615+
input_path=input_path,
616+
file_prefix="TINV_clim_integration_total_energy",
617+
save_path=save_path,
618+
ec_cls=ec,
619+
variable="rebased",
620+
format_file="zarr",
621+
query=f"exists==True&batch=='batch{batch}'",
622+
)
623+
print("Saved!")
624+
625+
626+
def calculate_energy_damages(
627+
re_calculate=True,
628+
path_econ="/shares/gcp/integration/float32/dscim_input_data/econvars/zarrs/integration-econ-bc39.zarr",
629+
input_path="/shares/gcp/outputs/energy_pixel_interaction/impacts-blueghost/integration_resampled",
630+
save_path="/shares/gcp/integration/float32/input_data_histclim/energy_data/replication_2022aug/",
631+
):
632+
ec = EconVars(path_econ)
633+
634+
if re_calculate:
635+
read_energy_files_parallel(
636+
input_path=input_path,
637+
save_path=save_path,
638+
ec_cls=ec,
639+
seed="TINV_clim_integration_total_energy_delta",
640+
)
641+
read_energy_files_parallel(
642+
input_path=input_path,
643+
save_path=save_path,
644+
ec_cls=ec,
645+
seed="TINV_clim_integration_total_energy_histclim",
646+
)
647+
648+
# process in 3 rounds to limit memory usage
649+
for i in range(0, 3):
650+
partial_func = partial(
651+
calculate_energy_batch_damages,
652+
input_path=input_path,
653+
save_path=save_path,
654+
ec=ec,
655+
)
656+
print("Processing batches:")
657+
print(list(range(i * 5, i * 5 + 5)))
658+
p_umap(partial_func, list(range(i * 5, i * 5 + 5)))
659+
660+
571661
def prep_mortality_damages(
572662
gcms,
573663
paths,

dscim/utils/menu_runs.py

Lines changed: 0 additions & 127 deletions
Original file line numberDiff line numberDiff line change
@@ -35,9 +35,6 @@ def run_ssps(
3535
AR,
3636
masks=[None],
3737
fair_dims_list=[["simulation"]],
38-
global_cons=False,
39-
factors=False,
40-
marginal_damages=False,
4138
order="damage_function",
4239
):
4340

@@ -109,67 +106,6 @@ def run_ssps(
109106
menu_item = MENU_OPTIONS[menu_option](**kwargs)
110107
menu_item.order_plate(order)
111108

112-
if global_cons:
113-
menu_item.global_consumption_no_pulse.to_netcdf(
114-
f"{save_path}/{menu_option}_{discount_type}_eta{menu_item.eta}_rho{menu_item.rho}_global_consumption_no_pulse.nc4"
115-
)
116-
menu_item.global_consumption.to_netcdf(
117-
f"{save_path}/{menu_option}_{discount_type}_eta{menu_item.eta}_rho{menu_item.rho}_global_consumption.nc4"
118-
)
119-
120-
if marginal_damages:
121-
md = (
122-
menu_item.global_consumption_no_pulse
123-
- menu_item.global_consumption_pulse
124-
) * menu_item.climate.conversion
125-
md = md.rename("marginal_damages").to_dataset()
126-
for var in md.variables:
127-
md[var].encoding.clear()
128-
md.chunk(
129-
{
130-
"discount_type": 1,
131-
"weitzman_parameter": 1,
132-
"ssp": 1,
133-
"model": 1,
134-
"gas": 1,
135-
"year": 10,
136-
}
137-
).to_zarr(
138-
f"{save_path}/{menu_option}_{discount_type}_eta{menu_item.eta}_rho{menu_item.rho}_uncollapsed_marginal_damages.zarr",
139-
consolidated=True,
140-
mode="w",
141-
)
142-
143-
if factors:
144-
145-
# holding population constant
146-
# from 2100 to 2300 with 2099 values
147-
pop = menu_item.collapsed_pop.sum("region")
148-
pop = pop.reindex(
149-
year=range(pop.year.min().values, menu_item.ext_end_year + 1),
150-
method="ffill",
151-
)
152-
153-
df = menu_item.calculate_discount_factors(
154-
menu_item.global_consumption_no_pulse / pop
155-
).to_dataset(name="discount_factor")
156-
for var in df.variables:
157-
df[var].encoding.clear()
158-
df.chunk(
159-
{
160-
"discount_type": 1,
161-
"weitzman_parameter": 1,
162-
"ssp": 1,
163-
"model": 1,
164-
# "gas":1,
165-
"year": 10,
166-
}
167-
).to_zarr(
168-
f"{save_path}/{menu_option}_{discount_type}_eta{menu_item.eta}_rho{menu_item.rho}_uncollapsed_discount_factors.zarr",
169-
consolidated=True,
170-
mode="w",
171-
)
172-
173109

174110
def run_rff(
175111
sectors,
@@ -179,8 +115,6 @@ def run_rff(
179115
config,
180116
USA,
181117
global_cons=True,
182-
factors=True,
183-
marginal_damages=True,
184118
order="scc",
185119
):
186120

@@ -228,64 +162,3 @@ def run_rff(
228162

229163
menu_item = MENU_OPTIONS[menu_option](**kwargs)
230164
menu_item.order_plate(order)
231-
232-
if global_cons:
233-
menu_item.global_consumption_no_pulse.to_netcdf(
234-
f"{save_path}/{menu_option}_{discount_type}_eta{menu_item.eta}_rho{menu_item.rho}_global_consumption_no_pulse.nc4"
235-
)
236-
menu_item.global_consumption.to_netcdf(
237-
f"{save_path}/{menu_option}_{discount_type}_eta{menu_item.eta}_rho{menu_item.rho}_global_consumption.nc4"
238-
)
239-
240-
if marginal_damages:
241-
md = (
242-
(
243-
(
244-
menu_item.global_consumption_no_pulse
245-
- menu_item.global_consumption_pulse
246-
)
247-
* menu_item.climate.conversion
248-
)
249-
.rename("marginal_damages")
250-
.to_dataset()
251-
)
252-
253-
for var in md.variables:
254-
md[var].encoding.clear()
255-
256-
md.chunk(
257-
{
258-
"discount_type": 1,
259-
"weitzman_parameter": 14,
260-
"runid": 10000,
261-
"gas": 1,
262-
"year": 10,
263-
}
264-
).to_zarr(
265-
f"{save_path}/{menu_option}_{discount_type}_eta{menu_item.eta}_rho{menu_item.rho}_uncollapsed_marginal_damages.zarr",
266-
consolidated=True,
267-
mode="w",
268-
)
269-
if factors:
270-
271-
f = menu_item.calculate_discount_factors(
272-
menu_item.global_consumption_no_pulse / menu_item.pop
273-
).to_dataset(name="discount_factor")
274-
275-
for var in f.variables:
276-
f[var].encoding.clear()
277-
278-
f.chunk(
279-
{
280-
"discount_type": 1,
281-
"weitzman_parameter": 14,
282-
"runid": 10000,
283-
"gas": 1,
284-
"region": 1,
285-
"year": 10,
286-
}
287-
).to_zarr(
288-
f"{save_path}/{menu_option}_{discount_type}_eta{menu_item.eta}_rho{menu_item.rho}_uncollapsed_discount_factors.zarr",
289-
consolidated=True,
290-
mode="w",
291-
)

0 commit comments

Comments (0)