
Commit ec2948b

Merge branch 'forecast-class' into add_forecast_tutorial
2 parents: 6e25208 + d84db1e

File tree: 5 files changed (+325 additions, -7 deletions)

climada/engine/impact.py

Lines changed: 22 additions & 4 deletions
@@ -1431,6 +1431,8 @@ def write_attribute(group, name, value):
 
         def write_dataset(group, name, value):
             """Write a dataset"""
+            if name == "lead_time":
+                value = value.astype("timedelta64[ns]").astype("int64")
             group.create_dataset(name, data=value, dtype=_str_type_helper(value))
 
         def write_dict(group, name, value):

@@ -1618,7 +1620,9 @@ def read_excel(self, *args, **kwargs):
         self.__dict__ = Impact.from_excel(*args, **kwargs).__dict__
 
     @classmethod
-    def from_hdf5(cls, file_path: Union[str, Path]):
+    def from_hdf5(
+        cls, file_path: Union[str, Path], *, add_scalar_attrs=None, add_array_attrs=None
+    ):
         """Create an impact object from an H5 file.
 
         This assumes a specific layout of the file. If values are not found in the

@@ -1663,6 +1667,10 @@ def from_hdf5(cls, file_path: Union[str, Path]):
         ----------
         file_path : str or Path
            The file path of the file to read.
+        add_scalar_attrs : Iterable of str, optional
+            Scalar attributes to read from file. Defaults to None.
+        add_array_attrs : Iterable of str, optional
+            Array attributes to read from file. Defaults to None.
 
         Returns
         -------

@@ -1691,17 +1699,27 @@ def from_hdf5(cls, file_path: Union[str, Path]):
             # Scalar attributes
             scalar_attrs = set(
                 ("crs", "tot_value", "unit", "aai_agg", "frequency_unit", "haz_type")
-            ).intersection(file.attrs.keys())
+            )
+            if add_scalar_attrs is not None:
+                scalar_attrs = scalar_attrs.union(add_scalar_attrs)
+            scalar_attrs = scalar_attrs.intersection(file.attrs.keys())
             kwargs.update({attr: file.attrs[attr] for attr in scalar_attrs})
 
             # Array attributes
             # NOTE: Need [:] to copy array data. Otherwise, it would be a view that is
             # invalidated once we close the file.
             array_attrs = set(
                 ("event_id", "date", "coord_exp", "eai_exp", "at_event", "frequency")
-            ).intersection(file.keys())
+            )
+            if add_array_attrs is not None:
+                array_attrs = array_attrs.union(add_array_attrs)
+            array_attrs = array_attrs.intersection(file.keys())
             kwargs.update({attr: file[attr][:] for attr in array_attrs})
-
+            # correct lead_time attribute to timedelta
+            if "lead_time" in kwargs:
+                kwargs["lead_time"] = np.array(file["lead_time"][:]).astype(
+                    "timedelta64[ns]"
+                )
             # Special handling for 'event_name' because it should be a list of strings
             if "event_name" in file:
                 # pylint: disable=no-member
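
The two changes above form a round trip: write_dataset stores lead_time as integer nanoseconds (HDF5 has no native timedelta type), and from_hdf5 casts the integers back to timedelta64[ns]. A minimal NumPy-only sketch of that conversion, with illustrative values:

```python
import numpy as np

# Write path: timedelta64 values become int64 nanoseconds (HDF5-friendly).
lead_time = np.array([6, 12, 24], dtype="timedelta64[h]")
stored = lead_time.astype("timedelta64[ns]").astype("int64")

# Read path: the int64 nanoseconds are cast back to timedelta64[ns].
restored = np.array(stored).astype("timedelta64[ns]")

assert np.array_equal(restored, lead_time.astype("timedelta64[ns]"))
```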

climada/engine/impact_forecast.py

Lines changed: 111 additions & 0 deletions
@@ -20,6 +20,8 @@
 """
 
 import logging
+from pathlib import Path
+from typing import Union
 
 import numpy as np
 import scipy.sparse as sparse

@@ -173,6 +175,62 @@ def calc_freq_curve(self, return_per=None):
         LOGGER.error("calc_freq_curve is not defined for ImpactForecast")
         raise NotImplementedError("calc_freq_curve is not defined for ImpactForecast")
 
+    @classmethod
+    def from_hdf5(cls, file_path: Union[str, Path]):
+        """Create an ImpactForecast object from an H5 file.
+
+        This assumes a specific layout of the file. If values are not found in the
+        expected places, they will be set to the default values for an ``Impact`` object.
+
+        The following H5 file structure is assumed (H5 groups are terminated with ``/``,
+        attributes are denoted by ``.attrs/``)::
+
+            file.h5
+            ├─ at_event
+            ├─ coord_exp
+            ├─ eai_exp
+            ├─ event_id
+            ├─ event_name
+            ├─ frequency
+            ├─ imp_mat
+            ├─ lead_time
+            ├─ member
+            ├─ .attrs/
+            │  ├─ aai_agg
+            │  ├─ crs
+            │  ├─ frequency_unit
+            │  ├─ haz_type
+            │  ├─ tot_value
+            │  ├─ unit
+
+        As per the :py:func:`climada.engine.impact.Impact.__init__`, any of these entries
+        is optional. If it is not found, the default value will be used when constructing
+        the Impact.
+
+        The impact matrix ``imp_mat`` can either be an H5 dataset, in which case it is
+        interpreted as dense representation of the matrix, or an H5 group, in which case
+        the group is expected to contain the following data for instantiating a
+        `scipy.sparse.csr_matrix <https://docs.scipy.org/doc/scipy/reference/generated/scipy.sparse.csr_matrix.html>`_::
+
+            imp_mat/
+            ├─ data
+            ├─ indices
+            ├─ indptr
+            ├─ .attrs/
+            │  ├─ shape
+
+        Parameters
+        ----------
+        file_path : str or Path
+            The file path of the file to read.
+
+        Returns
+        -------
+        imp : ImpactForecast
+            ImpactForecast with data from the given file
+        """
+        return super().from_hdf5(file_path, add_array_attrs={"member", "lead_time"})
+
     def _check_sizes(self):
         """Check sizes of forecast data vs. impact data.
 

@@ -354,3 +412,56 @@ def select(
             coord_exp=coord_exp,
             reset_frequency=reset_frequency,
         )
+
+    def _quantile(self, q: float, event_name: str | None = None):
+        """
+        Reduce the impact matrix and at_event of an ImpactForecast to the quantile value.
+        """
+        red_imp_mat = sparse.csr_matrix(np.quantile(self.imp_mat.toarray(), q, axis=0))
+        red_at_event = np.array([red_imp_mat.sum()])
+        if event_name is None:
+            event_name = f"quantile_{q}"
+        return ImpactForecast(
+            frequency_unit=self.frequency_unit,
+            coord_exp=self.coord_exp,
+            crs=self.crs,
+            eai_exp=self.eai_exp,
+            at_event=red_at_event,
+            tot_value=self.tot_value,
+            aai_agg=self.aai_agg,
+            unit=self.unit,
+            imp_mat=red_imp_mat,
+            haz_type=self.haz_type,
+            **self._reduce_attrs(event_name),
+        )
+
+    def quantile(self, q: float):
+        """
+        Reduce the impact matrix and at_event of an ImpactForecast to the quantile value.
+
+        Parameters
+        ----------
+        q : float
+            The quantile to compute, which must be between 0 and 1.
+
+        Returns
+        -------
+        ImpactForecast
+            An ImpactForecast object with the quantile impact matrix and at_event.
+        """
+        return self._quantile(q=q)
+
+    def median(self):
+        """
+        Reduce the impact matrix and at_event of an ImpactForecast to the median value.
+
+        Parameters
+        ----------
+        None
+
+        Returns
+        -------
+        ImpactForecast
+            An ImpactForecast object with the median impact matrix and at_event.
+        """
+        return self._quantile(q=0.5, event_name="median")
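
For orientation, a hedged sketch of how the new ImpactForecast.from_hdf5 (which delegates to Impact.from_hdf5 with add_array_attrs={"member", "lead_time"}) and the quantile/median reductions could be used together. The file name is an illustrative assumption; the file is expected to follow the layout documented in the docstring above:

```python
from climada.engine.impact_forecast import ImpactForecast

# Hypothetical file written earlier with ImpactForecast.write_hdf5();
# "forecast_impact.h5" is an illustrative name, not shipped with CLIMADA.
imp_fc = ImpactForecast.from_hdf5("forecast_impact.h5")

# Collapse the ensemble dimension into a single pseudo-event per statistic.
imp_q90 = imp_fc.quantile(q=0.9)   # event_name becomes "quantile_0.9"
imp_med = imp_fc.median()          # event_name becomes "median"

print(imp_med.imp_mat.shape)       # one row after the reduction: (1, n_exposure_points)
```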

climada/engine/test/test_impact_forecast.py

Lines changed: 76 additions & 0 deletions
@@ -253,6 +253,36 @@ def test_impact_forecast_blocked_methods(impact_forecast):
         impact_forecast.calc_freq_curve(np.array([10, 50, 100]))
 
 
+@pytest.mark.parametrize("dense", [True, False])
+def test_write_read_hdf5(impact_forecast, tmp_path, dense):
+
+    file_name = tmp_path / "test_hazard_forecast.h5"
+    # replace dummy_impact event_names with strings
+    impact_forecast.event_name = [str(name) for name in impact_forecast.event_name]
+    impact_forecast.write_hdf5(file_name, dense_imp_mat=dense)
+
+    def compare_attr(obj, attr):
+        actual = getattr(obj, attr)
+        expected = getattr(impact_forecast, attr)
+        if isinstance(actual, csr_matrix):
+            npt.assert_array_equal(actual.todense(), expected.todense())
+        else:
+            npt.assert_array_equal(actual, expected)
+
+    # Read ImpactForecast
+    impact_forecast_read = ImpactForecast.from_hdf5(file_name)
+    assert impact_forecast_read.lead_time.dtype.kind == np.dtype("timedelta64").kind
+    for attr in impact_forecast.__dict__.keys():
+        compare_attr(impact_forecast_read, attr)
+
+    # Read Impact
+    impact_read = Impact.from_hdf5(file_name)
+    for attr in impact_read.__dict__.keys():
+        compare_attr(impact_read, attr)
+    assert "member" not in impact_read.__dict__
+    assert "lead_time" not in impact_read.__dict__
+
+
 @pytest.fixture
 def impact_forecast_stats(impact_kwargs, lead_time, member):
     max_index = 4

@@ -288,3 +318,49 @@ def test_impact_forecast_min_mean_max(impact_forecast_stats, attr):
     npt.assert_array_equal(imp_fc_reduced.event_id, [0])
     npt.assert_array_equal(imp_fc_reduced.frequency, [1])
     npt.assert_array_equal(imp_fc_reduced.date, [0])
+
+
+@pytest.mark.parametrize("quantile", [0.3, 0.6, 0.8])
+def test_impact_forecast_quantile(impact_forecast, quantile):
+    """Check quantile method for ImpactForecast"""
+    imp_fcst_quantile = impact_forecast.quantile(q=quantile)
+
+    # assert imp_mat
+    npt.assert_array_equal(
+        imp_fcst_quantile.imp_mat.toarray().squeeze(),
+        np.quantile(impact_forecast.imp_mat.toarray(), quantile, axis=0),
+    )
+    # assert at_event
+    npt.assert_array_equal(
+        imp_fcst_quantile.at_event,
+        np.quantile(impact_forecast.at_event, quantile, axis=0).sum(),
+    )
+
+    # check that attributes were reduced correctly
+    npt.assert_array_equal(imp_fcst_quantile.member, np.array([-1]))
+    npt.assert_array_equal(
+        imp_fcst_quantile.lead_time, np.array([np.timedelta64("NaT")])
+    )
+    npt.assert_array_equal(imp_fcst_quantile.event_id, np.array([0]))
+    npt.assert_array_equal(
+        imp_fcst_quantile.event_name, np.array([f"quantile_{quantile}"])
+    )
+    npt.assert_array_equal(imp_fcst_quantile.frequency, np.array([1]))
+    npt.assert_array_equal(imp_fcst_quantile.date, np.array([0]))
+
+
+def test_median(impact_forecast):
+    imp_fcst_median = impact_forecast.median()
+    imp_fcst_quantile = impact_forecast.quantile(q=0.5)
+    npt.assert_array_equal(
+        imp_fcst_median.imp_mat.toarray(), imp_fcst_quantile.imp_mat.toarray()
+    )
+    npt.assert_array_equal(imp_fcst_median.imp_mat.toarray(), [[2.5, 2.5]])
+
+    # check that attributes were reduced correctly
+    npt.assert_array_equal(imp_fcst_median.member, np.array([-1]))
+    npt.assert_array_equal(imp_fcst_median.lead_time, np.array([np.timedelta64("NaT")]))
+    npt.assert_array_equal(imp_fcst_median.event_id, np.array([0]))
+    npt.assert_array_equal(imp_fcst_median.event_name, np.array(["median"]))
+    npt.assert_array_equal(imp_fcst_median.frequency, np.array([1]))
+    npt.assert_array_equal(imp_fcst_median.date, np.array([0]))
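
The [[2.5, 2.5]] expectation in test_median follows directly from a column-wise median over the ensemble matrix. A standalone sketch of that arithmetic (the 4x2 matrix is an assumption chosen to reproduce the value, not the actual dummy-impact fixture):

```python
import numpy as np

# Ensemble impact matrix: one row per member, one column per exposure point.
imp_mat = np.array([[1.0, 1.0], [2.0, 2.0], [3.0, 3.0], [4.0, 4.0]])

# Column-wise median over the members, as in ImpactForecast._quantile.
median_row = np.quantile(imp_mat, 0.5, axis=0)   # array([2.5, 2.5])

# The reduced at_event is the total impact of the single pseudo-event.
at_event = np.array([median_row.sum()])          # array([5.])
```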

climada/hazard/forecast.py

Lines changed: 58 additions & 3 deletions
@@ -351,6 +351,49 @@ def select(
             reset_frequency=reset_frequency,
         )
 
+    def _quantile(self, q: float, event_name: str | None = None):
+        """
+        Reduce the intensity and fraction of a HazardForecast to the quantile value.
+        """
+        red_intensity = sparse.csr_matrix(
+            np.quantile(self.intensity.toarray(), q, axis=0)
+        )
+        red_fraction = sparse.csr_matrix(
+            np.quantile(self.fraction.toarray(), q, axis=0)
+        )
+        if event_name is None:
+            event_name = f"quantile_{q}"
+        return HazardForecast(
+            haz_type=self.haz_type,
+            pool=self.pool,
+            units=self.units,
+            centroids=self.centroids,
+            frequency_unit=self.frequency_unit,
+            intensity=red_intensity,
+            fraction=red_fraction,
+            **self._reduce_attrs(event_name),
+        )
+
+    def quantile(self, q: float):
+        """
+        Reduce the intensity and fraction of a HazardForecast to the quantile value.
+
+        The quantile is computed over the ensemble members, i.e. by taking the
+        quantile of the intensity and fraction matrices along the event dimension
+        axis (axis=0).
+
+        Parameters
+        ----------
+        q : float
+            The quantile to compute, between 0 and 1.
+
+        Returns
+        -------
+        HazardForecast
+            A HazardForecast object with the quantile intensity and fraction.
+        """
+        return self._quantile(q=q)
+
     @classmethod
     def from_xarray_raster(
         cls,

@@ -395,9 +438,7 @@ def from_xarray_raster(
         open_dataset_kws : dict, optional
             Keyword arguments passed to xarray.open_dataset if data is a file path
 
-        Returns
-        -------
-        HazardForecast
+
             A forecast hazard object with lead_time and member attributes populated
 
         See Also

@@ -471,3 +512,17 @@ def from_xarray_raster(
 
         # Convert to HazardForecast with forecast attributes
         return cls(**Hazard._check_and_cast_attrs(kwargs))
+
+    def median(self):
+        """
+        Reduce the intensity and fraction of a HazardForecast to the median value.
+
+        The median is computed over the ensemble members, i.e. by taking the median
+        of the intensity and fraction matrices along the event dimension axis (axis=0).
+
+        Returns
+        -------
+        HazardForecast
+            A HazardForecast object with the median intensity and fraction.
+        """
+        return self._quantile(q=0.5, event_name="median")
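
The HazardForecast reduction mirrors the ImpactForecast one, but acts on the sparse intensity and fraction matrices. A self-contained sketch of the column-wise quantile it performs (the 2x3 matrix is an assumption; on a real object this runs via haz_fc.quantile(q=...) or haz_fc.median()):

```python
import numpy as np
from scipy import sparse

# One row per ensemble member, one column per centroid (illustrative values).
intensity = sparse.csr_matrix(np.array([[0.0, 2.0, 4.0], [1.0, 3.0, 5.0]]))

# Column-wise quantile over the members, as in HazardForecast._quantile.
red_intensity = sparse.csr_matrix(np.quantile(intensity.toarray(), 0.75, axis=0))

print(red_intensity.shape)      # (1, 3): a single pseudo-event row
print(red_intensity.toarray())  # [[0.75 2.75 4.75]]
```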
