
Commit 168e4d8

initial commit of storm_europe
1 parent b279b56 commit 168e4d8

File tree

2 files changed: +173 −0 lines changed


climada/hazard/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -6,3 +6,4 @@
 from .tag import *
 from .trop_cyclone import *
 from .tc_tracks import *
+from .storm_europe import *
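With this wildcard re-export in place, the new class should be importable directly from the hazard subpackage; a minimal sketch, assuming an installed checkout that includes this commit:

from climada.hazard import StormEurope

haz = StormEurope()  # empty instance with hazard type 'WS'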

climada/hazard/storm_europe.py

Lines changed: 172 additions & 0 deletions
@@ -0,0 +1,172 @@
"""
Define StormEurope class.
"""

__all__ = ['StormEurope']

import logging
import numpy as np
import xarray as xr
import pandas as pd
from scipy import sparse

from climada.hazard.base import Hazard
from climada.hazard.centroids.base import Centroids
from climada.hazard.tag import Tag as TagHazard
from climada.util.files_handler import get_file_names, to_list
from climada.util.constants import WISC_CENTROIDS

LOGGER = logging.getLogger(__name__)

HAZ_TYPE = 'WS'
""" Hazard type acronym for Winter Storm """


class StormEurope(Hazard):
    """Contains European winter storm events.

    Attributes:
        ssi (float): Storm Severity Index, as recorded in the footprint
            files; this is _not_ the same as the index computed by the
            Matlab climada version.
            Cf. Lamb and Frydendahl (1991), "Historic Storms of the North
            Sea, British Isles and Northwest Europe",
            ISBN: 978-0-521-37522-1.
            SSI = v [m/s] ^ 3 * duration [h] * area [km^2 or m^2]
    """
    intensity_thres = 15
    """ Intensity threshold for storage, in m/s """

    vars_opt = Hazard.vars_opt.union({'ssi'})
    """ Names of the variables that are not needed to compute the impact. """

    def __init__(self):
        """Empty constructor."""
        Hazard.__init__(self, HAZ_TYPE)
        self.ssi = np.array([], int)

    def read_footprints(self, path, description=None,
                        ref_raster=None, centroids=None,
                        files_omit='fp_era20c_1990012515_701_0.nc'):
        """Clear instance and read WISC footprints. Assumes that all
        footprints share the coordinates of the first file listed or the
        first file in the directory.

        Parameters:
            path (str, list(str)): A location in the filesystem. Either a
                path to a single netCDF WISC footprint, a folder
                containing only footprints, or a globbing pattern
                matching one or more footprints.
            description (str, optional): Description of the events;
                defaults to 'WISC historical hazard set'.
            ref_raster (str, optional): Reference netCDF file from which
                to construct a new barebones Centroids instance. Defaults
                to the first file in path.
            centroids (Centroids, optional): A Centroids instance,
                overriding ref_raster.
            files_omit (str, list(str), optional): List of files to omit;
                defaults to one duplicate storm present in the WISC set
                as of 2018-09-10.
        """
        self.clear()

        file_names = get_file_names(path)

        if ref_raster is not None and centroids is None:
            centroids = self._centroids_from_nc(ref_raster)
        elif ref_raster is not None and centroids is not None:
            LOGGER.warning('Overriding ref_raster with centroids')
        else:
            centroids = self._centroids_from_nc(file_names[0])

        files_omit = to_list(files_omit)

        for fn in file_names:
            if any(fo in fn for fo in files_omit):
                LOGGER.info("Omitting file %s", fn)
                continue
            self.append(self._read_one_nc(fn, centroids))

        self.tag = TagHazard(
            HAZ_TYPE, 'Hazard set not saved, too large to pickle',
            description='WISC historical hazard set.'
        )
        if description is not None:
            self.tag.description = description

    @classmethod
    def _read_one_nc(cls, file_name, centroids):
        """Read a single WISC footprint. Assumes a time dimension of
        length 1. Omits a footprint if another file with the same
        timestamp has already been read.

        Parameters:
            file_name (str): Absolute or relative path to a *.nc file.
            centroids (Centroids): Centroids instance that matches the
                coordinates used in the *.nc; only validated by size.
        """
        nc = xr.open_dataset(file_name)

        if centroids.size != (nc.sizes['latitude'] * nc.sizes['longitude']):
            raise ValueError('Number of centroids and grid size don\'t match.')

        # xarray does not penalise repeated assignments, see
        # http://xarray.pydata.org/en/stable/data-structures.html
        stacked = nc.max_wind_gust.stack(
            intensity=('latitude', 'longitude', 'time')
        )
        stacked = stacked.where(stacked > cls.intensity_thres)
        stacked = stacked.fillna(0)

        # fill in values from netCDF
        new_haz = StormEurope()
        new_haz.event_name = [nc.storm_name]
        new_haz.date = np.array([
            _datetime64_toordinal(nc.time.data[0])
        ])
        new_haz.intensity = sparse.csr_matrix(stacked)
        new_haz.ssi = np.array([float(nc.ssi)])
        new_haz.time_bounds = np.array(nc.time_bounds)

        # fill in default values
        new_haz.centroids = centroids
        new_haz.units = 'm/s'
        new_haz.event_id = np.array([1])
        new_haz.frequency = np.array([1])
        new_haz.fraction = new_haz.intensity.copy().tocsr()
        new_haz.fraction.data.fill(1)
        new_haz.orig = np.array([True])

        nc.close()
        return new_haz

    @staticmethod
    def _centroids_from_nc(file_name):
        """Construct Centroids from the grid described by the 'latitude'
        and 'longitude' variables in a netCDF file.
        """
        nc = xr.open_dataset(file_name)
        lats = nc.latitude.data
        lons = nc.longitude.data
        ct = Centroids()
        ct.coord = np.array([
            np.repeat(lats, len(lons)),
            np.tile(lons, len(lats)),
        ]).T
        ct.id = np.arange(0, len(ct.coord))
        ct.tag.description = 'Centroids constructed from: ' + file_name

        return ct

    def plot_ssi(self):
        """Ought to plot the SSI versus xs_freq, which presumably is the
        excess frequency."""
        pass


def _datetime64_toordinal(datetime):
    """Convert a numpy datetime64 object to an ordinal date.
    See https://stackoverflow.com/a/21916253 for the horrible details."""
    return pd.to_datetime(datetime.tolist()).toordinal()
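Not part of the commit: a minimal usage sketch of the new reader, assuming a local directory of WISC footprint *.nc files (the path and description below are hypothetical):

# Usage sketch (assumption: 'data/wisc_footprints/' holds WISC *.nc files).
from climada.hazard import StormEurope

haz = StormEurope()
haz.read_footprints('data/wisc_footprints/',
                    description='WISC footprints, local subset')
print(haz.event_name)  # one storm name per footprint file
print(haz.ssi)         # Storm Severity Index per event, from the files

For scale, the docstring's SSI formula is cubic in wind speed: 25 m/s sustained over 24 h across 1000 km^2 gives 25**3 * 24 * 1000 = 3.75e8 in the stated mixed units.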
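The _centroids_from_nc helper flattens the latitude/longitude grid row-major, pairing every latitude with every longitude. A standalone sketch of the same repeat/tile pattern, with made-up toy coordinates:

import numpy as np

lats = np.array([50.0, 50.5])
lons = np.array([5.0, 5.5, 6.0])
coord = np.array([np.repeat(lats, len(lons)),
                  np.tile(lons, len(lats))]).T
# coord rows: (50.0, 5.0), (50.0, 5.5), (50.0, 6.0),
#             (50.5, 5.0), (50.5, 5.5), (50.5, 6.0)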
