Skip to content

Commit 0c9ec38

Browse files
kumdonoaa and Dong Kim authored
Ten diffusive domains w schism boundary (#573)
* f90 files
* step2
* run all ten diffusive domains in serial with schism depth boundary data
* code update 6/29/2022
* run all ten diffusive domains in serial with schism boundary
* more clean up
* comment out unused maxCourant variable
* removed private file paths
* further cleanup
* change dt_schisma and dt_db_g and cleanup
* remove coastal_domain_testing.yaml
* remove old files in LowerColorado_TX folder
* remove 2 more old files in LowerColorado_TX folder
* updated hydrofabric & natural xsec files
* remove private path in yaml
* change default value of diffusivity limits
* remove list()
* list() removed
* remove two natural xsec nc files
* updated mask file only for Lower Colorado River
* remove data of HurricaneLaura

Co-authored-by: Dong Kim <donghakim@cheyenne6.cheyenne.ucar.edu>
1 parent ca36a2e commit 0c9ec38

21 files changed

+13261
-1685
lines changed

src/kernel/diffusive/diffusive.f90

Lines changed: 112 additions & 96 deletions
Large diffs are not rendered by default.

src/troute-network/troute/nhd_io.py

Lines changed: 91 additions & 25 deletions
Original file line number | Diff line number | Diff line change
@@ -15,6 +15,8 @@
1515
import netCDF4
1616
from joblib import delayed, Parallel
1717
from cftime import date2num
18+
import dateutil.parser as dparser
19+
from datetime import datetime, timedelta
1820

1921
from troute.nhd_network import reverse_dict
2022

@@ -78,7 +80,7 @@ def read_config_file(custom_input_file):
7880
compute_parameters (dict): Input parameters re computation settings
7981
forcing_parameters (dict): Input parameters re model forcings
8082
restart_parameters (dict): Input parameters re model restart
81-
diffusive_parameters (dict): Input parameters re diffusive wave model
83+
hybrid_parameters (dict): Input parameters re diffusive wave model
8284
output_parameters (dict): Input parameters re output writing
8385
parity_parameters (dict): Input parameters re parity assessment
8486
data_assimilation_parameters (dict): Input parameters re data assimilation
@@ -106,7 +108,7 @@ def read_config_file(custom_input_file):
106108
compute_parameters = data.get("compute_parameters", {})
107109
forcing_parameters = compute_parameters.get("forcing_parameters", {})
108110
restart_parameters = compute_parameters.get("restart_parameters", {})
109-
diffusive_parameters = compute_parameters.get("diffusive_parameters", {})
111+
hybrid_parameters = compute_parameters.get("hybrid_parameters", {})
110112
data_assimilation_parameters = compute_parameters.get(
111113
"data_assimilation_parameters", {}
112114
)
@@ -121,7 +123,7 @@ def read_config_file(custom_input_file):
121123
compute_parameters,
122124
forcing_parameters,
123125
restart_parameters,
124-
diffusive_parameters,
126+
hybrid_parameters,
125127
output_parameters,
126128
parity_parameters,
127129
data_assimilation_parameters,
@@ -150,6 +152,30 @@ def read_diffusive_domain(domain_file):
150152

151153
return data
152154

155+
def read_coastal_boundary_domain(domain_file):
    '''
    Read coastal boundary domain from .yaml or .json file.

    Arguments
    ---------
    domain_file (str or pathlib.Path): Path of coastal boundary domain file

    Returns
    -------
    data (dict int: int): diffusive domain tailwater segments: coastal domain segments

    '''
    # str() coercion lets pathlib.Path inputs work too — slicing a Path
    # (domain_file[-4:]) raises TypeError, contradicting the docstring.
    if str(domain_file).endswith("yaml"):
        with open(domain_file) as domain:
            # SafeLoader: never construct arbitrary Python objects from YAML
            data = yaml.load(domain, Loader=yaml.SafeLoader)
    else:
        with open(domain_file) as domain:
            data = json.load(domain)

    return data
178+
153179
def read_custom_input(custom_input_file):
154180
if custom_input_file[-4:] == "yaml":
155181
with open(custom_input_file) as custom_file:
@@ -201,7 +227,6 @@ def read_lakeparm(
201227
Reads LAKEPARM file and prepares a dataframe, filtered
202228
to the relevant reservoirs, to provide the parameters
203229
for level-pool reservoir computation.
204-
205230
Completely replaces the read_waterbody_df function from prior versions
206231
of the v02 routing code.
207232
@@ -213,7 +238,6 @@ def read_lakeparm(
213238
214239
Returns:
215240
df1 (DataFrame):
216-
217241
"""
218242

219243
# TODO: avoid or parameterize "feature_id" or ... return to name-blind dataframe version
@@ -385,23 +409,17 @@ def get_ql_from_wrf_hydro_mf(
385409
value_col: column/field in the CHRTOUT files with the lateral inflow value
386410
gw_col: column/field in the CHRTOUT files with the groundwater bucket flux value
387411
runoff_col: column/field in the CHRTOUT files with the runoff from terrain routing value
388-
389412
In general the CHRTOUT files contain one value per time step. At present, there is
390413
no capability for handling non-uniform timesteps in the qlaterals.
391-
392414
The qlateral may also be input using comma delimited file -- see
393415
`get_ql_from_csv`
394-
395-
396416
Note/Todo:
397417
For later needs, filtering for specific features or times may
398418
be accomplished with one of:
399419
ds.loc[{selectors}]
400420
ds.sel({selectors})
401421
ds.isel({selectors})
402-
403422
Returns from these selection functions are sub-datasets.
404-
405423
For example:
406424
```
407425
(Pdb) ds.sel({"feature_id":[4186117, 4186169],"time":ds.time.values[:2]})['q_lateral'].to_dataframe()
@@ -410,7 +428,6 @@ def get_ql_from_wrf_hydro_mf(
410428
2018-01-01 13:00:00 4186117 41.233807 -75.413895 0.006496
411429
2018-01-02 00:00:00 4186117 41.233807 -75.413895 0.006460
412430
```
413-
414431
or...
415432
```
416433
(Pdb) ds.sel({"feature_id":[4186117, 4186169],"time":[np.datetime64('2018-01-01T13:00:00')]})['q_lateral'].to_dataframe()
@@ -485,12 +502,20 @@ def write_chanobs(
485502
-------------
486503
487504
'''
488-
505+
# TODO: for and if statements are improvised just in case when link of gage location is not in flowveldepth, which should be fixed
506+
link_na = []
507+
for link in link_gage_df.index:
508+
if link not in flowveldepth.index:
509+
link_na.append(link)
510+
link_gage_df_nona = link_gage_df.drop(link_na)
511+
489512
# array of segment linkIDs at gage locations. Results from these segments will be written
490-
gage_feature_id = link_gage_df.index.to_numpy(dtype = "int64")
513+
#gage_feature_id = link_gage_df.index.to_numpy(dtype = "int64")
514+
gage_feature_id = link_gage_df_nona.index.to_numpy(dtype = "int64")
491515

492-
# array of simulated flow data at gage locations
493-
gage_flow_data = flowveldepth.loc[link_gage_df.index].iloc[:,::3].to_numpy(dtype="float32")
516+
# array of simulated flow data at gage locations
517+
#gage_flow_data = flowveldepth.loc[link_gage_df.index].iloc[:,::3].to_numpy(dtype="float32")
518+
gage_flow_data = flowveldepth.loc[link_gage_df_nona.index].iloc[:,::3].to_numpy(dtype="float32")
494519

495520
# array of simulation time
496521
gage_flow_time = [t0 + timedelta(seconds = (i+1) * dt) for i in range(nts)]
@@ -571,7 +596,6 @@ def write_chanobs(
571596
fill_value = np.nan
572597
)
573598
y[:] = gage_flow_data.T
574-
575599
# =========== GLOBAL ATTRIBUTES ===============
576600
f.setncatts(
577601
{
@@ -743,10 +767,8 @@ def get_ql_from_wrf_hydro(qlat_files, index_col="station_id", value_col="q_later
743767
qlat_files: globbed list of CHRTOUT files containing desired lateral inflows
744768
index_col: column/field in the CHRTOUT files with the segment/link id
745769
value_col: column/field in the CHRTOUT files with the lateral inflow value
746-
747770
In general the CHRTOUT files contain one value per time step. At present, there is
748771
no capability for handling non-uniform timesteps in the qlaterals.
749-
750772
The qlateral may also be input using comma delimited file -- see
751773
`get_ql_from_csv`
752774
"""
@@ -943,12 +965,10 @@ def get_usgs_df_from_csv(usgs_csv, routelink_subset_file, index_col="link"):
943965
usgs_csv - csv file with SEGMENT IDs in the left-most column labeled with "link",
944966
and date-headed values from time-slice files in the format
945967
"2018-09-18 00:00:00"
946-
947968
It is assumed that the segment crosswalk and interpolation have both
948969
already been performed, so we do not need to comprehend
949970
the potentially non-numeric byte-strings associated with gage IDs, nor
950971
do we need to interpolate anything here as when we read from the timeslices.
951-
952972
If that were necessary, we might use a solution such as proposed here:
953973
https://stackoverflow.com/a/35058538
954974
note that explicit typing of the index cannot be done on read and
@@ -1221,7 +1241,6 @@ def get_channel_restart_from_wrf_hydro(
12211241
default_us_flow_column: name used in remainder of program to refer to this column of the dataset
12221242
default_ds_flow_column: name used in remainder of program to refer to this column of the dataset
12231243
default_depth_column: name used in remainder of program to refer to this column of the dataset
1224-
12251244
The Restart file gives hlink, qlink1, and qlink2 values for channels --
12261245
the order is simply the same as that found in the Route-Link files.
12271246
*Subnote 1*: The order of these values is NOT the order found in the CHRTOUT files,
@@ -1353,7 +1372,6 @@ def write_hydro_rst(
13531372
):
13541373
"""
13551374
Write t-route flow and depth data to WRF-Hydro restart files.
1356-
13571375
Agruments
13581376
---------
13591377
data (Data Frame): t-route simulated flow, velocity and depth data
@@ -1366,7 +1384,6 @@ def write_hydro_rst(
13661384
troute_us_flow_var_name (str):
13671385
troute_ds_flow_var_name (str):
13681386
troute_depth_var_name (str):
1369-
13701387
Returns
13711388
-------
13721389
"""
@@ -1473,7 +1490,6 @@ def get_reservoir_restart_from_wrf_hydro(
14731490
waterbody_depth_column: column in the restart file to use for downstream flow initial state
14741491
default_waterbody_flow_column: name used in remainder of program to refer to this column of the dataset
14751492
default_waterbody_depth_column: name used in remainder of program to refer to this column of the dataset
1476-
14771493
The Restart file gives qlakeo and resht values for waterbodies.
14781494
The order of values in the file is the same as the order in the LAKEPARM file from WRF-Hydro.
14791495
However, there are many instances where only a subset of waterbodies described in the lakeparm
@@ -1523,7 +1539,57 @@ def build_coastal_ncdf_dataframe(coastal_ncdf):
15231539
coastal_ncdf_df = ds[["elev", "depth"]]
15241540
return coastal_ncdf_df.to_dataframe()
15251541

1542+
def build_coastal_ncdf_dataframe(
    coastal_files,
    coastal_boundary_domain,
):
    '''
    Build a dataframe of water-depth timeseries at coastal boundary nodes
    from a SCHISM netCDF output file.

    Arguments
    ---------
    coastal_files (str or pathlib.Path): path of the SCHISM netCDF output file
    coastal_boundary_domain (dict): diffusive domain tailwater segment id ->
                                    coastal (SCHISM) domain node index

    Returns
    -------
    coastal_boundary_depth_df (DataFrame): water depth at coastal boundary
        nodes; rows indexed by tailwater segment id, columns are timestamp
        strings ('%Y-%m-%d %H:%M:%S')

    Raises
    ------
    RuntimeError: if the SCHISM file contains fewer than two time steps,
        since a uniform timestep cannot then be inferred.
    '''
    # Context manager guarantees the netCDF handle is closed on every path,
    # including the RuntimeError below (the original never closed it).
    with netCDF4.Dataset(filename = coastal_files, mode = 'r', format = "NETCDF4") as ds:
        tws = list(coastal_boundary_domain.keys())
        coastal_boundary_nodes = list(coastal_boundary_domain.values())

        # surface elevation (NAVD88 datum) and static bathymetric depth at the
        # selected boundary nodes; masked entries become NaN
        elev_NAVD88 = ds.variables['elev'][:, coastal_boundary_nodes].filled(fill_value = np.nan)
        depth_bathy = ds.variables['depth'][coastal_boundary_nodes].filled(fill_value = np.nan)
        timesteps = ds.variables['time'][:]
        if len(timesteps) > 1:
            # assumes a uniform timestep; time presumably in seconds — the
            # /60.0 conversion below relies on that (TODO confirm units)
            dt_schism = timesteps[1] - timesteps[0]
        else:
            raise RuntimeError("schism provided less than 2 time steps")

        # start date is parsed out of the time variable's units string
        # (e.g. "seconds since 2018-01-01 00:00:00")
        start_date = ds.variables['time'].units
        start_date = dparser.parse(start_date, fuzzy=True)

    dt_timeslice = timedelta(minutes=dt_schism/60.0)
    tfin = start_date + dt_timeslice*len(timesteps)
    # date_range is inclusive of both ends, so this yields len(timesteps)+1
    # stamps: a leading slot at start_date (filled by extrapolation below)
    # plus one per SCHISM record
    timestamps = pd.date_range(start_date, tfin, freq=dt_timeslice)
    timestamps = timestamps.strftime('%Y-%m-%d %H:%M:%S')

    # create a dataframe of water depth at coastal domain nodes
    timeslice_schism_list = []
    for t in range(0, len(timesteps)+1):
        timeslice = np.full(len(tws), timestamps[t])
        if t == 0:
            # no SCHISM record at start_date; placeholder, extrapolated below
            depth = np.nan
        else:
            # water depth = surface elevation + bathymetric depth
            depth = elev_NAVD88[t-1, :] + depth_bathy

        timeslice_schism = (pd.DataFrame({
                                'stationId' : tws,
                                'datetime'  : timeslice,
                                'depth'     : depth
                            }).
                            set_index(['stationId', 'datetime']).
                            unstack(1, fill_value = np.nan)['depth'])

        timeslice_schism_list.append(timeslice_schism)

    coastal_boundary_depth_df = pd.concat(timeslice_schism_list, axis=1, ignore_index=False)

    # linearly extrapolate depth value at start date
    coastal_boundary_depth_df.iloc[:, 0] = 2.0*coastal_boundary_depth_df.iloc[:, 1] - coastal_boundary_depth_df.iloc[:, 2]

    return coastal_boundary_depth_df
1592+
15271593
def lastobs_df_output(
15281594
lastobs_df,
15291595
dt,

src/troute-network/troute/nhd_network.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -269,7 +269,7 @@ def reachable_network(N, sources=None, targets=None, check_disjoint=True):
269269

270270
rv = {}
271271
for k, n in reached.items():
272-
rv[k] = {m: N.get(m, []) for m in n}
272+
rv[k] = {m: N.get(m, []) for m in n}
273273
return rv
274274

275275

src/troute-network/troute/nhd_network_utilities_v02.py

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -845,7 +845,7 @@ def build_refac_connections(diff_network_parameters):
845845

846846
# set parameter dataframe index as segment id number, sort
847847
param_df = param_df.set_index("key").sort_index()
848-
848+
849849
# There can be an externally determined terminal code -- that's this first value
850850
terminal_codes = set()
851851
terminal_codes.update(terminal_code)
@@ -865,4 +865,3 @@ def build_refac_connections(diff_network_parameters):
865865
)
866866

867867
return connections
868-

0 commit comments

Comments
 (0)