Skip to content

Commit 2747223

Browse files
mjreno
authored and committed
move data update to package
1 parent 54b503f commit 2747223

File tree

6 files changed

+194
-139
lines changed

6 files changed

+194
-139
lines changed

.docs/Notebooks/mf6_netcdf01_tutorial.py

Lines changed: 39 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -127,6 +127,12 @@
127127
# write simulation with structured NetCDF file
128128
sim.write_simulation(netcdf="structured")
129129

130+
# ## Run MODFLOW 6 simulation with NetCDF input
131+
#
132+
# The simulation generated by this tutorial should be runnable by
133+
# Extended MODFLOW 6, available from the nightly-build repository
134+
# (linked above).
135+
130136
# success, buff = sim.run_simulation(silent=True, report=True)
131137
# assert success, pformat(buff)
132138

@@ -140,6 +146,12 @@
140146
# write simulation with layered mesh NetCDF file
141147
sim.write_simulation(netcdf="layered")
142148

149+
# ## Run MODFLOW 6 simulation with NetCDF input
150+
#
151+
# The simulation generated by this tutorial should be runnable by
152+
# Extended MODFLOW 6, available from the nightly-build repository
153+
# (linked above).
154+
143155
# success, buff = sim.run_simulation(silent=True, report=True)
144156
# assert success, pformat(buff)
145157

@@ -225,23 +237,6 @@
225237
dis = gwf.get_package("dis")
226238
ds = dis.update_dataset(ds)
227239

228-
# ## Update array data
229-
#
230-
# We have created dataset array variables for the package but they do not yet
231-
# define the expected input data for MODFLOW 6. We will take advantage of the
232-
# existing simulation objects and update the dataset.
233-
#
234-
# Default dataset variable names are defined in the package `netcdf_info()`
235-
# dictionary. Here we will use the info dictionary to programmatically update
236-
# the dataset- for remaining packages we will hardcode the variable names
237-
# being updated for maximum clarity.
238-
239-
nc_info = dis.netcdf_info()
240-
for v in nc_info:
241-
name = nc_info[v]["attrs"]["modflow_input"].rsplit("/", 1)[1].lower()
242-
d = getattr(dis, name)
243-
ds[nc_info[v]["varname"]].values = d.get_data()
244-
245240
# ## Access `NPF` package NetCDF attributes
246241
#
247242
# Access package scoped NetCDF details by storing the dictionary returned
@@ -270,12 +265,6 @@
270265
nc_info["k"]["attrs"]["standard_name"] = "soil_hydraulic_conductivity_at_saturation"
271266
ds = npf.update_dataset(ds, netcdf_info=nc_info)
272267

273-
# ## Update array data
274-
275-
# update dataset from npf arrays
276-
ds["npf_icelltype"].values = npf.icelltype.get_data()
277-
ds["npf_k_updated"].values = npf.k.get_data()
278-
279268
# ## Show dataset `NPF K` parameter with updates
280269

281270
# print dataset npf k variable
@@ -286,37 +275,19 @@
286275
# ic
287276
ic = gwf.get_package("ic")
288277
ds = ic.update_dataset(ds)
289-
ds["ic_strt"].values = ic.strt.get_data()
290278

291279
# ## Update the dataset with supported `STO` arrays
292280

293281
# storage
294282
sto = gwf.get_package("sto")
295283
ds = sto.update_dataset(ds)
296-
ds["sto_iconvert"].values = sto.iconvert.get_data()
297-
ds["sto_sy"].values = sto.sy.get_data()
298-
ds["sto_ss"].values = sto.ss.get_data()
299284

300285
# ## Update the dataset with supported `GHBG` arrays
301286

302287
# update dataset with 'GHBG' arrays
303288
ghbg = gwf.get_package("ghbg_0")
304289
ds = ghbg.update_dataset(ds)
305290

306-
# ## Update array data
307-
308-
# update bhead netcdf array from flopy perioddata
309-
# timeseries step index is first of stress period
310-
for p in ghbg.bhead.get_data():
311-
istp = sum(gwf.modeltime.nstp[0:p])
312-
ds["ghbg_0_bhead"].values[istp] = ghbg.bhead.get_data()[p]
313-
314-
# update cond netcdf array from flopy perioddata
315-
# timeseries step index is first of stress period
316-
for p in ghbg.cond.get_data():
317-
istp = sum(gwf.modeltime.nstp[0:p])
318-
ds["ghbg_0_cond"].values[istp] = ghbg.cond.get_data()[p]
319-
320291
# ## Display generated dataset
321292

322293
# show the dataset
@@ -337,3 +308,30 @@
337308

338309
# success, buff = sim.run_simulation(silent=True, report=True)
339310
# assert success, pformat(buff)
311+
312+
# ## Method 4: DIY with xarray
313+
#
314+
# The above method still uses FloPy objects to update the dataset arrays
315+
# to values consistent with the state of the objects. The `netcdf_info`
316+
# dictionary is intended to support creation of the dataset without
317+
# an existing simulation defined. The base dataset can be defined with
318+
# `modelgrid` and `modeltime` objects, while the full package netcdf
319+
# dictionary can be retrieved with a static call to a model or package mf6
320+
# type. The auxiliary input is optional but does show the variables that
321+
# would be required if auxiliary variables were defined in the package.
322+
323+
# ## Demonstrate static call on MFPackage (structured dataset):
324+
325+
netcdf_info = flopy.mf6.mfpackage.MFPackage.netcdf_package(
326+
mtype="GWF",
327+
ptype="NPF",
328+
auxiliary=["CONCENTRATION"],
329+
)
330+
pprint(netcdf_info)
331+
332+
# ## Demonstrate static call on MFPackage (layered dataset):
333+
334+
netcdf_info = flopy.mf6.mfpackage.MFPackage.netcdf_package(
335+
mtype="GWF", ptype="NPF", mesh="LAYERED", auxiliary=["CONCENTRATION"], nlay=2
336+
)
337+
pprint(netcdf_info)

autotest/regression/test_model_netcdf.py

Lines changed: 2 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,8 @@ def check_netcdf(path, mobj, mesh=None):
4949
if p in packages:
5050
l = -1
5151
assert "modflow_input" in da.attrs
52+
assert "longname" in da.attrs
53+
assert da.attrs["longname"] != ""
5254
if mesh is None:
5355
assert "layer" not in da.attrs
5456
else:
@@ -82,14 +84,6 @@ def check_netcdf(path, mobj, mesh=None):
8284
assert np.allclose(ds.data_vars[varname].values, d.get_data())
8385

8486

85-
def update_dataset(dataset, pobj):
86-
nc_info = pobj.netcdf_info()
87-
for v in nc_info:
88-
name = nc_info[v]["attrs"]["modflow_input"].rsplit("/", 1)[1].lower()
89-
d = getattr(pobj, name)
90-
dataset[nc_info[v]["varname"]].values = d.get_data()
91-
92-
9387
@pytest.mark.regression
9488
def test_uzf01_model_scope_nofile(function_tmpdir, example_data_path):
9589
sim_name = "uzf01"
@@ -350,16 +344,6 @@ def test_uzf01_pkg_scope(function_tmpdir, example_data_path):
350344
# add all packages and update data
351345
for p in gwf.packagelist:
352346
ds = p.update_dataset(ds)
353-
nc_info = p.netcdf_info()
354-
for v in nc_info:
355-
name = nc_info[v]["attrs"]["modflow_input"].rsplit("/", 1)[1].lower()
356-
d = getattr(p, name)
357-
if d.repeating:
358-
for per in d.get_data():
359-
istp = sum(gwf.modeltime.nstp[0:per])
360-
ds[nc_info[v]["varname"]].values[istp] = d.get_data()[per]
361-
else:
362-
ds[nc_info[v]["varname"]].values = d.get_data()
363347

364348
# write dataset to netcdf
365349
ds.to_netcdf(ws / fname, format="NETCDF4", engine="netcdf4")
@@ -398,7 +382,6 @@ def test_uzf01_pkg_scope_modify(function_tmpdir, example_data_path):
398382
# update dataset with `DIS` arrays
399383
dis = gwf.get_package("dis")
400384
ds = dis.update_dataset(ds)
401-
update_dataset(ds, dis)
402385

403386
# get npf package netcdf info
404387
npf = gwf.get_package("npf")
@@ -410,36 +393,18 @@ def test_uzf01_pkg_scope_modify(function_tmpdir, example_data_path):
410393
nc_info["k"]["attrs"]["standard_name"] = "soil_hydraulic_conductivity_at_saturation"
411394
ds = npf.update_dataset(ds, netcdf_info=nc_info)
412395

413-
# update dataset from npf arrays
414-
ds["npf_icelltype"].values = npf.icelltype.get_data()
415-
ds["npf_k_updated"].values = npf.k.get_data()
416-
417396
# ic
418397
ic = gwf.get_package("ic")
419398
ds = ic.update_dataset(ds)
420-
update_dataset(ds, ic)
421399

422400
# storage
423401
sto = gwf.get_package("sto")
424402
ds = sto.update_dataset(ds)
425-
update_dataset(ds, sto)
426403

427404
# update dataset with 'GHBG' arrays
428405
ghbg = gwf.get_package("ghbg_0")
429406
ds = ghbg.update_dataset(ds)
430407

431-
# update bhead netcdf array from flopy perioddata
432-
# timeseries step index is first of stress period
433-
for p in ghbg.bhead.get_data():
434-
istp = sum(gwf.modeltime.nstp[0:p])
435-
ds["ghbg_0_bhead"].values[istp] = ghbg.bhead.get_data()[p]
436-
437-
# update cond netcdf array from flopy perioddata
438-
# timeseries step index is first of stress period
439-
for p in ghbg.cond.get_data():
440-
istp = sum(gwf.modeltime.nstp[0:p])
441-
ds["ghbg_0_cond"].values[istp] = ghbg.cond.get_data()[p]
442-
443408
# write dataset to netcdf
444409
ds.to_netcdf(ws / fname, format="NETCDF4", engine="netcdf4")
445410

flopy/mf6/data/mfdataarray.py

Lines changed: 15 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1242,23 +1242,26 @@ def _get_file_entry(
12421242
layer_min = layer
12431243
layer_max = shape_ml.inc_shape_idx(layer)
12441244

1245-
if data_storage.netcdf:
1246-
file_entry_array.append(f"{indent}{self.structure.name}{indent}NETCDF\n")
1247-
1248-
elif layered_aux:
1245+
if layered_aux:
12491246
aux_var_names = (
12501247
self.data_dimensions.package_dim.get_aux_variables()[0]
12511248
)
12521249
for layer in range(0, len(aux_var_names)-1):
1253-
file_entry_array.append(
1254-
self._get_file_entry_layer(
1255-
[layer],
1256-
data_indent,
1257-
data_storage.layer_storage[layer].data_storage_type,
1258-
ext_file_action,
1259-
layered_aux,
1250+
if data_storage.netcdf:
1251+
if data_storage.has_data():
1252+
file_entry_array.append(f"{indent}{aux_var_names[layer+1]}{indent}NETCDF\n")
1253+
else:
1254+
file_entry_array.append(
1255+
self._get_file_entry_layer(
1256+
[layer],
1257+
data_indent,
1258+
data_storage.layer_storage[layer].data_storage_type,
1259+
ext_file_action,
1260+
layered_aux,
1261+
)
12601262
)
1261-
)
1263+
elif data_storage.netcdf:
1264+
file_entry_array.append(f"{indent}{self.structure.name}{indent}NETCDF\n")
12621265

12631266
else:
12641267
for layer in shape_ml.indexes(layer_min, layer_max):

flopy/mf6/data/mfdatastorage.py

Lines changed: 21 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2371,8 +2371,15 @@ def _build_full_data(self, apply_multiplier=False):
23712371
else:
23722372
fill_value = None
23732373
full_data = np.full(dimensions, fill_value, np_full_data_type)
2374-
is_aux = self.data_dimensions.structure.name == "aux"
2375-
if is_aux:
2374+
layer_aux = (
2375+
self.data_dimensions.structure.name == "aux"
2376+
and not self.layered
2377+
)
2378+
grid_aux = (
2379+
self.data_dimensions.structure.name == "aux"
2380+
and self.layered
2381+
)
2382+
if layer_aux:
23762383
aux_data = []
23772384
if not self.layered:
23782385
layers_to_process = [0]
@@ -2398,7 +2405,7 @@ def _build_full_data(self, apply_multiplier=False):
23982405
or len(self.layer_storage[layer].internal_data) > 0
23992406
and self.layer_storage[layer].internal_data[0] is None
24002407
):
2401-
if is_aux:
2408+
if layer_aux:
24022409
full_data = None
24032410
else:
24042411
return None
@@ -2409,9 +2416,14 @@ def _build_full_data(self, apply_multiplier=False):
24092416
):
24102417
full_data = self.layer_storage[layer].internal_data * mult
24112418
else:
2412-
full_data[layer] = (
2413-
self.layer_storage[layer].internal_data * mult
2414-
)
2419+
if grid_aux:
2420+
full_data = (
2421+
self.layer_storage[layer].internal_data * mult
2422+
)
2423+
else:
2424+
full_data[layer] = (
2425+
self.layer_storage[layer].internal_data * mult
2426+
)
24152427
elif (
24162428
self.layer_storage[layer].data_storage_type
24172429
== DataStorageType.internal_constant
@@ -2469,11 +2481,11 @@ def _build_full_data(self, apply_multiplier=False):
24692481
):
24702482
full_data = data_out
24712483
else:
2472-
if is_aux and full_data.shape == data_out.shape:
2484+
if layer_aux and full_data.shape == data_out.shape:
24732485
full_data = data_out
24742486
else:
24752487
full_data[layer] = data_out
2476-
if is_aux:
2488+
if layer_aux:
24772489
if full_data is not None:
24782490
all_none = False
24792491
aux_data.append(full_data)
@@ -2482,7 +2494,7 @@ def _build_full_data(self, apply_multiplier=False):
24822494
np.nan,
24832495
self.data_dimensions.structure.get_datum_type(True),
24842496
)
2485-
if is_aux:
2497+
if layer_aux:
24862498
if all_none:
24872499
return None
24882500
else:

0 commit comments

Comments
 (0)