From 9048b5e500d9ca64be7163c17f97d3fed5890f55 Mon Sep 17 00:00:00 2001 From: mjreno Date: Fri, 25 Jul 2025 15:06:41 -0400 Subject: [PATCH 01/12] netcdf attr dicts for package and model --- flopy/mf6/data/mfstructure.py | 4 +++ flopy/mf6/mfmodel.py | 26 ++++++++++++++ flopy/mf6/mfpackage.py | 66 +++++++++++++++++++++++++++++++++++ 3 files changed, 96 insertions(+) diff --git a/flopy/mf6/data/mfstructure.py b/flopy/mf6/data/mfstructure.py index 07115d12d..2d9ff246d 100644 --- a/flopy/mf6/data/mfstructure.py +++ b/flopy/mf6/data/mfstructure.py @@ -594,6 +594,7 @@ def __init__(self): self.parameter_name = None self.one_per_pkg = False self.jagged_array = None + self.netcdf = False def set_value(self, line, common): arr_line = line.strip().split() @@ -771,6 +772,8 @@ def set_value(self, line, common): self.one_per_pkg = bool(arr_line[1]) elif arr_line[0] == "jagged_array": self.jagged_array = arr_line[1] + elif arr_line[0] == "netcdf": + self.netcdf = arr_line[1] def get_type_string(self): return f"[{self.type_string}]" @@ -1089,6 +1092,7 @@ def __init__(self, data_item, model_data, package_type, dfn_list): or "nodes" in data_item.shape or len(data_item.layer_dims) > 1 ) + self.netcdf = data_item.netcdf self.num_data_items = len(data_item.data_items) self.record_within_record = False self.file_data = False diff --git a/flopy/mf6/mfmodel.py b/flopy/mf6/mfmodel.py index 0abc25cbd..98572175b 100644 --- a/flopy/mf6/mfmodel.py +++ b/flopy/mf6/mfmodel.py @@ -808,6 +808,32 @@ def export(self, f, **kwargs): return utils.model_export(f, self, **kwargs) + def netcdf_attrs(self, mesh=None): + """Return dictionary of dataset (model) scoped attributes + Parameters + ---------- + mesh : str + mesh type if dataset is ugrid complient + """ + attrs = { + "modflow_grid": "", + "modflow_model": "", + } + if self.get_grid_type() == DiscretizationType.DIS: + attrs["modflow_grid"] = "STRUCTURED" + elif self.get_grid_type() == DiscretizationType.DISV: + attrs["modflow_grid"] = "VERTEX" + + attrs["modflow_model"] = ( + f"{self.name.upper()}: MODFLOW 6 {self.model_type.upper()[0:3]} model" + ) + + # supported => LAYERED + if mesh: + attrs["mesh"] = mesh + + return attrs + @property def verbose(self): """Verbose setting for model operations (True/False)""" diff --git a/flopy/mf6/mfpackage.py b/flopy/mf6/mfpackage.py index 4ee9d6175..b2ca67b22 100644 --- a/flopy/mf6/mfpackage.py +++ b/flopy/mf6/mfpackage.py @@ -2086,6 +2086,72 @@ def package_filename_dict(self): ) return self._package_container.package_filename_dict + def netcdf_attrs(self, mesh=None): + attrs = {} + + def attr_d(tagname, iaux=None, layer=None): + tag = tagname + name = f"{self.package_name}" + if iaux: + auxvar = self.dimensions.get_aux_variables()[0] + tag = f"{tag}/{iaux}" + name = f"{name}_{auxvar[iaux]}" + else: + name = f"{name}_{tagname}" + if layer: + tag = f"{tag}/layer{layer}" + name = f"{name}_l{layer}" + + a = {} + a["varname"] = name + a["attrs"] = {} + a["attrs"]["modflow_input"] = ( + f"{self.model_name}/{self.package_name}/{tagname}" + ).upper() + if iaux: + a["attrs"]["modflow_iaux"] = iaux + if layer: + a["attrs"]["layer"] = layer + return tag, a + + for key, block in self.blocks.items(): + if key != "griddata" and key != "period": + continue + for dataset in block.datasets.values(): + if isinstance(dataset, mfdataarray.MFArray): + for index, data_item in enumerate( + dataset.structure.data_item_structures + ): + if not (dataset.structure.netcdf and dataset.has_data()): + continue + if dataset.structure.layered and mesh == "LAYERED": + 
if data_item.name == "aux" or data_item.name == "auxvar": + for n, auxname in enumerate( + self.dimensions.get_aux_variables()[0] + ): + if auxname == "auxiliary" and n == 0: + continue + for l in range(self.model_or_sim.modelgrid.nlay): + key, a = attr_d(data_item.name, n, l + 1) + attrs[key] = a + else: + for l in range(self.model_or_sim.modelgrid.nlay): + key, a = attr_d(data_item.name, layer=l + 1) + attrs[key] = a + else: + if data_item.name == "aux" or data_item.name == "auxvar": + for n, auxname in enumerate( + self.dimensions.get_aux_variables()[0] + ): + if auxname == "auxiliary" and n == 0: + continue + key, a = attr_d(data_item.name, iaux=n) + attrs[key] = a + else: + key, a = attr_d(data_item.name) + attrs[key] = a + return attrs + def get_package(self, name=None, type_only=False, name_only=False): """ Finds a package by package name, package key, package type, or partial From cff915bead111cb243f615bc3f1b67dd98d09a7b Mon Sep 17 00:00:00 2001 From: mjreno Date: Fri, 25 Jul 2025 16:11:11 -0400 Subject: [PATCH 02/12] fix spelling --- flopy/mf6/mfmodel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flopy/mf6/mfmodel.py b/flopy/mf6/mfmodel.py index 98572175b..ec540e50c 100644 --- a/flopy/mf6/mfmodel.py +++ b/flopy/mf6/mfmodel.py @@ -813,7 +813,7 @@ def netcdf_attrs(self, mesh=None): Parameters ---------- mesh : str - mesh type if dataset is ugrid complient + mesh type if dataset is ugrid compliant """ attrs = { "modflow_grid": "", From ff6b82410c26528b9431624535b19311b6fdf093 Mon Sep 17 00:00:00 2001 From: mjreno Date: Mon, 28 Jul 2025 13:10:33 -0400 Subject: [PATCH 03/12] add static methods --- flopy/mf6/mfmodel.py | 72 ++++++++++------ flopy/mf6/mfpackage.py | 181 ++++++++++++++++++++++++++--------------- 2 files changed, 161 insertions(+), 92 deletions(-) diff --git a/flopy/mf6/mfmodel.py b/flopy/mf6/mfmodel.py index ec540e50c..c190522fc 100644 --- a/flopy/mf6/mfmodel.py +++ b/flopy/mf6/mfmodel.py @@ -808,32 +808,6 @@ def export(self, f, **kwargs): return utils.model_export(f, self, **kwargs) - def netcdf_attrs(self, mesh=None): - """Return dictionary of dataset (model) scoped attributes - Parameters - ---------- - mesh : str - mesh type if dataset is ugrid compliant - """ - attrs = { - "modflow_grid": "", - "modflow_model": "", - } - if self.get_grid_type() == DiscretizationType.DIS: - attrs["modflow_grid"] = "STRUCTURED" - elif self.get_grid_type() == DiscretizationType.DISV: - attrs["modflow_grid"] = "VERTEX" - - attrs["modflow_model"] = ( - f"{self.name.upper()}: MODFLOW 6 {self.model_type.upper()[0:3]} model" - ) - - # supported => LAYERED - if mesh: - attrs["mesh"] = mesh - - return attrs - @property def verbose(self): """Verbose setting for model operations (True/False)""" @@ -2229,3 +2203,49 @@ def _resolve_idomain(idomain, botm): else: return np.ones_like(botm) return idomain + + @staticmethod + def netcdf_attrs(mname, mtype, grid_type, mesh=None): + """Return dictionary of dataset (model) scoped attributes + Parameters + ---------- + mname : str + model name + mtype : str + model type + grid_type: + DiscretizationType + mesh : str + mesh type if dataset is ugrid compliant + """ + attrs = { + "modflow_grid": "", + "modflow_model": "", + } + if grid_type == DiscretizationType.DIS: + attrs["modflow_grid"] = "STRUCTURED" + elif grid_type == DiscretizationType.DISV: + attrs["modflow_grid"] = "VERTEX" + + attrs["modflow_model"] = f"{mname.upper()}: MODFLOW 6 {mtype.upper()} model" + + # supported => LAYERED + if mesh: + attrs["mesh"] = mesh + + return 
attrs + + def netcdf_info(self, mesh=None): + """Return dictionary of dataset (model) scoped attributes + Parameters + ---------- + mesh : str + mesh type if dataset is ugrid compliant + """ + attrs = MFModel.netcdf_attrs( + self.name, self.model_type, self.get_grid_type(), mesh + ) + + res_d = {} + res_d["attrs"] = attrs + return res_d diff --git a/flopy/mf6/mfpackage.py b/flopy/mf6/mfpackage.py index b2ca67b22..9176a07d1 100644 --- a/flopy/mf6/mfpackage.py +++ b/flopy/mf6/mfpackage.py @@ -2086,72 +2086,6 @@ def package_filename_dict(self): ) return self._package_container.package_filename_dict - def netcdf_attrs(self, mesh=None): - attrs = {} - - def attr_d(tagname, iaux=None, layer=None): - tag = tagname - name = f"{self.package_name}" - if iaux: - auxvar = self.dimensions.get_aux_variables()[0] - tag = f"{tag}/{iaux}" - name = f"{name}_{auxvar[iaux]}" - else: - name = f"{name}_{tagname}" - if layer: - tag = f"{tag}/layer{layer}" - name = f"{name}_l{layer}" - - a = {} - a["varname"] = name - a["attrs"] = {} - a["attrs"]["modflow_input"] = ( - f"{self.model_name}/{self.package_name}/{tagname}" - ).upper() - if iaux: - a["attrs"]["modflow_iaux"] = iaux - if layer: - a["attrs"]["layer"] = layer - return tag, a - - for key, block in self.blocks.items(): - if key != "griddata" and key != "period": - continue - for dataset in block.datasets.values(): - if isinstance(dataset, mfdataarray.MFArray): - for index, data_item in enumerate( - dataset.structure.data_item_structures - ): - if not (dataset.structure.netcdf and dataset.has_data()): - continue - if dataset.structure.layered and mesh == "LAYERED": - if data_item.name == "aux" or data_item.name == "auxvar": - for n, auxname in enumerate( - self.dimensions.get_aux_variables()[0] - ): - if auxname == "auxiliary" and n == 0: - continue - for l in range(self.model_or_sim.modelgrid.nlay): - key, a = attr_d(data_item.name, n, l + 1) - attrs[key] = a - else: - for l in range(self.model_or_sim.modelgrid.nlay): - key, a = attr_d(data_item.name, layer=l + 1) - attrs[key] = a - else: - if data_item.name == "aux" or data_item.name == "auxvar": - for n, auxname in enumerate( - self.dimensions.get_aux_variables()[0] - ): - if auxname == "auxiliary" and n == 0: - continue - key, a = attr_d(data_item.name, iaux=n) - attrs[key] = a - else: - key, a = attr_d(data_item.name) - attrs[key] = a - return attrs - def get_package(self, name=None, type_only=False, name_only=False): """ Finds a package by package name, package key, package type, or partial @@ -3488,6 +3422,121 @@ def plot(self, **kwargs): axes = PlotUtilities._plot_package_helper(self, **kwargs) return axes + @staticmethod + def _add_netcdf_entries( + attrs, mname, pname, data_item, auxiliary=None, mesh=None, nlay=1 + ): + if auxiliary: + auxnames = auxiliary + else: + auxnames = [] + + def add_entry(tagname, iaux=None, layer=None): + key = tagname + name = f"{pname}" + if iaux is not None: + key = f"{key}/{iaux}" + name = f"{name}_{auxiliary[iaux]}" + else: + name = f"{name}_{tagname}" + if layer is not None: + key = f"{key}/layer{layer}" + name = f"{name}_l{layer}" + + a = {} + a["varname"] = name.lower() + a["attrs"] = {} + a["attrs"]["modflow_input"] = (f"{mname}/{pname}/{tagname}").upper() + if iaux is not None: + a["attrs"]["modflow_iaux"] = iaux + 1 + if layer is not None: + a["attrs"]["layer"] = layer + attrs[key] = a + + if data_item.layered and mesh == "LAYERED": + if data_item.name == "aux" or data_item.name == "auxvar": + for n, auxname in enumerate(auxnames): + for l in range(nlay): + 
add_entry(data_item.name, n, l + 1) + else: + for l in range(nlay): + add_entry(data_item.name, layer=l + 1) + else: + if data_item.name == "aux" or data_item.name == "auxvar": + for n, auxname in enumerate(auxnames): + add_entry(data_item.name, iaux=n) + else: + add_entry(data_item.name) + + @staticmethod + def netcdf_attrs(mtype, ptype, auxiliary=None, mesh=None, nlay=1): + from .data.mfstructure import DfnPackage, MFSimulationStructure + + attrs = {} + sim_struct = MFSimulationStructure() + + for package in MFPackage.__subclasses__(): + sim_struct.process_dfn(DfnPackage(package)) + p = DfnPackage(package) + c, sc = p.dfn_file_name.split(".")[0].split("-") + if c == mtype.lower() and sc == ptype.lower(): + sim_struct.add_package(p, model_file=False) + exit + + if ptype.lower() in sim_struct.package_struct_objs: + pso = sim_struct.package_struct_objs[ptype.lower()] + for key, block in pso.blocks.items(): + if key != "griddata" and key != "period": + continue + for d in block.data_structures: + if block.data_structures[d].netcdf: + MFPackage._add_netcdf_entries( + attrs, + mtype, + ptype, + block.data_structures[d], + auxiliary, + mesh, + nlay, + ) + + res_d = {} + for k in list(attrs): + res_d[k] = attrs[k]["attrs"] + + return res_d + + def netcdf_info(self, mesh=None): + attrs = {} + + if self.dimensions.get_aux_variables(): + auxnames = list(self.dimensions.get_aux_variables()[0]) + if len(auxnames) and auxnames[0] == "auxiliary": + auxnames.pop(0) + else: + auxnames = [] + + for key, block in self.blocks.items(): + if key != "griddata" and key != "period": + continue + for dataset in block.datasets.values(): + if isinstance(dataset, mfdataarray.MFArray): + for index, data_item in enumerate( + dataset.structure.data_item_structures + ): + if dataset.structure.netcdf and dataset.has_data(): + MFPackage._add_netcdf_entries( + attrs, + self.model_name, + self.package_name, + dataset.structure, + auxnames, + mesh, + self.model_or_sim.modelgrid.nlay, + ) + + return attrs + class MFChildPackages: """ From 8764d77ceb30efe0eb0de4318fe70aab9aa11589 Mon Sep 17 00:00:00 2001 From: mjreno Date: Tue, 29 Jul 2025 16:07:26 -0400 Subject: [PATCH 04/12] draft netcdf tutorial --- .docs/Notebooks/netcdf01_tutorial.py | 277 +++++++++++++++++++++++++++ flopy/mf6/mfpackage.py | 8 +- 2 files changed, 283 insertions(+), 2 deletions(-) create mode 100644 .docs/Notebooks/netcdf01_tutorial.py diff --git a/.docs/Notebooks/netcdf01_tutorial.py b/.docs/Notebooks/netcdf01_tutorial.py new file mode 100644 index 000000000..20447b65f --- /dev/null +++ b/.docs/Notebooks/netcdf01_tutorial.py @@ -0,0 +1,277 @@ +import sys +from pathlib import Path +from tempfile import TemporaryDirectory + +import numpy as np +import xarray as xr + +import flopy + +print(sys.version) +print(f"flopy version: {flopy.__version__}") + +DNODATA = 3.0e30 + + +def create_sim(ws): + name = "uzf01" + perlen = [500.0] + nper = len(perlen) + nstp = [10] + tsmult = nper * [1.0] + crs = "EPSG:26916" + nlay, nrow, ncol = 100, 1, 1 + delr = 1.0 + delc = 1.0 + delv = 1.0 + top = 100.0 + botm = [top - (k + 1) * delv for k in range(nlay)] + strt = 0.5 + hk = 1.0 + laytyp = 1 + ss = 0.0 + sy = 0.1 + + tdis_rc = [] + for i in range(nper): + tdis_rc.append((perlen[i], nstp[i], tsmult[i])) + + # build MODFLOW 6 files + sim = flopy.mf6.MFSimulation( + sim_name=name, version="mf6", exe_name="mf6", sim_ws=ws + ) + + # create tdis package + tdis = flopy.mf6.ModflowTdis(sim, time_units="DAYS", nper=nper, perioddata=tdis_rc) + + # create iterative model solution and 
register the gwf model with it + nouter, ninner = 100, 10 + hclose, rclose, relax = 1.5e-6, 1e-6, 0.97 + imsgwf = flopy.mf6.ModflowIms( + sim, + print_option="SUMMARY", + outer_dvclose=hclose, + outer_maximum=nouter, + under_relaxation="DBD", + under_relaxation_theta=0.7, + inner_maximum=ninner, + inner_dvclose=hclose, + rcloserecord=rclose, + linear_acceleration="BICGSTAB", + scaling_method="NONE", + reordering_method="NONE", + relaxation_factor=relax, + ) + + # create gwf model + newtonoptions = "NEWTON UNDER_RELAXATION" + gwf = flopy.mf6.ModflowGwf( + sim, + modelname=name, + newtonoptions=newtonoptions, + save_flows=True, + ) + + dis = flopy.mf6.ModflowGwfdis( + gwf, + crs=crs, + nlay=nlay, + nrow=nrow, + ncol=ncol, + delr=delr, + delc=delc, + top=top, + botm=botm, + idomain=np.ones((nlay, nrow, ncol), dtype=int), + ) + + # initial conditions + ic = flopy.mf6.ModflowGwfic(gwf, strt=strt) + + # node property flow + npf = flopy.mf6.ModflowGwfnpf(gwf, save_flows=False, icelltype=laytyp, k=hk) + # storage + sto = flopy.mf6.ModflowGwfsto( + gwf, + save_flows=False, + iconvert=laytyp, + ss=ss, + sy=sy, + steady_state={0: False}, + transient={0: True}, + ) + + # ghbg + ghb_obs = {f"{name}.ghb.obs.csv": [("100_1_1", "GHB", (99, 0, 0))]} + bhead = np.full(nlay * nrow * ncol, DNODATA, dtype=float) + cond = np.full(nlay * nrow * ncol, DNODATA, dtype=float) + bhead[nlay - 1] = 1.5 + cond[nlay - 1] = 1.0 + ghb = flopy.mf6.ModflowGwfghbg( + gwf, + print_input=True, + print_flows=True, + bhead=bhead, + cond=cond, + save_flows=False, + ) + + ghb.obs.initialize( + filename=f"{name}.ghb.obs", + digits=20, + print_input=True, + continuous=ghb_obs, + ) + + # note: for specifying lake number, use fortran indexing! + uzf_obs = { + f"{name}.uzf.obs.csv": [ + ("wc 02", "water-content", 2, 0.5), + ("wc 50", "water-content", 50, 0.5), + ("wcbn 02", "water-content", "uzf 002", 0.5), + ("wcbn 50", "water-content", "UZF 050", 0.5), + ("rch 02", "uzf-gwrch", "uzf 002"), + ("rch 50", "uzf-gwrch", "uzf 050"), + ] + } + + sd = 0.1 + vks = hk + thtr = 0.05 + thti = thtr + thts = sy + eps = 4 + uzf_pkdat = [[0, (0, 0, 0), 1, 1, sd, vks, thtr, thts, thti, eps, "uzf 001"]] + [ + [k, (k, 0, 0), 0, k + 1, sd, vks, thtr, thts, thti, eps, f"uzf {k + 1:03d}"] + for k in range(1, nlay - 1) + ] + uzf_pkdat[-1][3] = -1 + infiltration = 2.01 + uzf_spd = {0: [[0, infiltration, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]} + uzf = flopy.mf6.ModflowGwfuzf( + gwf, + print_input=True, + print_flows=True, + save_flows=True, + boundnames=True, + ntrailwaves=15, + nwavesets=40, + nuzfcells=len(uzf_pkdat), + packagedata=uzf_pkdat, + perioddata=uzf_spd, + budget_filerecord=f"{name}.uzf.bud", + budgetcsv_filerecord=f"{name}.uzf.bud.csv", + observations=uzf_obs, + filename=f"{name}.uzf", + ) + + # output control + oc = flopy.mf6.ModflowGwfoc( + gwf, + budget_filerecord=f"{name}.bud", + head_filerecord=f"{name}.hds", + headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")], + saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], + printrecord=[("HEAD", "LAST"), ("BUDGET", "ALL")], + ) + + obs_lst = [] + obs_lst.append(["obs1", "head", (0, 0, 0)]) + obs_lst.append(["obs2", "head", (1, 0, 0)]) + obs_dict = {f"{name}.obs.csv": obs_lst} + obs = flopy.mf6.ModflowUtlobs(gwf, pname="head_obs", digits=20, continuous=obs_dict) + + return sim + + +temp_dir = TemporaryDirectory() +workspace = Path(temp_dir.name) + +# run the non-netcdf simulation +sim = create_sim(ws=workspace) +sim.write_simulation() +success, buff = sim.run_simulation(silent=True, 
report=True) +if success: + for line in buff: + print(line) +else: + raise ValueError("Failed to run.") + +# create directory for netcdf sim +sim.set_sim_path(workspace / "netcdf") +gwf = sim.get_model("uzf01") +gwf.name_file.nc_filerecord = "uzf01.structured.nc" +sim.write_simulation() + +# create the netcdf dataset +ds = xr.Dataset() + +# get model netcdf info +ds_info = gwf.netcdf_info() + +# update dataset with required attributes +for a in ds_info["attrs"]: + ds.attrs[a] = ds_info["attrs"][a] + +# set dimensional info +dis = gwf.modelgrid +xoff = dis.xoffset +yoff = dis.yoffset +x = xoff + dis.xycenters[0] +y = yoff + dis.xycenters[1] +z = [float(x) for x in range(1, dis.nlay + 1)] +nstp = sum(gwf.modeltime.nstp) +time = gwf.modeltime.tslen + +# create coordinate vars +var_d = {"time": (["time"], time), "z": (["z"], z), "y": (["y"], y), "x": (["x"], x)} +ds = ds.assign(var_d) + +# get ghbg package netcdf info +ghbg = gwf.get_package("ghbg_0") +ghbg_info = ghbg.netcdf_info() + +# create ghbg dataset variables +shape = nc_shape = ["time", "z", "y", "x"] +for v in ghbg_info: + varname = ghbg_info[v]["varname"] + data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) + var_d = {varname: (shape, data)} + ds = ds.assign(var_d) + for a in ghbg_info[v]["attrs"]: + ds[varname].attrs[a] = ghbg_info[v]["attrs"][a] + ds[varname].attrs["_FillValue"] = DNODATA + +# update bhead netcdf array from flopy perioddata +for p in ghbg.bhead.get_data(): + ds["ghbg_0_bhead"].values[p] = ghbg.bhead.get_data()[p] + +# update cond netcdf array from flopy perioddata +for p in ghbg.cond.get_data(): + ds["ghbg_0_cond"].values[p] = ghbg.cond.get_data()[p] + +# write the netcdf +ds.to_netcdf( + workspace / "netcdf/uzf01.structured.nc", format="NETCDF4", engine="netcdf4" +) + +# update ghbg input to read bhead and cond from netcdf +with open(workspace / "netcdf/uzf01.ghbg", "w") as f: + f.write("BEGIN options\n") + f.write(" READARRAYGRID\n") + f.write(" PRINT_INPUT\n") + f.write(" PRINT_FLOWS\n") + f.write(" OBS6 FILEIN uzf01.ghb.obs\n") + f.write("END options\n\n") + f.write("BEGIN period 1\n") + f.write(" bhead NETCDF\n") + f.write(" cond NETCDF\n") + f.write("END period 1\n") + +# run the netcdf sim +success, buff = sim.run_simulation(silent=False, report=True) +if success: + for line in buff: + print(line) +else: + raise ValueError("Failed to run.") diff --git a/flopy/mf6/mfpackage.py b/flopy/mf6/mfpackage.py index 9176a07d1..e9c6ccb5a 100644 --- a/flopy/mf6/mfpackage.py +++ b/flopy/mf6/mfpackage.py @@ -3485,6 +3485,10 @@ def netcdf_attrs(mtype, ptype, auxiliary=None, mesh=None, nlay=1): if ptype.lower() in sim_struct.package_struct_objs: pso = sim_struct.package_struct_objs[ptype.lower()] + if pso.multi_package_support: + pname = f"<{ptype}name>" + else: + pname = ptype for key, block in pso.blocks.items(): if key != "griddata" and key != "period": continue @@ -3492,8 +3496,8 @@ def netcdf_attrs(mtype, ptype, auxiliary=None, mesh=None, nlay=1): if block.data_structures[d].netcdf: MFPackage._add_netcdf_entries( attrs, - mtype, - ptype, + f"<{mtype}name>", + pname, block.data_structures[d], auxiliary, mesh, From e8a54e45936fcd40b9320e456355d3871af0c7b0 Mon Sep 17 00:00:00 2001 From: mjreno Date: Tue, 29 Jul 2025 16:27:32 -0400 Subject: [PATCH 05/12] add xarray as optional dependency --- etc/environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/etc/environment.yml b/etc/environment.yml index bcb8a08e3..d0debc795 100644 --- a/etc/environment.yml +++ b/etc/environment.yml @@ -64,3 
+64,4 @@ dependencies: - vtk - xmipy - h5py + - xarray From 09b7711982bde4300544b218bc0631913820253d Mon Sep 17 00:00:00 2001 From: mjreno Date: Wed, 30 Jul 2025 09:51:52 -0400 Subject: [PATCH 06/12] add another tutorial --- .docs/Notebooks/netcdf02_tutorial.py | 425 +++++++++++++++++++++++++++ 1 file changed, 425 insertions(+) create mode 100644 .docs/Notebooks/netcdf02_tutorial.py diff --git a/.docs/Notebooks/netcdf02_tutorial.py b/.docs/Notebooks/netcdf02_tutorial.py new file mode 100644 index 000000000..389c81617 --- /dev/null +++ b/.docs/Notebooks/netcdf02_tutorial.py @@ -0,0 +1,425 @@ +import sys +from pathlib import Path +from tempfile import TemporaryDirectory + +import numpy as np +import xarray as xr + +import flopy + +print(sys.version) +print(f"flopy version: {flopy.__version__}") +# - + +DNODATA = 3.0e30 + + +def get_flow_sim(ws): + name = "flow" + gwfname = name + sim = flopy.mf6.MFSimulation(sim_name=name, sim_ws=ws, exe_name="mf6") + tdis_rc = [(100.0, 1, 1.0), (100.0, 1, 1.0)] + nper = len(tdis_rc) + tdis = flopy.mf6.ModflowTdis(sim, time_units="DAYS", nper=nper, perioddata=tdis_rc) + + gwf = flopy.mf6.ModflowGwf(sim, modelname=gwfname, save_flows=True) + + # ims + hclose = 1.0e-6 + rclose = 1.0e-6 + nouter = 1000 + ninner = 100 + relax = 0.99 + imsgwf = flopy.mf6.ModflowIms( + sim, + print_option="ALL", + outer_dvclose=hclose, + outer_maximum=nouter, + under_relaxation="NONE", + inner_maximum=ninner, + inner_dvclose=hclose, + rcloserecord=rclose, + linear_acceleration="CG", + scaling_method="NONE", + reordering_method="NONE", + relaxation_factor=relax, + filename=f"{gwfname}.ims", + ) + + nlay = 1 + nrow = 10 + ncol = 10 + delr = 10.0 + delc = 10.0 + top = 100.0 + botm = 0.0 + + dis = flopy.mf6.ModflowGwfdis( + gwf, + nlay=nlay, + nrow=nrow, + ncol=ncol, + delr=delr, + delc=delc, + top=top, + botm=botm, + ) + + ic = flopy.mf6.ModflowGwfic(gwf, strt=100.0) + + npf = flopy.mf6.ModflowGwfnpf( + gwf, + xt3doptions=False, + save_flows=True, + save_specific_discharge=True, + save_saturation=True, + icelltype=[1], + k=10.0, + ) + + sto_on = False + if sto_on: + sto = flopy.mf6.ModflowGwfsto( + gwf, + save_flows=True, + iconvert=[1], + ss=1.0e-5, + sy=0.3, + steady_state={0: True}, + transient={0: False}, + ) + + oc = flopy.mf6.ModflowGwfoc( + gwf, + budget_filerecord=f"{gwfname}.bud", + head_filerecord=f"{gwfname}.hds", + headprintrecord=[("COLUMNS", ncol, "WIDTH", 15, "DIGITS", 6, "GENERAL")], + saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], + printrecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], + ) + + rch_on = False + if rch_on: + rch = flopy.mf6.ModflowGwfrcha(gwf, recharge={0: 4.79e-3}, pname="RCH-1") + + # wel + q = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + welconc = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + for i in range(nrow): + q[0, i, 0] = 100.0 + welconc[0, i, 0] = 100.0 + wel = flopy.mf6.ModflowGwfwelg( + gwf, + auxiliary=["concentration"], + pname="WEL-1", + q=q, + aux=welconc, + ) + + # ghb + rows = [0, 1, 2, 3] + for ipak, i in enumerate(rows): + fname = f"flow.{ipak + 1}.ghb" + pname = f"GHB-{ipak + 1}" + bhead = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + cond = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + conc = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + bhead[0, i, ncol - 1] = 50.0 + cond[0, i, ncol - 1] = 1000.0 + conc[0, i, ncol - 1] = 100.0 + flopy.mf6.ModflowGwfghbg( + gwf, + auxiliary=["concentration"], + filename=f"{fname}g", + pname=pname, + bhead=bhead, + cond=cond, + aux=conc, + ) + + # riv + rows = [4, 5, 
6] + for ipak, i in enumerate(rows): + fname = f"flow.{ipak + 1}.riv" + pname = f"RIV-{ipak + 1}" + stage = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + cond = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + rbot = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + conc = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + stage[0, i, ncol - 1] = 50.0 + cond[0, i, ncol - 1] = 1000.0 + rbot[0, i, ncol - 1] = 0.0 + conc[0, i, ncol - 1] = 100.0 + riv = flopy.mf6.ModflowGwfrivg( + gwf, + auxiliary=["concentration"], + filename=f"{fname}g", + pname=pname, + stage=stage, + cond=cond, + rbot=rbot, + aux=[conc], + ) + + # drn + rows = [7, 8, 9] + for ipak, i in enumerate(rows): + fname = f"flow.{ipak + 1}.drn" + pname = f"DRN-{ipak + 1}" + elev = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + cond = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + conc = np.full((nlay, nrow, ncol), DNODATA, dtype=float) + elev[0, i, ncol - 1] = 50.0 + cond[0, i, ncol - 1] = 1000.0 + conc[0, i, ncol - 1] = 100.0 + drn = flopy.mf6.modflow.ModflowGwfdrng( + gwf, + auxiliary=["concentration"], + filename=f"{fname}g", + pname=pname, + elev=elev, + cond=cond, + aux=[conc], + ) + + return sim + + +temp_dir = TemporaryDirectory() +workspace = Path(temp_dir.name) + +# run the non-netcdf simulation +sim = get_flow_sim(workspace) +sim.write_simulation() +success, buff = sim.run_simulation(silent=True, report=True) +if success: + for line in buff: + print(line) +else: + raise ValueError("Failed to run.") + +# create directory for netcdf sim +sim.set_sim_path(workspace / "netcdf") +gwf = sim.get_model("flow") +gwf.name_file.nc_filerecord = "flow.structured.nc" +sim.write_simulation() + +# create the netcdf dataset +ds = xr.Dataset() + +# get model netcdf info +nc_info = gwf.netcdf_info() + +# update dataset with required attributes +for a in nc_info["attrs"]: + ds.attrs[a] = nc_info["attrs"][a] + +# get dim info from modelgrid +dis = gwf.modelgrid +xoff = dis.xoffset +yoff = dis.yoffset +x = xoff + dis.xycenters[0] +y = yoff + dis.xycenters[1] +z = [float(x) for x in range(1, dis.nlay + 1)] + +# set nstp and time +nstp = sum(gwf.modeltime.nstp) +time = gwf.modeltime.tslen + +# create coordinate vars +var_d = {"time": (["time"], time), "z": (["z"], z), "y": (["y"], y), "x": (["x"], x)} +ds = ds.assign(var_d) + +# shape list for data arrays +shape = ["time", "z", "y", "x"] + +# update for welg +welg = gwf.get_package("wel-1") +nc_info = welg.netcdf_info() + +for v in nc_info: + varname = nc_info[v]["varname"] + data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) + var_d = {varname: (shape, data)} + ds = ds.assign(var_d) + # add required modflow 6 param attributes + for a in nc_info[v]["attrs"]: + ds[varname].attrs[a] = nc_info[v]["attrs"][a] + ds[varname].attrs["_FillValue"] = DNODATA + +# update q netcdf array from flopy perioddata +for p in welg.q.get_data(): + if welg.q.get_data()[p] is not None: + ds["wel-1_q"].values[p] = welg.q.get_data()[p] + +# update conc netcdf array from flopy perioddata +for p in welg.aux.get_data(): + if welg.aux.get_data()[p] is not None: + ds["wel-1_concentration"].values[p] = welg.aux.get_data()[p][0] + +# update welg input to read from netcdf +with open(workspace / "netcdf" / "flow.welg", "w") as f: + f.write("BEGIN options\n") + f.write(" READARRAYGRID\n") + f.write(" auxiliary CONCENTRATION\n") + f.write("END options\n\n") + f.write("BEGIN period 1\n") + f.write(" q NETCDF\n") + f.write(" concentration NETCDF\n") + f.write("END period 1\n\n") + +# 
update for ghbg +for n in range(4): + ip = n + 1 + + # get ghbg package netcdf info + ghbg = gwf.get_package(f"ghb-{ip}") + nc_info = ghbg.netcdf_info() + + for v in nc_info: + varname = nc_info[v]["varname"] + data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) + var_d = {varname: (shape, data)} + ds = ds.assign(var_d) + # add required modflow 6 param attributes + for a in nc_info[v]["attrs"]: + ds[varname].attrs[a] = nc_info[v]["attrs"][a] + ds[varname].attrs["_FillValue"] = DNODATA + + # update bhead netcdf array from flopy perioddata + for p in ghbg.bhead.get_data(): + if ghbg.bhead.get_data()[p] is not None: + ds[f"ghb-{ip}_bhead"].values[p] = ghbg.bhead.get_data()[p] + + # update cond netcdf array from flopy perioddata + for p in ghbg.cond.get_data(): + if ghbg.cond.get_data()[p] is not None: + ds[f"ghb-{ip}_cond"].values[p] = ghbg.cond.get_data()[p] + + # update conc netcdf array from flopy perioddata + for p in ghbg.aux.get_data(): + if ghbg.aux.get_data()[p] is not None: + ds[f"ghb-{ip}_concentration"].values[p] = ghbg.aux.get_data()[p][0] + + # update ghbg input to read from netcdf + with open(workspace / "netcdf" / f"flow.{ip}.ghbg", "w") as f: + f.write("BEGIN options\n") + f.write(" READARRAYGRID\n") + f.write(" auxiliary CONCENTRATION\n") + f.write("END options\n\n") + f.write("BEGIN period 1\n") + f.write(" bhead NETCDF\n") + f.write(" cond NETCDF\n") + f.write(" concentration NETCDF\n") + f.write("END period 1\n\n") + + +# update for rivg +for n in range(3): + ip = n + 1 + + # get rivg package netcdf info + rivg = gwf.get_package(f"riv-{ip}") + nc_info = rivg.netcdf_info() + + for v in nc_info: + varname = nc_info[v]["varname"] + data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) + var_d = {varname: (shape, data)} + ds = ds.assign(var_d) + # add required modflow 6 param attributes + for a in nc_info[v]["attrs"]: + ds[varname].attrs[a] = nc_info[v]["attrs"][a] + ds[varname].attrs["_FillValue"] = DNODATA + + # update stage netcdf array from flopy perioddata + for p in rivg.stage.get_data(): + if rivg.stage.get_data()[p] is not None: + ds[f"riv-{ip}_stage"].values[p] = rivg.stage.get_data()[p] + + # update cond netcdf array from flopy perioddata + for p in rivg.cond.get_data(): + if rivg.cond.get_data()[p] is not None: + ds[f"riv-{ip}_cond"].values[p] = rivg.cond.get_data()[p] + + # update rbot netcdf array from flopy perioddata + for p in rivg.rbot.get_data(): + if rivg.rbot.get_data()[p] is not None: + ds[f"riv-{ip}_rbot"].values[p] = rivg.rbot.get_data()[p] + + # update conc netcdf array from flopy perioddata + for p in rivg.aux.get_data(): + if rivg.aux.get_data()[p] is not None: + ds[f"riv-{ip}_concentration"].values[p] = rivg.aux.get_data()[p][0] + + # update rivg input to read from netcdf + with open(workspace / "netcdf" / f"flow.{ip}.rivg", "w") as f: + f.write("BEGIN options\n") + f.write(" READARRAYGRID\n") + f.write(" auxiliary CONCENTRATION\n") + f.write("END options\n\n") + f.write("BEGIN period 1\n") + f.write(" stage NETCDF\n") + f.write(" cond NETCDF\n") + f.write(" rbot NETCDF\n") + f.write(" concentration NETCDF\n") + f.write("END period 1\n\n") + + +# update for drng +for n in range(3): + ip = n + 1 + + # get drng package netcdf info + drng = gwf.get_package(f"drn-{ip}") + nc_info = drng.netcdf_info() + + for v in nc_info: + varname = nc_info[v]["varname"] + data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) + var_d = {varname: (shape, data)} + ds = ds.assign(var_d) + # add required 
modflow 6 param attributes + for a in nc_info[v]["attrs"]: + ds[varname].attrs[a] = nc_info[v]["attrs"][a] + ds[varname].attrs["_FillValue"] = DNODATA + + # update elev netcdf array from flopy perioddata + for p in drng.elev.get_data(): + if drng.elev.get_data()[p] is not None: + ds[f"drn-{ip}_elev"].values[p] = drng.elev.get_data()[p] + + # update cond netcdf array from flopy perioddata + for p in drng.cond.get_data(): + if drng.cond.get_data()[p] is not None: + ds[f"drn-{ip}_cond"].values[p] = drng.cond.get_data()[p] + + # update conc netcdf array from flopy perioddata + for p in drng.aux.get_data(): + if drng.aux.get_data()[p] is not None: + ds[f"drn-{ip}_concentration"].values[p] = drng.aux.get_data()[p][0] + + # update drng input to read from netcdf + with open(workspace / "netcdf" / f"flow.{ip}.drng", "w") as f: + f.write("BEGIN options\n") + f.write(" READARRAYGRID\n") + f.write(" auxiliary CONCENTRATION\n") + f.write("END options\n\n") + f.write("BEGIN period 1\n") + f.write(" elev NETCDF\n") + f.write(" cond NETCDF\n") + f.write(" concentration NETCDF\n") + f.write("END period 1\n\n") + +# write the netcdf +ds.to_netcdf( + workspace / "netcdf/flow.structured.nc", format="NETCDF4", engine="netcdf4" +) + +# run the netcdf sim +success, buff = sim.run_simulation(silent=False, report=True) +if success: + for line in buff: + print(line) +else: + raise ValueError("Failed to run.") From ddf86e0cfe2f9217d72e49dfa675c434ac0a7af5 Mon Sep 17 00:00:00 2001 From: mjreno Date: Wed, 30 Jul 2025 16:27:34 -0400 Subject: [PATCH 07/12] add some static data to netcdf file --- .docs/Notebooks/netcdf01_tutorial.py | 124 +++++++++++++++++++++++---- .docs/Notebooks/netcdf02_tutorial.py | 97 ++++++++++++++------- flopy/mf6/mfpackage.py | 28 ++++++ 3 files changed, 203 insertions(+), 46 deletions(-) diff --git a/.docs/Notebooks/netcdf01_tutorial.py b/.docs/Notebooks/netcdf01_tutorial.py index 20447b65f..368ba5bc5 100644 --- a/.docs/Notebooks/netcdf01_tutorial.py +++ b/.docs/Notebooks/netcdf01_tutorial.py @@ -184,6 +184,21 @@ def create_sim(ws): return sim +def data_shape(shape): + dims = [] + for d in shape: + if d == "time": + dims.append(nstp) + if d == "z": + dims.append(nlay) + elif d == "y": + dims.append(nrow) + elif d == "x": + dims.append(ncol) + + return dims + + temp_dir = TemporaryDirectory() workspace = Path(temp_dir.name) @@ -207,11 +222,11 @@ def create_sim(ws): ds = xr.Dataset() # get model netcdf info -ds_info = gwf.netcdf_info() +nc_info = gwf.netcdf_info() # update dataset with required attributes -for a in ds_info["attrs"]: - ds.attrs[a] = ds_info["attrs"][a] +for a in nc_info["attrs"]: + ds.attrs[a] = nc_info["attrs"][a] # set dimensional info dis = gwf.modelgrid @@ -222,40 +237,119 @@ def create_sim(ws): z = [float(x) for x in range(1, dis.nlay + 1)] nstp = sum(gwf.modeltime.nstp) time = gwf.modeltime.tslen +nlay = dis.nlay +nrow = dis.nrow +ncol = dis.ncol # create coordinate vars var_d = {"time": (["time"], time), "z": (["z"], z), "y": (["y"], y), "x": (["x"], x)} ds = ds.assign(var_d) +# dis +dis = gwf.get_package("dis") +nc_info = dis.netcdf_info() + +# create dis dataset variables +for v in nc_info: + varname = nc_info[v]["varname"] + data = np.full( + data_shape(nc_info[v]["nc_shape"]), + nc_info[v]["attrs"]["_FillValue"], + dtype=nc_info[v]["nc_type"], + ) + var_d = {varname: (nc_info[v]["nc_shape"], data)} + ds = ds.assign(var_d) + for a in nc_info[v]["attrs"]: + ds[varname].attrs[a] = nc_info[v]["attrs"][a] + +# update data +ds["dis_delr"].values = dis.delr.get_data() 
+ds["dis_delc"].values = dis.delc.get_data() +ds["dis_top"].values = dis.top.get_data() +ds["dis_botm"].values = dis.botm.get_data() +ds["dis_idomain"].values = dis.idomain.get_data() + +# update dis to read from netcdf +with open(workspace / "netcdf" / "uzf01.dis", "w") as f: + f.write("BEGIN options\n") + f.write(" crs EPSG:26916\n") + f.write("END options\n\n") + f.write("BEGIN dimensions\n") + f.write(" NLAY 100\n") + f.write(" NROW 1\n") + f.write(" NCOL 1\n") + f.write("END dimensions\n\n") + f.write("BEGIN griddata\n") + f.write(" delr NETCDF\n") + f.write(" delc NETCDF\n") + f.write(" top NETCDF\n") + f.write(" botm NETCDF\n") + f.write(" idomain NETCDF\n") + f.write("END griddata\n") + +# npf +npf = gwf.get_package("npf") +nc_info = npf.netcdf_info() + +# create npf dataset variables +for v in nc_info: + varname = nc_info[v]["varname"] + data = np.full( + data_shape(nc_info[v]["nc_shape"]), + nc_info[v]["attrs"]["_FillValue"], + dtype=nc_info[v]["nc_type"], + ) + var_d = {varname: (nc_info[v]["nc_shape"], data)} + ds = ds.assign(var_d) + for a in nc_info[v]["attrs"]: + ds[varname].attrs[a] = nc_info[v]["attrs"][a] + +# update data +ds["npf_icelltype"].values = npf.icelltype.get_data() +ds["npf_k"].values = npf.k.get_data() + +# update npf to read from netcdf +with open(workspace / "netcdf" / "uzf01.npf", "w") as f: + f.write("BEGIN options\n") + f.write("END options\n\n") + f.write("BEGIN griddata\n") + f.write(" icelltype NETCDF\n") + f.write(" k NETCDF\n") + f.write("END griddata\n") + # get ghbg package netcdf info ghbg = gwf.get_package("ghbg_0") -ghbg_info = ghbg.netcdf_info() +nc_info = ghbg.netcdf_info() # create ghbg dataset variables -shape = nc_shape = ["time", "z", "y", "x"] -for v in ghbg_info: - varname = ghbg_info[v]["varname"] - data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) - var_d = {varname: (shape, data)} +for v in nc_info: + varname = nc_info[v]["varname"] + data = np.full( + data_shape(nc_info[v]["nc_shape"]), + nc_info[v]["attrs"]["_FillValue"], + dtype=nc_info[v]["nc_type"], + ) + var_d = {varname: (nc_info[v]["nc_shape"], data)} ds = ds.assign(var_d) - for a in ghbg_info[v]["attrs"]: - ds[varname].attrs[a] = ghbg_info[v]["attrs"][a] - ds[varname].attrs["_FillValue"] = DNODATA + for a in nc_info[v]["attrs"]: + ds[varname].attrs[a] = nc_info[v]["attrs"][a] # update bhead netcdf array from flopy perioddata for p in ghbg.bhead.get_data(): - ds["ghbg_0_bhead"].values[p] = ghbg.bhead.get_data()[p] + istp = sum(gwf.modeltime.nstp[0:p]) + ds["ghbg_0_bhead"].values[istp] = ghbg.bhead.get_data()[p] # update cond netcdf array from flopy perioddata for p in ghbg.cond.get_data(): - ds["ghbg_0_cond"].values[p] = ghbg.cond.get_data()[p] + istp = sum(gwf.modeltime.nstp[0:p]) + ds["ghbg_0_cond"].values[istp] = ghbg.cond.get_data()[p] # write the netcdf ds.to_netcdf( workspace / "netcdf/uzf01.structured.nc", format="NETCDF4", engine="netcdf4" ) -# update ghbg input to read bhead and cond from netcdf +# update ghbg to read from netcdf with open(workspace / "netcdf/uzf01.ghbg", "w") as f: f.write("BEGIN options\n") f.write(" READARRAYGRID\n") diff --git a/.docs/Notebooks/netcdf02_tutorial.py b/.docs/Notebooks/netcdf02_tutorial.py index 389c81617..4692c9bba 100644 --- a/.docs/Notebooks/netcdf02_tutorial.py +++ b/.docs/Notebooks/netcdf02_tutorial.py @@ -14,7 +14,7 @@ DNODATA = 3.0e30 -def get_flow_sim(ws): +def create_sim(ws): name = "flow" gwfname = name sim = flopy.mf6.MFSimulation(sim_name=name, sim_ws=ws, exe_name="mf6") @@ -185,11 +185,26 @@ def 
get_flow_sim(ws): return sim +def data_shape(shape): + dims = [] + for d in shape: + if d == "time": + dims.append(nstp) + if d == "z": + dims.append(nlay) + elif d == "y": + dims.append(nrow) + elif d == "x": + dims.append(ncol) + + return dims + + temp_dir = TemporaryDirectory() workspace = Path(temp_dir.name) # run the non-netcdf simulation -sim = get_flow_sim(workspace) +sim = create_sim(workspace) sim.write_simulation() success, buff = sim.run_simulation(silent=True, report=True) if success: @@ -221,10 +236,11 @@ def get_flow_sim(ws): x = xoff + dis.xycenters[0] y = yoff + dis.xycenters[1] z = [float(x) for x in range(1, dis.nlay + 1)] - -# set nstp and time nstp = sum(gwf.modeltime.nstp) time = gwf.modeltime.tslen +nlay = dis.nlay +nrow = dis.nrow +ncol = dis.ncol # create coordinate vars var_d = {"time": (["time"], time), "z": (["z"], z), "y": (["y"], y), "x": (["x"], x)} @@ -239,23 +255,27 @@ def get_flow_sim(ws): for v in nc_info: varname = nc_info[v]["varname"] - data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) - var_d = {varname: (shape, data)} + data = np.full( + data_shape(nc_info[v]["nc_shape"]), + nc_info[v]["attrs"]["_FillValue"], + dtype=nc_info[v]["nc_type"], + ) + var_d = {varname: (nc_info[v]["nc_shape"], data)} ds = ds.assign(var_d) - # add required modflow 6 param attributes for a in nc_info[v]["attrs"]: ds[varname].attrs[a] = nc_info[v]["attrs"][a] - ds[varname].attrs["_FillValue"] = DNODATA # update q netcdf array from flopy perioddata for p in welg.q.get_data(): if welg.q.get_data()[p] is not None: - ds["wel-1_q"].values[p] = welg.q.get_data()[p] + istp = sum(gwf.modeltime.nstp[0:p]) + ds["wel-1_q"].values[istp] = welg.q.get_data()[p] # update conc netcdf array from flopy perioddata for p in welg.aux.get_data(): if welg.aux.get_data()[p] is not None: - ds["wel-1_concentration"].values[p] = welg.aux.get_data()[p][0] + istp = sum(gwf.modeltime.nstp[0:p]) + ds["wel-1_concentration"].values[istp] = welg.aux.get_data()[p][0] # update welg input to read from netcdf with open(workspace / "netcdf" / "flow.welg", "w") as f: @@ -278,28 +298,33 @@ def get_flow_sim(ws): for v in nc_info: varname = nc_info[v]["varname"] - data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) - var_d = {varname: (shape, data)} + data = np.full( + data_shape(nc_info[v]["nc_shape"]), + nc_info[v]["attrs"]["_FillValue"], + dtype=nc_info[v]["nc_type"], + ) + var_d = {varname: (nc_info[v]["nc_shape"], data)} ds = ds.assign(var_d) - # add required modflow 6 param attributes for a in nc_info[v]["attrs"]: ds[varname].attrs[a] = nc_info[v]["attrs"][a] - ds[varname].attrs["_FillValue"] = DNODATA # update bhead netcdf array from flopy perioddata for p in ghbg.bhead.get_data(): if ghbg.bhead.get_data()[p] is not None: - ds[f"ghb-{ip}_bhead"].values[p] = ghbg.bhead.get_data()[p] + istp = sum(gwf.modeltime.nstp[0:p]) + ds[f"ghb-{ip}_bhead"].values[istp] = ghbg.bhead.get_data()[p] # update cond netcdf array from flopy perioddata for p in ghbg.cond.get_data(): if ghbg.cond.get_data()[p] is not None: - ds[f"ghb-{ip}_cond"].values[p] = ghbg.cond.get_data()[p] + istp = sum(gwf.modeltime.nstp[0:p]) + ds[f"ghb-{ip}_cond"].values[istp] = ghbg.cond.get_data()[p] # update conc netcdf array from flopy perioddata for p in ghbg.aux.get_data(): if ghbg.aux.get_data()[p] is not None: - ds[f"ghb-{ip}_concentration"].values[p] = ghbg.aux.get_data()[p][0] + istp = sum(gwf.modeltime.nstp[0:p]) + ds[f"ghb-{ip}_concentration"].values[istp] = ghbg.aux.get_data()[p][0] # update ghbg 
input to read from netcdf with open(workspace / "netcdf" / f"flow.{ip}.ghbg", "w") as f: @@ -324,23 +349,27 @@ def get_flow_sim(ws): for v in nc_info: varname = nc_info[v]["varname"] - data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) - var_d = {varname: (shape, data)} + data = np.full( + data_shape(nc_info[v]["nc_shape"]), + nc_info[v]["attrs"]["_FillValue"], + dtype=nc_info[v]["nc_type"], + ) + var_d = {varname: (nc_info[v]["nc_shape"], data)} ds = ds.assign(var_d) - # add required modflow 6 param attributes for a in nc_info[v]["attrs"]: ds[varname].attrs[a] = nc_info[v]["attrs"][a] - ds[varname].attrs["_FillValue"] = DNODATA # update stage netcdf array from flopy perioddata for p in rivg.stage.get_data(): if rivg.stage.get_data()[p] is not None: - ds[f"riv-{ip}_stage"].values[p] = rivg.stage.get_data()[p] + istp = sum(gwf.modeltime.nstp[0:p]) + ds[f"riv-{ip}_stage"].values[istp] = rivg.stage.get_data()[p] # update cond netcdf array from flopy perioddata for p in rivg.cond.get_data(): if rivg.cond.get_data()[p] is not None: - ds[f"riv-{ip}_cond"].values[p] = rivg.cond.get_data()[p] + istp = sum(gwf.modeltime.nstp[0:p]) + ds[f"riv-{ip}_cond"].values[istp] = rivg.cond.get_data()[p] # update rbot netcdf array from flopy perioddata for p in rivg.rbot.get_data(): @@ -350,7 +379,8 @@ def get_flow_sim(ws): # update conc netcdf array from flopy perioddata for p in rivg.aux.get_data(): if rivg.aux.get_data()[p] is not None: - ds[f"riv-{ip}_concentration"].values[p] = rivg.aux.get_data()[p][0] + istp = sum(gwf.modeltime.nstp[0:p]) + ds[f"riv-{ip}_concentration"].values[istp] = rivg.aux.get_data()[p][0] # update rivg input to read from netcdf with open(workspace / "netcdf" / f"flow.{ip}.rivg", "w") as f: @@ -376,28 +406,33 @@ def get_flow_sim(ws): for v in nc_info: varname = nc_info[v]["varname"] - data = np.full((nstp, dis.nlay, dis.nrow, dis.ncol), DNODATA, dtype=float) - var_d = {varname: (shape, data)} + data = np.full( + data_shape(nc_info[v]["nc_shape"]), + nc_info[v]["attrs"]["_FillValue"], + dtype=nc_info[v]["nc_type"], + ) + var_d = {varname: (nc_info[v]["nc_shape"], data)} ds = ds.assign(var_d) - # add required modflow 6 param attributes for a in nc_info[v]["attrs"]: ds[varname].attrs[a] = nc_info[v]["attrs"][a] - ds[varname].attrs["_FillValue"] = DNODATA # update elev netcdf array from flopy perioddata for p in drng.elev.get_data(): if drng.elev.get_data()[p] is not None: - ds[f"drn-{ip}_elev"].values[p] = drng.elev.get_data()[p] + istp = sum(gwf.modeltime.nstp[0:p]) + ds[f"drn-{ip}_elev"].values[istp] = drng.elev.get_data()[p] # update cond netcdf array from flopy perioddata for p in drng.cond.get_data(): if drng.cond.get_data()[p] is not None: - ds[f"drn-{ip}_cond"].values[p] = drng.cond.get_data()[p] + istp = sum(gwf.modeltime.nstp[0:p]) + ds[f"drn-{ip}_cond"].values[istp] = drng.cond.get_data()[p] # update conc netcdf array from flopy perioddata for p in drng.aux.get_data(): if drng.aux.get_data()[p] is not None: - ds[f"drn-{ip}_concentration"].values[p] = drng.aux.get_data()[p][0] + istp = sum(gwf.modeltime.nstp[0:p]) + ds[f"drn-{ip}_concentration"].values[istp] = drng.aux.get_data()[p][0] # update drng input to read from netcdf with open(workspace / "netcdf" / f"flow.{ip}.drng", "w") as f: diff --git a/flopy/mf6/mfpackage.py b/flopy/mf6/mfpackage.py index e9c6ccb5a..279b212ed 100644 --- a/flopy/mf6/mfpackage.py +++ b/flopy/mf6/mfpackage.py @@ -3426,6 +3426,10 @@ def plot(self, **kwargs): def _add_netcdf_entries( attrs, mname, pname, data_item, 
auxiliary=None, mesh=None, nlay=1 ): + DNODATA = 3.0e30 # MF6 DNODATA constant + FILLNA_INT32 = np.int32(-2147483647) # netcdf-fortran NF90_FILL_INT + FILLNA_DBL = 9.96920996838687e36 # netcdf-fortran NF90_FILL_DOUBLE + if auxiliary: auxnames = auxiliary else: @@ -3445,12 +3449,36 @@ def add_entry(tagname, iaux=None, layer=None): a = {} a["varname"] = name.lower() + if (data_item.type) == DatumType.integer: + a["nc_type"] = np.int32 + elif (data_item.type) == DatumType.double_precision: + a["nc_type"] = np.float64 + dims = [] + if data_item.shape[0] == 'nodes': + if data_item.block_name == "griddata": + dims += ["x", "y", "z"] + elif data_item.block_name == "period": + dims += ["x", "y", "z", "time"] + else: + map = {"nlay": "z", "nrow": "y", "ncol": "x"} + for s in data_item.shape: + for k, v in map.items(): + s = s.replace(k, v) + dims.append(s) + a["nc_shape"] = dims[::-1] a["attrs"] = {} a["attrs"]["modflow_input"] = (f"{mname}/{pname}/{tagname}").upper() if iaux is not None: a["attrs"]["modflow_iaux"] = iaux + 1 if layer is not None: a["attrs"]["layer"] = layer + if (data_item.type) == DatumType.integer: + a["attrs"]["_FillValue"] = FILLNA_INT32 + elif (data_item.type) == DatumType.double_precision: + if data_item.block_name == "griddata": + a["attrs"]["_FillValue"] = FILLNA_DBL + elif data_item.block_name == "period": + a["attrs"]["_FillValue"] = DNODATA attrs[key] = a if data_item.layered and mesh == "LAYERED": From 128361dd452cf2712ad7e5b910e08cd50a5ad5af Mon Sep 17 00:00:00 2001 From: mjreno Date: Fri, 1 Aug 2025 08:04:17 -0400 Subject: [PATCH 08/12] try rtd regenerate classes --- .docs/Notebooks/netcdf01_tutorial.py | 69 ++++++++-------------- .docs/Notebooks/netcdf02_tutorial.py | 85 ++++++++++------------------ .github/workflows/rtd.yml | 3 + flopy/mf6/mfpackage.py | 25 +++++--- 4 files changed, 73 insertions(+), 109 deletions(-) diff --git a/.docs/Notebooks/netcdf01_tutorial.py b/.docs/Notebooks/netcdf01_tutorial.py index 368ba5bc5..3b1cef396 100644 --- a/.docs/Notebooks/netcdf01_tutorial.py +++ b/.docs/Notebooks/netcdf01_tutorial.py @@ -184,19 +184,27 @@ def create_sim(ws): return sim -def data_shape(shape): - dims = [] - for d in shape: - if d == "time": - dims.append(nstp) - if d == "z": - dims.append(nlay) - elif d == "y": - dims.append(nrow) - elif d == "x": - dims.append(ncol) +def add_netcdf_vars(dataset, nc_info, dimmap): + def _data_shape(shape): + dims_l = [] + for d in shape: + dims_l.append(dimmap[d]) - return dims + return dims_l + + for v in nc_info: + varname = nc_info[v]["varname"] + data = np.full( + _data_shape(nc_info[v]["nc_shape"]), + nc_info[v]["attrs"]["_FillValue"], + dtype=nc_info[v]["nc_type"], + ) + var_d = {varname: (nc_info[v]["nc_shape"], data)} + dataset = dataset.assign(var_d) + for a in nc_info[v]["attrs"]: + dataset[varname].attrs[a] = nc_info[v]["attrs"][a] + + return dataset temp_dir = TemporaryDirectory() @@ -240,6 +248,7 @@ def data_shape(shape): nlay = dis.nlay nrow = dis.nrow ncol = dis.ncol +dimmap = {"time": nstp, "z": nlay, "y": nrow, "x": ncol} # create coordinate vars var_d = {"time": (["time"], time), "z": (["z"], z), "y": (["y"], y), "x": (["x"], x)} @@ -250,17 +259,7 @@ def data_shape(shape): nc_info = dis.netcdf_info() # create dis dataset variables -for v in nc_info: - varname = nc_info[v]["varname"] - data = np.full( - data_shape(nc_info[v]["nc_shape"]), - nc_info[v]["attrs"]["_FillValue"], - dtype=nc_info[v]["nc_type"], - ) - var_d = {varname: (nc_info[v]["nc_shape"], data)} - ds = ds.assign(var_d) - for a in 
nc_info[v]["attrs"]: - ds[varname].attrs[a] = nc_info[v]["attrs"][a] +ds = add_netcdf_vars(ds, nc_info, dimmap) # update data ds["dis_delr"].values = dis.delr.get_data() @@ -292,17 +291,7 @@ def data_shape(shape): nc_info = npf.netcdf_info() # create npf dataset variables -for v in nc_info: - varname = nc_info[v]["varname"] - data = np.full( - data_shape(nc_info[v]["nc_shape"]), - nc_info[v]["attrs"]["_FillValue"], - dtype=nc_info[v]["nc_type"], - ) - var_d = {varname: (nc_info[v]["nc_shape"], data)} - ds = ds.assign(var_d) - for a in nc_info[v]["attrs"]: - ds[varname].attrs[a] = nc_info[v]["attrs"][a] +ds = add_netcdf_vars(ds, nc_info, dimmap) # update data ds["npf_icelltype"].values = npf.icelltype.get_data() @@ -322,17 +311,7 @@ def data_shape(shape): nc_info = ghbg.netcdf_info() # create ghbg dataset variables -for v in nc_info: - varname = nc_info[v]["varname"] - data = np.full( - data_shape(nc_info[v]["nc_shape"]), - nc_info[v]["attrs"]["_FillValue"], - dtype=nc_info[v]["nc_type"], - ) - var_d = {varname: (nc_info[v]["nc_shape"], data)} - ds = ds.assign(var_d) - for a in nc_info[v]["attrs"]: - ds[varname].attrs[a] = nc_info[v]["attrs"][a] +ds = add_netcdf_vars(ds, nc_info, dimmap) # update bhead netcdf array from flopy perioddata for p in ghbg.bhead.get_data(): diff --git a/.docs/Notebooks/netcdf02_tutorial.py b/.docs/Notebooks/netcdf02_tutorial.py index 4692c9bba..460139e97 100644 --- a/.docs/Notebooks/netcdf02_tutorial.py +++ b/.docs/Notebooks/netcdf02_tutorial.py @@ -185,19 +185,27 @@ def create_sim(ws): return sim -def data_shape(shape): - dims = [] - for d in shape: - if d == "time": - dims.append(nstp) - if d == "z": - dims.append(nlay) - elif d == "y": - dims.append(nrow) - elif d == "x": - dims.append(ncol) +def add_netcdf_vars(dataset, nc_info, dimmap): + def _data_shape(shape): + dims_l = [] + for d in shape: + dims_l.append(dimmap[d]) - return dims + return dims_l + + for v in nc_info: + varname = nc_info[v]["varname"] + data = np.full( + _data_shape(nc_info[v]["nc_shape"]), + nc_info[v]["attrs"]["_FillValue"], + dtype=nc_info[v]["nc_type"], + ) + var_d = {varname: (nc_info[v]["nc_shape"], data)} + dataset = dataset.assign(var_d) + for a in nc_info[v]["attrs"]: + dataset[varname].attrs[a] = nc_info[v]["attrs"][a] + + return dataset temp_dir = TemporaryDirectory() @@ -241,6 +249,7 @@ def data_shape(shape): nlay = dis.nlay nrow = dis.nrow ncol = dis.ncol +dimmap = {"time": nstp, "z": nlay, "y": nrow, "x": ncol} # create coordinate vars var_d = {"time": (["time"], time), "z": (["z"], z), "y": (["y"], y), "x": (["x"], x)} @@ -253,17 +262,8 @@ def data_shape(shape): welg = gwf.get_package("wel-1") nc_info = welg.netcdf_info() -for v in nc_info: - varname = nc_info[v]["varname"] - data = np.full( - data_shape(nc_info[v]["nc_shape"]), - nc_info[v]["attrs"]["_FillValue"], - dtype=nc_info[v]["nc_type"], - ) - var_d = {varname: (nc_info[v]["nc_shape"], data)} - ds = ds.assign(var_d) - for a in nc_info[v]["attrs"]: - ds[varname].attrs[a] = nc_info[v]["attrs"][a] +# create welg dataset variables +ds = add_netcdf_vars(ds, nc_info, dimmap) # update q netcdf array from flopy perioddata for p in welg.q.get_data(): @@ -296,17 +296,8 @@ def data_shape(shape): ghbg = gwf.get_package(f"ghb-{ip}") nc_info = ghbg.netcdf_info() - for v in nc_info: - varname = nc_info[v]["varname"] - data = np.full( - data_shape(nc_info[v]["nc_shape"]), - nc_info[v]["attrs"]["_FillValue"], - dtype=nc_info[v]["nc_type"], - ) - var_d = {varname: (nc_info[v]["nc_shape"], data)} - ds = ds.assign(var_d) - for a in 
nc_info[v]["attrs"]: - ds[varname].attrs[a] = nc_info[v]["attrs"][a] + # create ghbg dataset variables + ds = add_netcdf_vars(ds, nc_info, dimmap) # update bhead netcdf array from flopy perioddata for p in ghbg.bhead.get_data(): @@ -347,17 +338,8 @@ def data_shape(shape): rivg = gwf.get_package(f"riv-{ip}") nc_info = rivg.netcdf_info() - for v in nc_info: - varname = nc_info[v]["varname"] - data = np.full( - data_shape(nc_info[v]["nc_shape"]), - nc_info[v]["attrs"]["_FillValue"], - dtype=nc_info[v]["nc_type"], - ) - var_d = {varname: (nc_info[v]["nc_shape"], data)} - ds = ds.assign(var_d) - for a in nc_info[v]["attrs"]: - ds[varname].attrs[a] = nc_info[v]["attrs"][a] + # create rivg dataset variables + ds = add_netcdf_vars(ds, nc_info, dimmap) # update stage netcdf array from flopy perioddata for p in rivg.stage.get_data(): @@ -404,17 +386,8 @@ def data_shape(shape): drng = gwf.get_package(f"drn-{ip}") nc_info = drng.netcdf_info() - for v in nc_info: - varname = nc_info[v]["varname"] - data = np.full( - data_shape(nc_info[v]["nc_shape"]), - nc_info[v]["attrs"]["_FillValue"], - dtype=nc_info[v]["nc_type"], - ) - var_d = {varname: (nc_info[v]["nc_shape"], data)} - ds = ds.assign(var_d) - for a in nc_info[v]["attrs"]: - ds[varname].attrs[a] = nc_info[v]["attrs"][a] + # create drng dataset variables + ds = add_netcdf_vars(ds, nc_info, dimmap) # update elev netcdf array from flopy perioddata for p in drng.elev.get_data(): diff --git a/.github/workflows/rtd.yml b/.github/workflows/rtd.yml index e258d00b3..0af21571e 100644 --- a/.github/workflows/rtd.yml +++ b/.github/workflows/rtd.yml @@ -135,6 +135,9 @@ jobs: ostag: mac subset: triangle + - name: Update FloPy packages + run: python -m flopy.mf6.utils.generate_classes --ref develop + - name: Run tutorial and example notebooks working-directory: autotest run: | diff --git a/flopy/mf6/mfpackage.py b/flopy/mf6/mfpackage.py index 279b212ed..843931800 100644 --- a/flopy/mf6/mfpackage.py +++ b/flopy/mf6/mfpackage.py @@ -3435,7 +3435,12 @@ def _add_netcdf_entries( else: auxnames = [] - def add_entry(tagname, iaux=None, layer=None): + def _add_entry(tagname, iaux=None, layer=None): + + # netcdf variable dictionary + a = {} + + # set dict key and netcdf variable name key = tagname name = f"{pname}" if iaux is not None: @@ -3447,7 +3452,7 @@ def add_entry(tagname, iaux=None, layer=None): key = f"{key}/layer{layer}" name = f"{name}_l{layer}" - a = {} + # add non-attrs to dictionary a["varname"] = name.lower() if (data_item.type) == DatumType.integer: a["nc_type"] = np.int32 @@ -3460,12 +3465,14 @@ def add_entry(tagname, iaux=None, layer=None): elif data_item.block_name == "period": dims += ["x", "y", "z", "time"] else: - map = {"nlay": "z", "nrow": "y", "ncol": "x"} + dimmap = {"nlay": "z", "nrow": "y", "ncol": "x"} for s in data_item.shape: - for k, v in map.items(): + for k, v in dimmap.items(): s = s.replace(k, v) dims.append(s) a["nc_shape"] = dims[::-1] + + # add variable attributes dictionary a["attrs"] = {} a["attrs"]["modflow_input"] = (f"{mname}/{pname}/{tagname}").upper() if iaux is not None: @@ -3479,22 +3486,24 @@ def add_entry(tagname, iaux=None, layer=None): a["attrs"]["_FillValue"] = FILLNA_DBL elif data_item.block_name == "period": a["attrs"]["_FillValue"] = DNODATA + + # set dictionary attrs[key] = a if data_item.layered and mesh == "LAYERED": if data_item.name == "aux" or data_item.name == "auxvar": for n, auxname in enumerate(auxnames): for l in range(nlay): - add_entry(data_item.name, n, l + 1) + _add_entry(data_item.name, n, l + 1) 
else: for l in range(nlay): - add_entry(data_item.name, layer=l + 1) + _add_entry(data_item.name, layer=l + 1) else: if data_item.name == "aux" or data_item.name == "auxvar": for n, auxname in enumerate(auxnames): - add_entry(data_item.name, iaux=n) + _add_entry(data_item.name, iaux=n) else: - add_entry(data_item.name) + _add_entry(data_item.name) @staticmethod def netcdf_attrs(mtype, ptype, auxiliary=None, mesh=None, nlay=1): From fcb3577803c903bbc4b10c4bc227fcfddbe663f0 Mon Sep 17 00:00:00 2001 From: mjreno Date: Fri, 1 Aug 2025 08:23:12 -0400 Subject: [PATCH 09/12] try 2 --- .github/workflows/rtd.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/rtd.yml b/.github/workflows/rtd.yml index 0af21571e..809201c51 100644 --- a/.github/workflows/rtd.yml +++ b/.github/workflows/rtd.yml @@ -79,6 +79,12 @@ jobs: echo $GITHUB_REF echo $GITHUB_EVENT_NAME + - name: Checkout MODFLOW 6 + uses: actions/checkout@v4 + with: + repository: MODFLOW-ORG/modflow6 + path: modflow6 + - name: Setup Micromamba uses: mamba-org/setup-micromamba@v1 with: @@ -136,7 +142,8 @@ jobs: subset: triangle - name: Update FloPy packages - run: python -m flopy.mf6.utils.generate_classes --ref develop + working-directory: modflow6 + run: python -m flopy.mf6.utils.generate_classes --dfnpath doc/mf6io/mf6ivar/dfn - name: Run tutorial and example notebooks working-directory: autotest From 31fe6b4e8dced84423e0c462786589636f8d7cc0 Mon Sep 17 00:00:00 2001 From: mjreno Date: Fri, 1 Aug 2025 09:07:11 -0400 Subject: [PATCH 10/12] pin conda devtools --- etc/environment.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/etc/environment.yml b/etc/environment.yml index d0debc795..d5dc32b5d 100644 --- a/etc/environment.yml +++ b/etc/environment.yml @@ -16,8 +16,7 @@ dependencies: - Jinja2>=3.0 - tomli - tomli-w - - pip: - - git+https://github.com/MODFLOW-ORG/modflow-devtools.git + - modflow-devtools==1.7.0 # lint - cffconvert @@ -31,8 +30,8 @@ dependencies: - jupyter - jupyter_client>=8.4.0 - jupytext + - modflow-devtools==1.7.0 - pip: - - git+https://github.com/MODFLOW-ORG/modflow-devtools.git - tach - pytest!=8.1.0 - pytest-benchmark From 34aabc1c28940fcc6a97e2f38bacb922d626cd60 Mon Sep 17 00:00:00 2001 From: mjreno Date: Fri, 1 Aug 2025 09:29:42 -0400 Subject: [PATCH 11/12] disable extended mf6 run --- .docs/Notebooks/netcdf01_tutorial.py | 13 +++++++------ .docs/Notebooks/netcdf02_tutorial.py | 13 +++++++------ 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/.docs/Notebooks/netcdf01_tutorial.py b/.docs/Notebooks/netcdf01_tutorial.py index 3b1cef396..2f6cdcef6 100644 --- a/.docs/Notebooks/netcdf01_tutorial.py +++ b/.docs/Notebooks/netcdf01_tutorial.py @@ -341,10 +341,11 @@ def _data_shape(shape): f.write(" cond NETCDF\n") f.write("END period 1\n") +# TODO need extended modflow 6 to run this simulation # run the netcdf sim -success, buff = sim.run_simulation(silent=False, report=True) -if success: - for line in buff: - print(line) -else: - raise ValueError("Failed to run.") +# success, buff = sim.run_simulation(silent=False, report=True) +# if success: +# for line in buff: +# print(line) +# else: +# raise ValueError("Failed to run.") diff --git a/.docs/Notebooks/netcdf02_tutorial.py b/.docs/Notebooks/netcdf02_tutorial.py index 460139e97..c4438e6e2 100644 --- a/.docs/Notebooks/netcdf02_tutorial.py +++ b/.docs/Notebooks/netcdf02_tutorial.py @@ -424,10 +424,11 @@ def _data_shape(shape): workspace / "netcdf/flow.structured.nc", format="NETCDF4", 
engine="netcdf4" ) +# TODO need extended modflow 6 to run this simulation # run the netcdf sim -success, buff = sim.run_simulation(silent=False, report=True) -if success: - for line in buff: - print(line) -else: - raise ValueError("Failed to run.") +# success, buff = sim.run_simulation(silent=False, report=True) +# if success: +# for line in buff: +# print(line) +# else: +# raise ValueError("Failed to run.") From 8180c28755659dd60113cb58eb2d4eec582ee500 Mon Sep 17 00:00:00 2001 From: mjreno Date: Fri, 1 Aug 2025 10:16:11 -0400 Subject: [PATCH 12/12] report error --- .docs/Notebooks/netcdf01_tutorial.py | 15 ++++----------- .docs/Notebooks/netcdf02_tutorial.py | 15 ++++----------- 2 files changed, 8 insertions(+), 22 deletions(-) diff --git a/.docs/Notebooks/netcdf01_tutorial.py b/.docs/Notebooks/netcdf01_tutorial.py index 2f6cdcef6..b70c22af3 100644 --- a/.docs/Notebooks/netcdf01_tutorial.py +++ b/.docs/Notebooks/netcdf01_tutorial.py @@ -1,5 +1,6 @@ import sys from pathlib import Path +from pprint import pformat from tempfile import TemporaryDirectory import numpy as np @@ -214,11 +215,7 @@ def _data_shape(shape): sim = create_sim(ws=workspace) sim.write_simulation() success, buff = sim.run_simulation(silent=True, report=True) -if success: - for line in buff: - print(line) -else: - raise ValueError("Failed to run.") +assert success, pformat(buff) # create directory for netcdf sim sim.set_sim_path(workspace / "netcdf") @@ -343,9 +340,5 @@ def _data_shape(shape): # TODO need extended modflow 6 to run this simulation # run the netcdf sim -# success, buff = sim.run_simulation(silent=False, report=True) -# if success: -# for line in buff: -# print(line) -# else: -# raise ValueError("Failed to run.") +# success, buff = sim.run_simulation(silent=True, report=True) +# assert success, pformat(buff) diff --git a/.docs/Notebooks/netcdf02_tutorial.py b/.docs/Notebooks/netcdf02_tutorial.py index c4438e6e2..4df8cbd29 100644 --- a/.docs/Notebooks/netcdf02_tutorial.py +++ b/.docs/Notebooks/netcdf02_tutorial.py @@ -1,5 +1,6 @@ import sys from pathlib import Path +from pprint import pformat from tempfile import TemporaryDirectory import numpy as np @@ -215,11 +216,7 @@ def _data_shape(shape): sim = create_sim(workspace) sim.write_simulation() success, buff = sim.run_simulation(silent=True, report=True) -if success: - for line in buff: - print(line) -else: - raise ValueError("Failed to run.") +assert success, pformat(buff) # create directory for netcdf sim sim.set_sim_path(workspace / "netcdf") @@ -426,9 +423,5 @@ def _data_shape(shape): # TODO need extended modflow 6 to run this simulation # run the netcdf sim -# success, buff = sim.run_simulation(silent=False, report=True) -# if success: -# for line in buff: -# print(line) -# else: -# raise ValueError("Failed to run.") +# success, buff = sim.run_simulation(silent=True, report=True) +# assert success, pformat(buff)
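For reference, the static helper introduced in PATCH 03 can also be queried without building a simulation; the tutorials above only exercise the instance-level netcdf_info(). Below is a minimal sketch, assuming the FloPy classes have been regenerated against the MODFLOW 6 develop dfns so that the grid-based ghbg package exists (the rtd workflow change in PATCH 08 does this); the model type, package type, and auxiliary list are illustrative only:

# Sketch: look up the NetCDF variable attributes declared in the dfn metadata
# for the grid-based GHB package, without instantiating a model or package.
from flopy.mf6.mfpackage import MFPackage

# keys are input tags ("bhead", "cond", "aux/0", ...); values are the attribute
# dicts written onto each NetCDF variable ("modflow_input", "_FillValue", ...)
attrs = MFPackage.netcdf_attrs("gwf", "ghbg", auxiliary=["concentration"])
for tag, var_attrs in attrs.items():
    print(tag, var_attrs["modflow_input"])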