Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions flopy4/mf6/codec/reader/grammar/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ def _get_template_env():
)
env.filters["field_type"] = filters.field_type
env.filters["record_child_type"] = filters.record_child_type
env.filters["to_rule_name"] = filters.to_rule_name
return env


Expand Down
8 changes: 8 additions & 0 deletions flopy4/mf6/codec/reader/grammar/filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,3 +77,11 @@ def get_recarray_name(block_name: str) -> str:
if block_name == "period":
return "stress_period_data"
return f"{block_name}data"


def to_rule_name(name: str) -> str:
    """Sanitize a field name so it is a valid Lark rule identifier.

    Lark rule names may not contain hyphens, so every ``-`` becomes
    ``_`` (e.g. ``steady-state`` -> ``steady_state``).
    """
    return "_".join(name.split("-"))
4 changes: 2 additions & 2 deletions flopy4/mf6/codec/reader/grammar/generated/gwf-sto.lark
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ period_block: "begin"i "period"i block_index period_fields "end"i "period"i bloc
block_index: integer
options_fields: (save_flows | storagecoefficient | ss_confined_only | tvs_filerecord | export_array_ascii | export_array_netcdf | dev_original_specific_storage | dev_oldstorageformulation)*
griddata_fields: (iconvert | ss | sy)*
period_fields: (steady-state | transient)*
period_fields: (steady_state | transient)*
save_flows: "save_flows"i
storagecoefficient: "storagecoefficient"i
ss_confined_only: "ss_confined_only"i
Expand All @@ -34,5 +34,5 @@ dev_oldstorageformulation: "dev_oldstorageformulation"i
iconvert: "iconvert"i array
ss: "ss"i array
sy: "sy"i array
steady-state: "steady-state"i
steady_state: "steady-state"i
transient: "transient"i
22 changes: 11 additions & 11 deletions flopy4/mf6/codec/reader/grammar/templates/macros.jinja
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
{# Field rendering macros #}

{% macro record_field(field_name, field) -%}
{{ field_name }}: {% for child_name, child in field.children.items() -%}
{{ field_name|to_rule_name }}: {% for child_name, child in field.children.items() -%}
{%- set child_type = child|record_child_type -%}
{%- if child.type == 'keyword' -%}
"{{ child.name }}"i
{%- elif child.type == 'union' -%}
{{ child_name }}
{{ child_name|to_rule_name }}
{%- else -%}
{{ child_type }}
{%- endif -%}
Expand All @@ -15,12 +15,12 @@
{%- endmacro %}

{% macro union_field(field_name, field) -%}
{{ field_name }}: {% for child_name, child in field.children.items() -%}
{{ field_name }}_{{ child_name }}
{{ field_name|to_rule_name }}: {% for child_name, child in field.children.items() -%}
{{ field_name|to_rule_name }}_{{ child_name|to_rule_name }}
{%- if not loop.last %} | {% endif -%}
{%- endfor %}
{% for child_name, child in field.children.items() -%}
{{ field_name }}_{{ child_name }}: {% if child.type == 'keyword' -%}
{{ field_name|to_rule_name }}_{{ child_name|to_rule_name }}: {% if child.type == 'keyword' -%}
"{{ child.name }}"i
{%- else -%}
"{{ child.name }}"i {{ child.type }}
Expand All @@ -29,17 +29,17 @@
{%- endmacro %}

{% macro simple_field(field_name, field, field_type) -%}
{# Emit one rule: sanitized rule name, case-insensitive keyword, value type. #}
{# Fix: removed the stale duplicate line that still used the raw (un-sanitized) field_name. #}
{{ field_name|to_rule_name }}: "{{ field.name }}"i {{ field_type }}
{%- endmacro %}

{% macro nested_union(child_name, child) -%}
{{ child_name }}: {% for opt_name, opt in child.children.items() -%}
{{ child_name }}_{{ opt_name }}
{{ child_name|to_rule_name }}: {% for opt_name, opt in child.children.items() -%}
{{ child_name|to_rule_name }}_{{ opt_name|to_rule_name }}
{%- if not loop.last %} | {% endif -%}
{%- endfor %}

{% for opt_name, opt in child.children.items() -%}
{{ child_name }}_{{ opt_name }}: {% if opt.type == 'keyword' -%}
{{ child_name|to_rule_name }}_{{ opt_name|to_rule_name }}: {% if opt.type == 'keyword' -%}
"{{ opt.name }}"i
{%- else -%}
"{{ opt.name }}"i {{ opt.type }}{% if opt.shape %}+{% endif %}
Expand All @@ -61,12 +61,12 @@
{%- macro field_list(block_name, fields, recarray_name) -%}
{%- if recarray_name -%}
{{ block_name }}_fields: (
{%- for field_name in fields %}{{ field_name }} | {% endfor -%}
{%- for field_name in fields %}{{ field_name|to_rule_name }} | {% endfor -%}
{{ recarray_name }})*
{%- else -%}
{{ block_name }}_fields: (
{%- for field_name in fields %}
{{- field_name }}{% if not loop.last %} | {% endif %}
{{- field_name|to_rule_name }}{% if not loop.last %} | {% endif %}
{%- endfor -%}
)*
{%- endif -%}
Expand Down
31 changes: 29 additions & 2 deletions flopy4/mf6/codec/reader/transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,14 @@ class BasicTransformer(Transformer):
grammar. Yields blocks simply as collections of lines of tokens.
"""

def __getattr__(self, name):
    """Delegate ``typed__``-prefixed attribute lookups to the unprefixed handler.

    Rule-handler methods may be looked up with a ``typed__`` prefix
    (presumably added when the grammar is imported under a namespace —
    confirm against the grammar loader); such lookups fall through to
    the unprefixed method on this transformer.

    Raises:
        AttributeError: if the name has no ``typed__`` prefix, or the
            unprefixed attribute does not exist either.
    """
    prefix = "typed__"
    if name.startswith(prefix):
        try:
            # EAFP: one lookup instead of hasattr() followed by getattr(),
            # and len(prefix) instead of the magic slice index 7.
            return getattr(self, name[len(prefix):])
        except AttributeError:
            pass
    raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")

def start(self, items: list[Any]) -> dict[str, Any]:
blocks = {}
for item in items:
Expand Down Expand Up @@ -72,6 +80,14 @@ def __init__(self, visit_tokens=False, dfn: Dfn = None):
# Create a flattened fields dict that includes nested fields
self._flat_fields = self._flatten_fields(self.fields) if self.fields else None

def __getattr__(self, name):
    """Delegate ``typed__``-prefixed attribute lookups to the unprefixed handler.

    Rule-handler methods may be looked up with a ``typed__`` prefix
    (presumably added when the grammar is imported under a namespace —
    confirm against the grammar loader); such lookups fall through to
    the unprefixed method on this transformer.

    Raises:
        AttributeError: if the name has no ``typed__`` prefix, or the
            unprefixed attribute does not exist either.
    """
    prefix = "typed__"
    if name.startswith(prefix):
        try:
            # EAFP: one lookup instead of hasattr() followed by getattr(),
            # and len(prefix) instead of the magic slice index 7.
            return getattr(self, name[len(prefix):])
        except AttributeError:
            pass
    raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")

def _flatten_fields(self, fields: dict) -> dict:
"""Recursively flatten fields dict to include children of records and unions."""
flat = dict(fields) # Start with top-level fields
Expand Down Expand Up @@ -181,7 +197,13 @@ def filename(self, items: list[Any]) -> Path:
return Path(items[0].strip("\"'"))

def string(self, items: list[Any]) -> str:
    """Transform a ``string`` node into its unquoted text.

    The single child may be a token (str-like) or a subtree (e.g. a
    ``word`` rule) whose first child carries the text.

    Fix: the stale pre-change line ``return items[0].strip(...)`` had been
    left above the tree-aware logic, making everything after it unreachable.
    """
    value = items[0]
    if hasattr(value, "strip"):
        # Token / plain string: just drop any surrounding quotes.
        return value.strip("\"'")
    # Parse tree: take its first child's text; fall back to str() otherwise.
    return str(value.children[0]) if hasattr(value, "children") else str(value)

def simple_string(self, items: list[Any]) -> str:
"""Handle simple string (unquoted word or escaped string)."""
Expand Down Expand Up @@ -321,7 +343,12 @@ def __default__(self, data, children, meta):
else:
# Non-keyword alternatives return the transformed children
return children[0] if len(children) == 1 else children
if (field := self._flat_fields.get(data, None)) is not None:
# Try to find the field, checking with underscore replacement for hyphens
field = self._flat_fields.get(data, None)
if field is None and "-" in data:
# Try with hyphens instead of underscores (reverse of to_rule_name)
field = self._flat_fields.get(data.replace("_", "-"), None)
if field is not None:
if field.type == "keyword":
return data, True
elif field.type == "record" and hasattr(field, "children") and field.children:
Expand Down
153 changes: 149 additions & 4 deletions test/test_mf6_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import pytest
import xarray as xr
from lark import Lark
from modflow_devtools.dfn import Dfn, load_flat
from modflow_devtools.dfn import Dfn, MapV1To2, load_flat
from modflow_devtools.models import get_models
from packaging.version import Version

Expand Down Expand Up @@ -417,9 +417,15 @@ def test_transform_gwf_ic_file(model_workspace, dfn_path):
assert "griddata" in result # IC has griddata block
assert "strt" in result["griddata"] # Starting heads

# Check strt field exists (array transformation not fully implemented yet)
strt_data = result["griddata"]["strt"]
assert strt_data is not None
# Check strt array structure
strt = result["griddata"]["strt"]
assert "control" in strt
assert "data" in strt
assert strt["control"]["type"] in ["constant", "internal", "external"]

# If internal or constant, should have data
if strt["control"]["type"] in ["constant", "internal"]:
assert strt["data"] is not None


@pytest.mark.parametrize("model_workspace", ["mf6/example/ex-gwf-bcf2ss-p01a"], indirect=True)
Expand Down Expand Up @@ -568,3 +574,142 @@ def test_transform_gwf_oc_file(model_workspace, dfn_path):
for rec in save_records:
assert "ocsetting" in rec
assert rec["ocsetting"] == "all"


@pytest.mark.parametrize("model_workspace", ["mf6/example/ex-gwf-csub-p01"], indirect=True)
def test_transform_gwf_dis_file(model_workspace, dfn_path):
    """Test transforming a parsed GWF DIS file into structured data."""

    # Load the DFN for DIS and convert to V2
    v1_dfns = load_flat(dfn_path)
    dis_dfn = MapV1To2().map(v1_dfns["gwf-dis"])

    # Find the DIS file (truthiness instead of len(...) > 0)
    dis_files = list(model_workspace.rglob("*.dis"))
    assert dis_files

    parser = get_typed_parser("gwf-dis")
    transformer = TypedTransformer(dfn=dis_dfn)

    # Read, parse, and transform (pathlib read_text over open()/read())
    tree = parser.parse(dis_files[0].read_text())
    result = transformer.transform(tree)

    # Check structure
    assert isinstance(result, dict)

    # Dimensions block must carry positive grid extents
    assert "dimensions" in result
    for key in ("nlay", "nrow", "ncol"):
        assert key in result["dimensions"]
        assert result["dimensions"][key] > 0

    # Griddata block must carry the spatial arrays
    assert "griddata" in result
    griddata = result["griddata"]
    for name in ("delr", "delc", "top", "botm"):
        assert name in griddata

    # Each array should have control and data
    assert "control" in griddata["delr"]
    assert "data" in griddata["delr"]


@pytest.mark.parametrize("model_workspace", ["mf6/example/ex-gwf-csub-p01"], indirect=True)
def test_transform_gwf_npf_file(model_workspace, dfn_path):
    """Test transforming a parsed GWF NPF file into structured data."""

    # Load the DFN for NPF and convert to V2
    v1_dfns = load_flat(dfn_path)
    npf_dfn = MapV1To2().map(v1_dfns["gwf-npf"])

    # Find the NPF file (truthiness instead of len(...) > 0)
    npf_files = list(model_workspace.rglob("*.npf"))
    assert npf_files

    parser = get_typed_parser("gwf-npf")
    transformer = TypedTransformer(dfn=npf_dfn)

    # Read, parse, and transform (pathlib read_text over open()/read())
    tree = parser.parse(npf_files[0].read_text())
    result = transformer.transform(tree)

    # Check structure
    assert isinstance(result, dict)

    # Options block: the save_specific_discharge keyword should be set
    assert "options" in result
    options = result["options"]
    assert "save_specific_discharge" in options
    assert options["save_specific_discharge"] is True

    # Griddata block: NPF should have at least icelltype and k,
    # each carrying array control metadata and data
    assert "griddata" in result
    griddata = result["griddata"]
    for name in ("icelltype", "k"):
        assert name in griddata
        assert "control" in griddata[name]
        assert "data" in griddata[name]


@pytest.mark.parametrize("model_workspace", ["mf6/example/ex-gwf-csub-p01"], indirect=True)
def test_transform_gwf_sto_file(model_workspace, dfn_path):
    """Test transforming a parsed GWF STO file into structured data."""

    # Load the DFN for STO and convert to V2
    v1_dfns = load_flat(dfn_path)
    sto_dfn = MapV1To2().map(v1_dfns["gwf-sto"])

    # Find the STO file; skip if absent (not all models have storage).
    # Idiom: truthiness instead of len(...) == 0.
    sto_files = list(model_workspace.rglob("*.sto"))
    if not sto_files:
        pytest.skip("No STO files found in this model")

    parser = get_typed_parser("gwf-sto")
    transformer = TypedTransformer(dfn=sto_dfn)

    # Read, parse, and transform (pathlib read_text over open()/read())
    tree = parser.parse(sto_files[0].read_text())
    result = transformer.transform(tree)

    # Check structure
    assert isinstance(result, dict)

    # Griddata block: STO should have iconvert with control + data
    assert "griddata" in result
    griddata = result["griddata"]
    assert "iconvert" in griddata
    assert "control" in griddata["iconvert"]
    assert "data" in griddata["iconvert"]
Loading