10 changes: 9 additions & 1 deletion docs/source/usersguide/processing.rst
@@ -97,7 +97,15 @@ VTK Mesh File Generation
------------------------

VTK files of OpenMC meshes can be created using the
:meth:`openmc.Mesh.write_data_to_vtk` method. Data can be applied to the
:meth:`openmc.Mesh.write_data_to_vtk` method. This method supports several VTK
formats depending on the mesh type. Structured meshes
(:class:`~openmc.RegularMesh`, :class:`~openmc.RectilinearMesh`,
:class:`~openmc.CylindricalMesh`, and :class:`~openmc.SphericalMesh`) can be
exported to legacy VTK format (``.vtk``). The :class:`~openmc.UnstructuredMesh`
class supports VTK unstructured grid formats (``.vtu``) as well as an HDF5-based
format (``.vtkhdf``) that does not require the ``vtk`` module to write.
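
For example, a minimal sketch of exporting tally results for an
unstructured mesh (the ``mesh`` object and ``mean`` array here are assumed
to already exist; the output format follows the file extension)::

    mesh.write_data_to_vtk(
        filename="tally_results.vtkhdf",
        datasets={"mean": mean},
    )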

Data can be applied to the
elements of the resulting mesh from mesh filter objects. This data can be
provided either as a flat array or, in the case of structured meshes
(:class:`~openmc.RegularMesh`, :class:`~openmc.RectilinearMesh`,
109 changes: 52 additions & 57 deletions openmc/mesh.py
@@ -2444,7 +2444,8 @@ class UnstructuredMesh(MeshBase):
_UNSUPPORTED_ELEM = -1
_LINEAR_TET = 0
_LINEAR_HEX = 1
_VTK_TETRA = 10
_VTK_TET = 10
_VTK_HEX = 12

def __init__(self, filename: PathLike, library: str, mesh_id: int | None = None,
name: str = '', length_multiplier: float = 1.0,
@@ -2751,7 +2752,9 @@ def _write_data_to_vtk_ascii_format(
n_skipped += 1
continue
else:
raise RuntimeError(f"Invalid element type {elem_type} found")
raise RuntimeError(
f"Invalid element type {elem_type} found in mesh {self.id}"
)

for i, c in enumerate(conn):
if c == -1:
@@ -2808,35 +2811,42 @@ def _write_data_to_vtk_hdf5_format(
datasets: dict | None = None,
volume_normalization: bool = True,
):
def append_dataset(dset, array):
"""Convenience function to append data to an HDF5 dataset"""
origLen = dset.shape[0]
dset.resize(origLen + array.shape[0], axis=0)
dset[origLen:] = array

if self.library != "moab":
raise NotImplementedError("VTKHDF output is only supported for MOAB meshes")

# self.connectivity contains arrays of length 8 to support hex
# elements; for tetrahedral elements, the last 4 values are -1
# and are removed
trimmed_connectivity = []
for cell in self.connectivity:
# Find the index of the first -1 value, if any
first_negative_index = np.where(cell == -1)[0]
if first_negative_index.size > 0:
# Slice the array up to the first -1 value
trimmed_connectivity.append(cell[: first_negative_index[0]])
# This writer supports linear tetrahedral and hexahedral elements
conn_list = [] # flattened connectivity ids
cell_sizes = [] # number of points per cell
vtk_types = [] # VTK cell types per cell (uint8)
n_skipped = 0

for conn, etype in zip(self.connectivity, self.element_types):
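    # conn is padded to length 8; a linear tet uses only the first 4
    # entries (e.g. [a, b, c, d, -1, -1, -1, -1]) while a hex uses all 8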
if etype == self._LINEAR_TET:
ids = conn[:4]
vtk_types.append(self._VTK_TET)
elif etype == self._LINEAR_HEX:
ids = conn[:8]
vtk_types.append(self._VTK_HEX)
elif etype == self._UNSUPPORTED_ELEM:
n_skipped += 1
continue
else:
# No -1 values, append the whole cell
trimmed_connectivity.append(cell)
trimmed_connectivity = np.array(trimmed_connectivity, dtype="int32").flatten()
raise RuntimeError(
f"Invalid element type {etype} found in mesh {self.id}"
)
conn_list.extend(ids.tolist())
cell_sizes.append(len(ids))

# MOAB meshes support tet elements only, so each cell has 4 points
points_per_cell = 4
if n_skipped > 0:
warnings.warn(
f"{n_skipped} elements were not written because "
"they are not of type linear tet/hex"
)

# offsets are the indices of the first point of each cell in the array of points
offsets = np.arange(0, self.n_elements * points_per_cell + 1, points_per_cell)
connectivity = np.asarray(conn_list, dtype=np.int64)

# Offsets must be length (numCells + 1) with a leading 0 and
# cumulative end-indices thereafter, per VTK's layout
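# e.g. cell sizes [4, 8, 4] yield offsets [0, 4, 12, 16]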
cell_sizes_arr = np.asarray(cell_sizes, dtype=np.int64)
offsets = np.zeros(cell_sizes_arr.size + 1, dtype=np.int64)
np.cumsum(cell_sizes_arr, out=offsets[1:])

for name, data in datasets.items():
if data.shape != self.dimension:
@@ -2858,42 +2868,27 @@ def append_dataset(dset, array):
dtype=h5py.string_dtype("ascii", len(ascii_type)),
)

# create hdf5 file structure
root.create_dataset("NumberOfPoints", (0,), maxshape=(None,), dtype="i8")
root.create_dataset("Types", (0,), maxshape=(None,), dtype="uint8")
root.create_dataset("Points", (0, 3), maxshape=(None, 3), dtype="f")
root.create_dataset(
"NumberOfConnectivityIds", (0,), maxshape=(None,), dtype="i8"
)
root.create_dataset("NumberOfCells", (0,), maxshape=(None,), dtype="i8")
root.create_dataset("Offsets", (0,), maxshape=(None,), dtype="i8")
root.create_dataset("Connectivity", (0,), maxshape=(None,), dtype="i8")

append_dataset(root["NumberOfPoints"], np.array([len(self.vertices)]))
append_dataset(root["Points"], self.vertices)
append_dataset(
root["NumberOfConnectivityIds"],
np.array([len(trimmed_connectivity)]),
)
append_dataset(root["Connectivity"], trimmed_connectivity)
append_dataset(root["NumberOfCells"], np.array([self.n_elements]))
append_dataset(root["Offsets"], offsets)
# Create HDF5 file structure compliant with VTKHDF UnstructuredGrid
n_points = int(len(self.vertices))
n_cells = int(len(cell_sizes))
n_conn_ids = int(len(connectivity))

append_dataset(
root["Types"], np.full(self.n_elements, self._VTK_TETRA, dtype="uint8")
)
root.create_dataset("NumberOfPoints", data=(n_points,), dtype="i8")
root.create_dataset("NumberOfCells", data=(n_cells,), dtype="i8")
root.create_dataset("NumberOfConnectivityIds", data=(n_conn_ids,), dtype="i8")
root.create_dataset("Points", data=self.vertices.astype(np.float64, copy=False), dtype="f8")
root.create_dataset("Types", data=np.asarray(vtk_types, dtype=np.uint8), dtype="uint8")
root.create_dataset("Offsets", data=offsets.astype("i8"), dtype="i8")
root.create_dataset("Connectivity", data=connectivity.astype("i8"), dtype="i8")

cell_data_group = root.create_group("CellData")

for name, data in datasets.items():

cell_data_group.create_dataset(
name, (0,), maxshape=(None,), dtype="float64", chunks=True
)

if volume_normalization:
data /= self.volumes
append_dataset(cell_data_group[name], data)
cell_data_group.create_dataset(
name, data=data, dtype="float64", chunks=True
)

@classmethod
def from_hdf5(cls, group: h5py.Group, mesh_id: int, name: str):
44 changes: 41 additions & 3 deletions tests/unit_tests/test_mesh.py
@@ -488,22 +488,38 @@ def test_umesh(run_in_tmpdir, simple_umesh, export_type):
simple_umesh.write_data_to_vtk(datasets={'mean': ref_data[:-2]}, filename=filename)


vtkhdf_tests = [
    (Path("test_mesh_dagmc_tets.vtk"), "moab"),
    (Path("test_mesh_hexes.exo"), "libmesh"),
]
@pytest.mark.skipif(not openmc.lib._dagmc_enabled(), reason="DAGMC not enabled.")
def test_write_vtkhdf(request, run_in_tmpdir):
@pytest.mark.parametrize('mesh_file, mesh_library', vtkhdf_tests)
def test_write_vtkhdf(mesh_file, mesh_library, request, run_in_tmpdir):
"""Performs a minimal UnstructuredMesh simulation, reads in the resulting
statepoint file and writes the mesh data to vtk and vtkhdf files. It is
necessary to read in the unstructured mesh from a statepoint file to ensure
it has all the required attributes.
"""
if mesh_library == 'moab' and not openmc.lib._dagmc_enabled():
pytest.skip("DAGMC not enabled.")
if mesh_library == 'libmesh' and not openmc.lib._libmesh_enabled():
pytest.skip("LibMesh not enabled.")

model = openmc.Model()

surf1 = openmc.Sphere(r=1000.0, boundary_type="vacuum")
cell1 = openmc.Cell(region=-surf1)
model.geometry = openmc.Geometry([cell1])

umesh = openmc.UnstructuredMesh(
request.path.parent / "test_mesh_dagmc_tets.vtk",
"moab",
request.path.parent / mesh_file,
mesh_library,
mesh_id=1
)
mesh_filter = openmc.MeshFilter(umesh)
@@ -512,6 +528,8 @@ def test_write_vtkhdf(request, run_in_tmpdir):
mesh_tally = openmc.Tally(name="test_tally")
mesh_tally.filters = [mesh_filter]
mesh_tally.scores = ["flux"]
if mesh_library == "libmesh":
mesh_tally.estimator = "collision"

model.tallies = [mesh_tally]

@@ -554,6 +572,26 @@ def test_write_vtkhdf(request, run_in_tmpdir):
with h5py.File("test_mesh.vtkhdf", "r"):
...

import vtk
reader = vtk.vtkHDFReader()
reader.SetFileName("test_mesh.vtkhdf")
reader.Update()

# Get mean from file and make sure it matches original data
num_elements = reader.GetOutput().GetNumberOfCells()
assert num_elements == umesh_from_sp.n_elements

num_vertices = reader.GetOutput().GetNumberOfPoints()
assert num_vertices == umesh_from_sp.vertices.shape[0]

arr = reader.GetOutput().GetCellData().GetArray("mean")
mean = np.array([arr.GetTuple1(i) for i in range(my_tally.mean.size)])
np.testing.assert_almost_equal(mean, my_tally.mean.flatten()/umesh_from_sp.volumes)

arr = reader.GetOutput().GetCellData().GetArray("std_dev")
std_dev = np.array([arr.GetTuple1(i) for i in range(my_tally.std_dev.size)])
np.testing.assert_almost_equal(std_dev, my_tally.std_dev.flatten()/umesh_from_sp.volumes)


def test_mesh_get_homogenized_materials():
"""Test the get_homogenized_materials method"""