From b295f4a0952b5cbdcf700a73cd992e9be98f27c6 Mon Sep 17 00:00:00 2001
From: Jon Shimwell
Date: Wed, 8 Jan 2025 17:30:18 +0100
Subject: [PATCH 1/7] added option to write vtkhdf files for umesh

---
 openmc/mesh.py                                     | 225 ++++++++++++----
 .../test_mesh_dagmc_tets.vtk                       | 253 ++++++++++++++++++
 tests/unit_tests/test_mesh.py                      |  68 +++++
 3 files changed, 489 insertions(+), 57 deletions(-)
 create mode 100644 tests/regression_tests/unstructured_mesh/test_mesh_dagmc_tets.vtk

diff --git a/openmc/mesh.py b/openmc/mesh.py
index 0b6f6b84e4b..019c44c8b46 100644
--- a/openmc/mesh.py
+++ b/openmc/mesh.py
@@ -5,6 +5,7 @@
 from functools import wraps
 from math import pi, sqrt, atan2
 from numbers import Integral, Real
+from pathlib import Path
 
 import h5py
 import lxml.etree as ET
@@ -2368,7 +2369,9 @@ def write_data_to_vtk(
         Parameters
         ----------
         filename : str or pathlib.Path
-            Name of the VTK file to write
+            Name of the VTK file to write. If the filename ends in '.hdf' then a VTKHDF
+            format file will be written and can be opened with ParaView versions 5.13.0
+            and above; if the filename ends in '.vtk' then a legacy .vtk file will be written.
         datasets : dict
             Dictionary whose keys are the data labels and values are
             appropriately sized numpy arrays
@@ -2377,78 +2380,186 @@ def write_data_to_vtk(
             Whether or not to normalize the data by
             the volume of the mesh elements
         """
-        import vtk
-        from vtk.util import numpy_support as nps
 
-        if self.connectivity is None or self.vertices is None:
-            raise RuntimeError('This mesh has not been '
-                               'loaded from a statepoint file.')
+        if Path(filename).suffix == ".hdf":
+
+            def append_dataset(dset, array):
+                """Convenience function to append data to an HDF5 dataset"""
+                orig_len = dset.shape[0]
+                dset.resize(orig_len + array.shape[0], axis=0)
+                dset[orig_len:] = array
+
+            if self.library != "moab":
+                raise NotImplementedError("VTKHDF output is only supported for MOAB meshes")
+
+            # self.connectivity contains arrays of length 8; in the case of
+            # DAGMC tetrahedral mesh elements, the last 4 values are -1 and can be removed
+            trimmed_connectivity = []
+            for cell in self.connectivity:
+                # Find the index of the first -1 value, if any
+                first_negative_index = np.where(cell == -1)[0]
+                if first_negative_index.size > 0:
+                    # Slice the array up to the first -1 value
+                    trimmed_connectivity.append(cell[: first_negative_index[0]])
+                else:
+                    # No -1 values, append the whole cell
+                    trimmed_connectivity.append(cell)
+            trimmed_connectivity = np.array(
+                trimmed_connectivity, dtype="int32"
+            ).flatten()
+
+            # DAGMC supports tet meshes only so we know it has 4 points per cell
+            points_per_cell = 4
+
+            # offsets are the indices of the first point of each cell in the array of points
+            offsets = np.arange(
+                0, self.n_elements * points_per_cell + 1, points_per_cell
+            )
 
-        if filename is None:
-            filename = f'mesh_{self.id}.vtk'
+            with h5py.File(filename, "w") as f:
+
+                root = f.create_group("VTKHDF")
+                root.attrs["Version"] = (2, 1)
+                ascii_type = "UnstructuredGrid".encode("ascii")
+                root.attrs.create(
+                    "Type",
+                    ascii_type,
+                    dtype=h5py.string_dtype("ascii", len(ascii_type)),
+                )
+
+                # create hdf5 file structure
+                root.create_dataset(
+                    "NumberOfPoints", (0,), maxshape=(None,), dtype="i8"
+                )
+                root.create_dataset("Types", (0,), maxshape=(None,), dtype="uint8")
+                root.create_dataset("Points", (0, 3), maxshape=(None, 3), dtype="f")
+                root.create_dataset(
+                    "NumberOfConnectivityIds", (0,), maxshape=(None,), dtype="i8"
+                )
+                root.create_dataset("NumberOfCells", (0,), maxshape=(None,), dtype="i8")
+                root.create_dataset("Offsets", (0,), maxshape=(None,), dtype="i8")
+                root.create_dataset("Connectivity", (0,), maxshape=(None,), dtype="i8")
+
+                append_dataset(root["NumberOfPoints"], np.array([len(self.vertices)]))
+                append_dataset(root["Points"], self.vertices)
+                append_dataset(
+                    root["NumberOfConnectivityIds"],
+                    np.array([len(trimmed_connectivity)]),
+                )
+                append_dataset(root["Connectivity"], trimmed_connectivity)
+                append_dataset(root["NumberOfCells"], np.array([self.n_elements]))
+                append_dataset(root["Offsets"], offsets)
+
+                # VTK_TETRA type (10) is used since DAGMC only supports tet meshes
+                append_dataset(
+                    root["Types"], np.full(self.n_elements, 10, dtype="uint8")
+                )
+
+                cell_data_group = root.create_group("CellData")
 
-        writer = vtk.vtkUnstructuredGridWriter()
+                for name, data in datasets.items():
 
-        writer.SetFileName(str(filename))
+                    if data.shape != self.dimension:
+                        raise ValueError(
+                            f'Cannot apply dataset "{name}" with '
+                            f"shape {data.shape} to mesh {self.id} "
+                            f"with dimensions {self.dimension}"
+                        )
 
-        grid = vtk.vtkUnstructuredGrid()
+                    cell_data_group.create_dataset(
+                        name, (0,), maxshape=(None,), dtype="float64", chunks=True
+                    )
 
-        vtk_pnts = vtk.vtkPoints()
-        vtk_pnts.SetData(nps.numpy_to_vtk(self.vertices))
-        grid.SetPoints(vtk_pnts)
+                    if volume_normalization:
+                        data /= self.volumes
+                    append_dataset(cell_data_group[name], data)
 
-        n_skipped = 0
-        for elem_type, conn in zip(self.element_types, self.connectivity):
-            if elem_type == self._LINEAR_TET:
-                elem = vtk.vtkTetra()
-            elif elem_type == self._LINEAR_HEX:
-                elem = vtk.vtkHexahedron()
-            elif elem_type == self._UNSUPPORTED_ELEM:
-                n_skipped += 1
-            else:
-                raise RuntimeError(f'Invalid element type {elem_type} found')
-            for i, c in enumerate(conn):
-                if c == -1:
-                    break
-                elem.GetPointIds().SetId(i, c)
+        elif Path(filename).suffix == ".vtk":
 
-            grid.InsertNextCell(elem.GetCellType(), elem.GetPointIds())
+            import vtk
+            from vtk.util import numpy_support as nps
 
-        if n_skipped > 0:
-            warnings.warn(f'{n_skipped} elements were not written because '
-                          'they are not of type linear tet/hex')
+            if self.connectivity is None or self.vertices is None:
+                raise RuntimeError(
+                    "This mesh has not been loaded from a statepoint file."
+                )
 
-        # check that datasets are the correct size
-        datasets_out = []
-        if datasets is not None:
-            for name, data in datasets.items():
-                if data.shape != self.dimension:
-                    raise ValueError(f'Cannot apply dataset "{name}" with '
-                                     f'shape {data.shape} to mesh {self.id} '
-                                     f'with dimensions {self.dimension}')
+            if filename is None:
+                filename = f"mesh_{self.id}.vtk"
+
+            writer = vtk.vtkUnstructuredGridWriter()
+
+            writer.SetFileName(str(filename))
+
+            grid = vtk.vtkUnstructuredGrid()
+
+            vtk_pnts = vtk.vtkPoints()
+            vtk_pnts.SetData(nps.numpy_to_vtk(self.vertices))
+            grid.SetPoints(vtk_pnts)
+
+            n_skipped = 0
+            for elem_type, conn in zip(self.element_types, self.connectivity):
+                if elem_type == self._LINEAR_TET:
+                    elem = vtk.vtkTetra()
+                elif elem_type == self._LINEAR_HEX:
+                    elem = vtk.vtkHexahedron()
+                elif elem_type == self._UNSUPPORTED_ELEM:
+                    n_skipped += 1
+                else:
+                    raise RuntimeError(f"Invalid element type {elem_type} found")
+                for i, c in enumerate(conn):
+                    if c == -1:
+                        break
+                    elem.GetPointIds().SetId(i, c)
+
+                grid.InsertNextCell(elem.GetCellType(), elem.GetPointIds())
+
+            if n_skipped > 0:
+                warnings.warn(
+                    f"{n_skipped} elements were not written because "
+                    "they are not of type linear tet/hex"
+                )
 
-            if volume_normalization:
+            # check that datasets are the correct size
+            datasets_out = []
+            if datasets is not None:
                 for name, data in datasets.items():
-                    if np.issubdtype(data.dtype, np.integer):
-                        warnings.warn(f'Integer data set "{name}" will '
-                                      'not be volume-normalized.')
-                        continue
-                    data /= self.volumes
+                    if data.shape != self.dimension:
+                        raise ValueError(
+                            f'Cannot apply dataset "{name}" with '
+                            f"shape {data.shape} to mesh {self.id} "
+                            f"with dimensions {self.dimension}"
+                        )
 
-            # add data to the mesh
-            for name, data in datasets.items():
-                datasets_out.append(data)
-                arr = vtk.vtkDoubleArray()
-                arr.SetName(name)
-                arr.SetNumberOfTuples(data.size)
+                if volume_normalization:
+                    for name, data in datasets.items():
+                        if np.issubdtype(data.dtype, np.integer):
+                            warnings.warn(
+                                f'Integer data set "{name}" will '
+                                "not be volume-normalized."
+                            )
+                            continue
+                        data /= self.volumes
+
+                # add data to the mesh
+                for name, data in datasets.items():
+                    datasets_out.append(data)
+                    arr = vtk.vtkDoubleArray()
+                    arr.SetName(name)
+                    arr.SetNumberOfTuples(data.size)
 
-                for i in range(data.size):
-                    arr.SetTuple1(i, data.flat[i])
-                grid.GetCellData().AddArray(arr)
+                    for i in range(data.size):
+                        arr.SetTuple1(i, data.flat[i])
+                    grid.GetCellData().AddArray(arr)
 
-        writer.SetInputData(grid)
+            writer.SetInputData(grid)
 
-        writer.Write()
+            writer.Write()
+
+        else:
+            raise ValueError(
+                f"Unsupported file extension for '{filename}'. Extension must be '.hdf' or '.vtk'."
+            )
 
     @classmethod
     def from_hdf5(cls, group: h5py.Group, mesh_id: int, name: str):
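
As a worked illustration of the connectivity trimming and offset computation in the hunk above, here is a minimal standalone sketch for a hypothetical two-tetrahedron mesh; the 8-wide rows padded with -1 mirror what MOAB hands back for tet elements:

    import numpy as np

    # Hypothetical padded connectivity for two tets: each row holds up to
    # 8 point indices, with -1 padding after the 4 real tet indices.
    connectivity = np.array([
        [0, 1, 2, 3, -1, -1, -1, -1],
        [1, 2, 3, 4, -1, -1, -1, -1],
    ])

    trimmed = []
    for cell in connectivity:
        pad = np.where(cell == -1)[0]
        trimmed.append(cell[:pad[0]] if pad.size else cell)
    trimmed = np.array(trimmed, dtype="int32").flatten()

    # 4 points per tet, so offsets step by 4 and have n_elements + 1 entries
    offsets = np.arange(0, len(connectivity) * 4 + 1, 4)

    print(trimmed)  # [0 1 2 3 1 2 3 4]
    print(offsets)  # [0 4 8]

This matches the VTKHDF convention that cell i spans Connectivity[Offsets[i]:Offsets[i + 1]].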
root.create_dataset("Offsets", (0,), maxshape=(None,), dtype="i8") + root.create_dataset("Connectivity", (0,), maxshape=(None,), dtype="i8") + + append_dataset(root["NumberOfPoints"], np.array([len(self.vertices)])) + append_dataset(root["Points"], self.vertices) + append_dataset( + root["NumberOfConnectivityIds"], + np.array([len(trimmed_connectivity)]), + ) + append_dataset(root["Connectivity"], trimmed_connectivity) + append_dataset(root["NumberOfCells"], np.array([self.n_elements])) + append_dataset(root["Offsets"], offsets) + + # VTK_TETRA type is known as DAGMC only supports tet meshes + append_dataset( + root["Types"], np.full(self.n_elements, 10, dtype="uint8") + ) + + cell_data_group = root.create_group("CellData") - writer = vtk.vtkUnstructuredGridWriter() + for name, data in datasets.items(): - writer.SetFileName(str(filename)) + if data.shape != self.dimension: + raise ValueError( + f'Cannot apply dataset "{name}" with ' + f"shape {data.shape} to mesh {self.id} " + f"with dimensions {self.dimension}" + ) - grid = vtk.vtkUnstructuredGrid() + cell_data_group.create_dataset( + name, (0,), maxshape=(None,), dtype="float64", chunks=True + ) - vtk_pnts = vtk.vtkPoints() - vtk_pnts.SetData(nps.numpy_to_vtk(self.vertices)) - grid.SetPoints(vtk_pnts) + if volume_normalization: + data /= self.volumes + append_dataset(cell_data_group[name], data) - n_skipped = 0 - for elem_type, conn in zip(self.element_types, self.connectivity): - if elem_type == self._LINEAR_TET: - elem = vtk.vtkTetra() - elif elem_type == self._LINEAR_HEX: - elem = vtk.vtkHexahedron() - elif elem_type == self._UNSUPPORTED_ELEM: - n_skipped += 1 - else: - raise RuntimeError(f'Invalid element type {elem_type} found') - for i, c in enumerate(conn): - if c == -1: - break - elem.GetPointIds().SetId(i, c) + elif Path(filename).suffix == ".vtk": - grid.InsertNextCell(elem.GetCellType(), elem.GetPointIds()) + import vtk + from vtk.util import numpy_support as nps - if n_skipped > 0: - warnings.warn(f'{n_skipped} elements were not written because ' - 'they are not of type linear tet/hex') + if self.connectivity is None or self.vertices is None: + raise RuntimeError( + "This mesh has not been " "loaded from a statepoint file." 
diff --git a/tests/unit_tests/test_mesh.py b/tests/unit_tests/test_mesh.py
index 27322165f64..1cbb1c4a8ea 100644
--- a/tests/unit_tests/test_mesh.py
+++ b/tests/unit_tests/test_mesh.py
@@ -3,6 +3,7 @@
 import numpy as np
 import pytest
 import openmc
+from pathlib import Path
 
 
 @pytest.mark.parametrize("val_left,val_right", [(0, 0), (-1., -1.), (2.0, 2)])
@@ -449,3 +450,70 @@ def test_mesh_get_homogenized_materials():
     m5, = mesh_void.get_homogenized_materials(
         model, n_samples=1000, include_void=False)
     assert m5.get_mass_density('H1') == pytest.approx(1.0)
+
+
+def test_umesh(run_in_tmpdir, request):
+    """Performs a minimal UnstructuredMesh simulation, reads in the resulting statepoint
+    file and writes the mesh data to vtk and hdf files. It is necessary to read in the
+    unstructured mesh from a statepoint file to ensure it has all the required attributes
+    """
+
+    surf1 = openmc.Sphere(r=1000.0, boundary_type="vacuum")
+    cell1 = openmc.Cell(region=-surf1)
+    my_geometry = openmc.Geometry([cell1])
+
+    umesh = openmc.UnstructuredMesh(
+        request.path.parent.parent
+        / "regression_tests/unstructured_mesh/test_mesh_dagmc_tets.vtk",
+        "moab",
+    )
+    umesh.id = (
+        1  # setting ID to make it easier to get the mesh from the statepoint later
+    )
+    mesh_filter = openmc.MeshFilter(umesh)
+
+    # Create a mesh tally to score flux
+    mesh_tally = openmc.Tally(name="test_tally")
+    mesh_tally.filters = [mesh_filter]
+    mesh_tally.scores = ["flux"]
+
+    tallies = openmc.Tallies([mesh_tally])
+
+    my_source = openmc.IndependentSource()
+    my_source.space = openmc.stats.Point((0.4, 0, 0.4))
+
+    settings = openmc.Settings()
+    settings.run_mode = "fixed source"
+    settings.batches = 1
+    settings.particles = 100
+    settings.source = my_source
+
+    my_model = openmc.Model(
+        materials=None, geometry=my_geometry, settings=settings, tallies=tallies
+    )
+
+    statepoint_file = my_model.run()
+
+    statepoint = openmc.StatePoint(statepoint_file)
+
+    my_tally = statepoint.get_tally(name="test_tally")
+
+    umesh_from_sp = statepoint.meshes[1]
+
+    umesh_from_sp.write_data_to_vtk(
+        datasets={"mean": my_tally.mean.flatten()},
+        filename="test_mesh.vtk",
+    )
+    umesh_from_sp.write_data_to_vtk(
+        datasets={"mean": my_tally.mean.flatten()},
+        filename="test_mesh.hdf",
+    )
+    with pytest.raises(ValueError):
+        # Supported file extensions are vtk or hdf, not hdf5, so this should raise an error
+        umesh_from_sp.write_data_to_vtk(
+            datasets={"mean": my_tally.mean.flatten()},
+            filename="test_mesh.hdf5",
+        )
+
+    assert Path("test_mesh.vtk").exists()
+    assert Path("test_mesh.hdf").exists()
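
Since the VTKHDF output is plain HDF5, the file the new test writes can be inspected directly with h5py; a minimal sketch, assuming the test above has just produced test_mesh.hdf in the working directory:

    import h5py

    with h5py.File("test_mesh.hdf", "r") as f:
        root = f["VTKHDF"]
        print(root.attrs["Type"])         # b'UnstructuredGrid'
        print(root["NumberOfCells"][()])  # single entry: the element count
        print(root["Points"].shape)       # (n_points, 3)
        print(root["CellData/mean"][()])  # the flux values written above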
From f1ee07da387a9b21768e11e65eb4b765427f42f3 Mon Sep 17 00:00:00 2001
From: shimwell
Date: Wed, 8 Jan 2025 17:41:49 +0100
Subject: [PATCH 2/7] testing data shape

---
 tests/unit_tests/test_mesh.py | 41 ++++++++++++++++++++---------------
 1 file changed, 23 insertions(+), 18 deletions(-)

diff --git a/tests/unit_tests/test_mesh.py b/tests/unit_tests/test_mesh.py
index 1cbb1c4a8ea..9c652fcb97a 100644
--- a/tests/unit_tests/test_mesh.py
+++ b/tests/unit_tests/test_mesh.py
@@ -453,9 +453,10 @@
 
 
 def test_umesh(run_in_tmpdir, request):
-    """Performs a minimal UnstructuredMesh simulation, reads in the resulting statepoint
-    file and writes the mesh data to vtk and hdf files. It is necessary to read in the
-    unstructured mesh from a statepoint file to ensure it has all the required attributes
+    """Performs a minimal UnstructuredMesh simulation, reads in the resulting
+    statepoint file and writes the mesh data to vtk and hdf files. It is
+    necessary to read in the unstructured mesh from a statepoint file to ensure
+    it has all the required attributes
     """
 
     surf1 = openmc.Sphere(r=1000.0, boundary_type="vacuum")
     cell1 = openmc.Cell(region=-surf1)
     my_geometry = openmc.Geometry([cell1])
@@ -467,9 +468,8 @@ def test_umesh(run_in_tmpdir, request):
         / "regression_tests/unstructured_mesh/test_mesh_dagmc_tets.vtk",
         "moab",
     )
-    umesh.id = (
-        1  # setting ID to make it easier to get the mesh from the statepoint later
-    )
+    # setting ID to make it easier to get the mesh from the statepoint later
+    umesh.id = 1
     mesh_filter = openmc.MeshFilter(umesh)
 
     # Create a mesh tally to score flux
@@ -480,12 +480,11 @@ def test_umesh(run_in_tmpdir, request):
     tallies = openmc.Tallies([mesh_tally])
 
     my_source = openmc.IndependentSource()
-    my_source.space = openmc.stats.Point((0.4, 0, 0.4))
 
     settings = openmc.Settings()
     settings.run_mode = "fixed source"
-    settings.batches = 1
-    settings.particles = 100
+    settings.batches = 2
+    settings.particles = 10
     settings.source = my_source
 
     my_model = openmc.Model(
@@ -500,20 +499,26 @@ def test_umesh(run_in_tmpdir, request):
 
     umesh_from_sp = statepoint.meshes[1]
 
-    umesh_from_sp.write_data_to_vtk(
-        datasets={"mean": my_tally.mean.flatten()},
-        filename="test_mesh.vtk",
-    )
-    umesh_from_sp.write_data_to_vtk(
-        datasets={"mean": my_tally.mean.flatten()},
-        filename="test_mesh.hdf",
-    )
+    datasets = {
+        "mean": my_tally.mean.flatten(),
+        "std_dev": my_tally.std_dev.flatten()
+    }
+
+    umesh_from_sp.write_data_to_vtk(datasets=datasets, filename="test_mesh.vtk")
+    umesh_from_sp.write_data_to_vtk(datasets=datasets, filename="test_mesh.hdf")
+
     with pytest.raises(ValueError):
         # Supported file extensions are vtk or hdf, not hdf5, so this should raise an error
         umesh_from_sp.write_data_to_vtk(
-            datasets={"mean": my_tally.mean.flatten()},
+            datasets=datasets,
             filename="test_mesh.hdf5",
         )
+    with pytest.raises(ValueError):
+        # Supported file extensions are vtk or hdf, not hdf5, so this should raise an error
+        umesh_from_sp.write_data_to_vtk(
+            datasets={'incorrectly_shaped_data': np.array([1, 2, 3])},
+            filename="test_mesh.hdf",
+        )
 
     assert Path("test_mesh.vtk").exists()
     assert Path("test_mesh.hdf").exists()
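
With both the mean and standard deviation now written per element, derived quantities can be added to the same dict; a small sketch that continues from the test's variables (the zero guard for unscored elements is just one reasonable choice):

    import numpy as np

    mean = my_tally.mean.flatten()
    std_dev = my_tally.std_dev.flatten()

    # Relative error, avoiding division by zero in elements with no scores
    rel_err = np.divide(std_dev, mean, out=np.zeros_like(mean), where=mean > 0)

    datasets = {"mean": mean, "std_dev": std_dev, "rel_err": rel_err}
    umesh_from_sp.write_data_to_vtk(datasets=datasets, filename="test_mesh.hdf")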
From 52e6b8a9c07fe28b4f098e711b8d0382a4e0c61c Mon Sep 17 00:00:00 2001
From: shimwell
Date: Wed, 8 Jan 2025 17:46:52 +0100
Subject: [PATCH 3/7] improved tests, added match string

---
 tests/unit_tests/test_mesh.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/unit_tests/test_mesh.py b/tests/unit_tests/test_mesh.py
index 9c652fcb97a..6876ad89563 100644
--- a/tests/unit_tests/test_mesh.py
+++ b/tests/unit_tests/test_mesh.py
@@ -507,14 +507,14 @@ def test_umesh(run_in_tmpdir, request):
     umesh_from_sp.write_data_to_vtk(datasets=datasets, filename="test_mesh.vtk")
     umesh_from_sp.write_data_to_vtk(datasets=datasets, filename="test_mesh.hdf")
 
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError, match="Unsupported file extension"):
         # Supported file extensions are vtk or hdf, not hdf5, so this should raise an error
         umesh_from_sp.write_data_to_vtk(
             datasets=datasets,
             filename="test_mesh.hdf5",
         )
-    with pytest.raises(ValueError):
-        # Supported file extensions are vtk or hdf, not hdf5, so this should raise an error
+    with pytest.raises(ValueError, match="Cannot apply dataset"):
+        # The shape of the data should match the shape of the mesh, so this should raise an error
         umesh_from_sp.write_data_to_vtk(
             datasets={'incorrectly_shaped_data': np.array([1, 2, 3])},
             filename="test_mesh.hdf",
         )
From b2c775c55ab13737c94bf0aa70107d00a7edffba Mon Sep 17 00:00:00 2001
From: Jonathan Shimwell
Date: Thu, 9 Jan 2025 09:32:10 +0100
Subject: [PATCH 4/7] skip single test if no dagmc

---
 tests/unit_tests/test_mesh.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/tests/unit_tests/test_mesh.py b/tests/unit_tests/test_mesh.py
index 6876ad89563..c87b193b9b2 100644
--- a/tests/unit_tests/test_mesh.py
+++ b/tests/unit_tests/test_mesh.py
@@ -451,7 +451,11 @@ def test_mesh_get_homogenized_materials():
         model, n_samples=1000, include_void=False)
     assert m5.get_mass_density('H1') == pytest.approx(1.0)
 
+skip_if_no_dagmc = pytest.mark.skipif(
+    not openmc.lib._dagmc_enabled(),
+    reason="DAGMC CAD geometry is not enabled.")
 
+@skip_if_no_dagmc
 def test_umesh(run_in_tmpdir, request):
From aa1383571ef7b0136c5da8fccae6d4e606235442 Mon Sep 17 00:00:00 2001
From: Jon Shimwell
Date: Thu, 9 Jan 2025 10:28:18 +0100
Subject: [PATCH 5/7] review improvements by mwestphal

---
 openmc/mesh.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/openmc/mesh.py b/openmc/mesh.py
index 019c44c8b46..f681ac79c54 100644
--- a/openmc/mesh.py
+++ b/openmc/mesh.py
@@ -2369,7 +2369,7 @@ def write_data_to_vtk(
         Parameters
         ----------
         filename : str or pathlib.Path
-            Name of the VTK file to write. If the filename ends in '.hdf' then a VTKHDF
+            Name of the VTK file to write. If the filename ends in '.vtkhdf' then a VTKHDF
             format file will be written and can be opened with ParaView versions 5.13.0
             and above; if the filename ends in '.vtk' then a legacy .vtk file will be written.
         datasets : dict
@@ -2381,7 +2381,7 @@ def write_data_to_vtk(
             the volume of the mesh elements
         """
 
-        if Path(filename).suffix == ".hdf":
+        if Path(filename).suffix == ".vtkhdf":
 
             def append_dataset(dset, array):
                 """Convenience function to append data to an HDF5 dataset"""
@@ -2558,7 +2558,7 @@ def write_data_to_vtk(
 
         else:
             raise ValueError(
-                f"Unsupported file extension for '{filename}'. Extension must be '.hdf' or '.vtk'."
+                f"Unsupported file extension for '{filename}'. Extension must be '.vtkhdf' or '.vtk'."
             )
 
     @classmethod
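
The dispatch on Path(filename).suffix explains why 'test_mesh.hdf5' is rejected in the tests: only the final suffix is compared, and after this patch only '.vtkhdf' and '.vtk' are accepted. A quick sketch:

    from pathlib import Path

    for name in ("flux.vtk", "flux.vtkhdf", "flux.hdf5"):
        print(name, "->", Path(name).suffix)
    # flux.vtk -> .vtk
    # flux.vtkhdf -> .vtkhdf
    # flux.hdf5 -> .hdf5  (neither accepted suffix, so ValueError is raised)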
From 177e8dc8b9ede14a6a4aa3bad5cb3e07a010db93 Mon Sep 17 00:00:00 2001
From: Jonathan Shimwell
Date: Thu, 9 Jan 2025 14:45:24 +0100
Subject: [PATCH 6/7] changed .hdf to .vtkhdf

---
 tests/unit_tests/test_mesh.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/unit_tests/test_mesh.py b/tests/unit_tests/test_mesh.py
index c87b193b9b2..8b4ad89faa1 100644
--- a/tests/unit_tests/test_mesh.py
+++ b/tests/unit_tests/test_mesh.py
@@ -509,10 +509,10 @@ def test_umesh(run_in_tmpdir, request):
     umesh_from_sp.write_data_to_vtk(datasets=datasets, filename="test_mesh.vtk")
-    umesh_from_sp.write_data_to_vtk(datasets=datasets, filename="test_mesh.hdf")
+    umesh_from_sp.write_data_to_vtk(datasets=datasets, filename="test_mesh.vtkhdf")
 
     with pytest.raises(ValueError, match="Unsupported file extension"):
-        # Supported file extensions are vtk or hdf, not hdf5, so this should raise an error
+        # Supported file extensions are vtk or vtkhdf, not hdf5, so this should raise an error
         umesh_from_sp.write_data_to_vtk(
             datasets=datasets,
             filename="test_mesh.hdf5",
         )
     with pytest.raises(ValueError, match="Cannot apply dataset"):
         # The shape of the data should match the shape of the mesh, so this should raise an error
         umesh_from_sp.write_data_to_vtk(
             datasets={'incorrectly_shaped_data': np.array([1, 2, 3])},
-            filename="test_mesh.hdf",
+            filename="test_mesh.vtkhdf",
         )
 
     assert Path("test_mesh.vtk").exists()
-    assert Path("test_mesh.hdf").exists()
+    assert Path("test_mesh.vtkhdf").exists()

From ba4c097db9cdedc11b1f36db6a1dfedc7fd53b7c Mon Sep 17 00:00:00 2001
From: Jonathan Shimwell
Date: Thu, 9 Jan 2025 14:46:20 +0100
Subject: [PATCH 7/7] changed .hdf to .vtkhdf

---
 tests/unit_tests/test_mesh.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/unit_tests/test_mesh.py b/tests/unit_tests/test_mesh.py
index 8b4ad89faa1..9ae5ae6f649 100644
--- a/tests/unit_tests/test_mesh.py
+++ b/tests/unit_tests/test_mesh.py
@@ -458,7 +458,7 @@ def test_mesh_get_homogenized_materials():
 @skip_if_no_dagmc
 def test_umesh(run_in_tmpdir, request):
     """Performs a minimal UnstructuredMesh simulation, reads in the resulting
-    statepoint file and writes the mesh data to vtk and hdf files. It is
+    statepoint file and writes the mesh data to vtk and vtkhdf files. It is
     necessary to read in the unstructured mesh from a statepoint file to ensure
     it has all the required attributes
     """
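
Taken together, the series leaves the end-user workflow looking like this minimal sketch (the statepoint filename and mesh ID are illustrative):

    import openmc

    with openmc.StatePoint("statepoint.2.h5") as sp:
        tally = sp.get_tally(name="test_tally")
        umesh = sp.meshes[1]
        umesh.write_data_to_vtk(
            datasets={"mean": tally.mean.flatten()},
            filename="flux_mesh.vtkhdf",  # '.vtkhdf' selects the VTKHDF writer
        )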