Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added implementations of mesh statistic check and field assignment check #19

Draft
wants to merge 1 commit into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
143 changes: 143 additions & 0 deletions geosx_mesh_doctor/checks/add_fields.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,143 @@
import logging
from dataclasses import dataclass
from math import sqrt
from numpy import empty
from numpy.random import rand

from vtkmodules.util.numpy_support import (
numpy_to_vtk,
vtk_to_numpy, )

from vtkmodules.vtkCommonCore import (
vtkDoubleArray, )

from . import vtk_utils

@dataclass( frozen=True )
class Options:
    # Where the field lives: "node" (point data) or "cell" (cell data).
    support: str
    # Name of the array that will be added to the mesh.
    field_name: str
    # Either the literal string "function" (build an analytic example field)
    # or a path to a source mesh (*.vtu / *.vtk) to copy the field from.
    source: str
    # Output description of the mesh file written after the field is added.
    out_vtk: vtk_utils.VtkOutput


@dataclass( frozen=True )
class Result:
    # True when the field was successfully added to the mesh, False otherwise.
    info: bool

def __analytic_field(mesh, support, name) -> bool:
    """Add an example analytic field called *name* to *mesh*.

    For "node" support the field is the Euclidean distance of each point to
    the center of the mesh bounding box; for "cell" support it is a random
    field. Returns True on success, False for an unknown support option.
    """
    if support == 'node':
        # example function: distance from mesh center
        coords = vtk_to_numpy(mesh.GetPoints().GetData())
        center = (coords.max(axis=0) + coords.min(axis=0)) / 2
        # Vectorized distance computation; the original looped over every
        # point in Python and also created an unused vtkDoubleArray().
        data_np = (((coords - center) ** 2).sum(axis=1)) ** 0.5

        data_arr = numpy_to_vtk(data_np)
        data_arr.SetName(name)
        mesh.GetPointData().AddArray(data_arr)
        return True

    elif support == 'cell':
        # example function: random field
        ne = mesh.GetNumberOfCells()
        data_arr = numpy_to_vtk(rand(ne, 1))
        data_arr.SetName(name)
        mesh.GetCellData().AddArray(data_arr)
        return True
    else:
        logging.error('incorrect support option. Options are node, cell')
        return False

def __compatible_meshes(dest_mesh, source_mesh) -> bool:
    """Return True when the two meshes are identical enough for field transfer.

    For now this requires the same number of cells and nodes, identical point
    coordinates and identical per-cell points.
    """
    if dest_mesh.GetNumberOfCells() != source_mesh.GetNumberOfCells():
        logging.error('meshes have different number of cells')
        return False
    if dest_mesh.GetNumberOfPoints() != source_mesh.GetNumberOfPoints():
        logging.error('meshes have different number of nodes')
        return False

    # Compare all point coordinates in one vectorized pass (the original
    # compared point tuples one by one in Python).
    # NOTE(review): this is exact float equality — meshes written with a
    # different precision will be flagged as different; consider a tolerance.
    dest_coords = vtk_to_numpy(dest_mesh.GetPoints().GetData())
    source_coords = vtk_to_numpy(source_mesh.GetPoints().GetData())
    if not (dest_coords == source_coords).all():
        logging.error('at least one node is in a different location')
        return False

    for i in range(dest_mesh.GetNumberOfCells()):
        dest_cell_pts = vtk_to_numpy(dest_mesh.GetCell(i).GetPoints().GetData())
        source_cell_pts = vtk_to_numpy(source_mesh.GetCell(i).GetPoints().GetData())
        if not (dest_cell_pts == source_cell_pts).all():
            logging.error('at least one cell has different nodes')
            return False

    return True






def __transfer_field(mesh, support, field_name, source) -> bool:
    """Copy the array *field_name* from the mesh stored at *source* onto *mesh*.

    The two meshes must be compatible (see __compatible_meshes). Returns True
    on success, False otherwise.
    """
    from_mesh = vtk_utils.read_mesh( source )
    if not __compatible_meshes(mesh, from_mesh):
        logging.error('meshes are not the same')
        return False

    if support == 'cell':
        data = from_mesh.GetCellData().GetArray(field_name)
        if data is None:
            logging.error('Requested field does not exist on source mesh')
            return False
        mesh.GetCellData().AddArray(data)
    elif support == 'node':
        data = from_mesh.GetPointData().GetArray(field_name)
        if data is None:
            logging.error('Requested field does not exist on source mesh')
            return False
        # BUG FIX: the original returned False right after this AddArray,
        # so every successful point-data transfer was reported as a failure.
        mesh.GetPointData().AddArray(data)
    else:
        logging.error('incorrect support option. Options are node, cell')
        return False
    return True


def __check( mesh, options: Options ) -> Result:
    """Add the requested field to *mesh* and write it out on success.

    options.source selects the data origin: the literal "function" builds an
    analytic field, a *.vtu / *.vtk path transfers the field from that mesh.
    """
    if options.source == 'function':
        succ = __analytic_field(mesh, options.support, options.field_name)
    elif options.source.endswith(('.vtu', '.vtk')):
        succ = __transfer_field(mesh, options.support, options.field_name, options.source)
    else:
        logging.error('incorrect source option. Options are function, *.vtu, *.vtk.')
        succ = False
    # Only write the mesh when the field was actually added.
    if succ:
        vtk_utils.write_mesh( mesh, options.out_vtk )
    return Result(info=succ)
    #TODO: Better exception handle



def check( vtk_input_file: str, options: Options ) -> Result:
    """Read the mesh from *vtk_input_file* and run the field addition check on it."""
    loaded_mesh = vtk_utils.read_mesh( vtk_input_file )
    return __check( loaded_mesh, options )
102 changes: 102 additions & 0 deletions geosx_mesh_doctor/checks/mesh_stats.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
import logging
from dataclasses import dataclass
from vtkmodules.util.numpy_support import (
numpy_to_vtk,
vtk_to_numpy, )

from . import vtk_utils

@dataclass( frozen=True )
class Options:
    # Placeholder — the mesh_stats check does not read any option yet
    # (the parsing layer fills this with a dummy value).
    info: str


@dataclass( frozen=True )
class Result:
    """Summary statistics of a mesh gathered by the mesh_stats check."""
    num_elements: int
    num_nodes: int
    # Number of distinct cell types and their human readable names / counts
    # (the three parallel lists below share the same ordering).
    num_cell_types: int
    cell_types: list
    cell_type_counts: list
    # Scalar (1-component) cell arrays: names with their min / max values
    # (parallel lists, same index = same array).
    scalar_cell_data_names: list
    scalar_cell_data_mins: list
    scalar_cell_data_maxs: list
    # Names of multi-component (vector/tensor) cell arrays.
    tensor_cell_data_names: list
    # Same split for point data.
    scalar_point_data_names: list
    scalar_point_data_mins: list
    scalar_point_data_maxs: list
    tensor_point_data_names: list
    # Whether a global id array is attached to the point / cell data.
    has_point_global_ids: bool
    has_cell_global_ids: bool
    # Bounding box of the point coordinates (per-axis min / max).
    min_coords: list
    max_coords: list
    #TODO: compress this, or just print the stuff here and dont pass it


def __check( mesh, options: Options ) -> Result:
    """Collect basic statistics of *mesh*: sizes, cell types, field ranges,
    global id presence and bounding box. *options* is currently unused."""
    ne = mesh.GetNumberOfCells()
    nn = mesh.GetNumberOfPoints()

    # Distinct cell types present in the mesh.
    # BUG FIX: the original iterated mesh.GetCellType(ct) for ct in
    # range(nct), which returns the type of the ct-th *cell*, not the ct-th
    # distinct type, and used GetSize() (allocated size) instead of the
    # number of stored values.
    distinct_types = mesh.GetDistinctCellTypesArray()
    nct = distinct_types.GetNumberOfValues()
    cts = [vtk_utils.vtkid_to_string(distinct_types.GetValue(i)) for i in range(nct)]

    # Count cells of each type in one O(ne) pass (the original was
    # O(ne * nct) with repeated string comparisons).
    counts_by_name = {}
    for c in range(ne):
        type_name = vtk_utils.vtkid_to_string(mesh.GetCellType(c))
        counts_by_name[type_name] = counts_by_name.get(type_name, 0) + 1
    ct_counts = [counts_by_name.get(name, 0) for name in cts]

    # Cell data: split arrays into scalars (with min/max) and vectors/tensors
    # in a single pass over the arrays.
    cd_scalar_names, cd_scalar_mins, cd_scalar_maxs = [], [], []
    cd_tensor_names = []
    cell_data = mesh.GetCellData()
    for cdi in range(cell_data.GetNumberOfArrays()):
        cd = cell_data.GetArray(cdi)
        if cd.GetNumberOfComponents() == 1:  # assumes scalar cell data for max and min
            cd_scalar_names.append(cd.GetName())
            cd_np = vtk_to_numpy(cd)
            cd_scalar_mins.append(cd_np.min())
            cd_scalar_maxs.append(cd_np.max())
        else:
            cd_tensor_names.append(cd.GetName())

    # Point data: same split.
    pd_scalar_names, pd_scalar_mins, pd_scalar_maxs = [], [], []
    pd_tensor_names = []
    point_data = mesh.GetPointData()
    for pdi in range(point_data.GetNumberOfArrays()):
        pd = point_data.GetArray(pdi)
        if pd.GetNumberOfComponents() == 1:  # assumes scalar point data for max and min
            pd_scalar_names.append(pd.GetName())
            pd_np = vtk_to_numpy(pd)
            pd_scalar_mins.append(pd_np.min())
            pd_scalar_maxs.append(pd_np.max())
        else:
            pd_tensor_names.append(pd.GetName())

    # GetGlobalIds() returns None when no global id array is attached.
    point_ids = bool(point_data.GetGlobalIds())
    cell_ids = bool(cell_data.GetGlobalIds())

    coords = vtk_to_numpy(mesh.GetPoints().GetData())

    return Result( num_elements=ne, num_nodes=nn, num_cell_types=nct,
                   cell_types=cts, cell_type_counts=ct_counts,
                   scalar_cell_data_names=cd_scalar_names,
                   scalar_cell_data_mins=cd_scalar_mins,
                   scalar_cell_data_maxs=cd_scalar_maxs,
                   tensor_cell_data_names=cd_tensor_names,
                   scalar_point_data_names=pd_scalar_names,
                   scalar_point_data_mins=pd_scalar_mins,
                   scalar_point_data_maxs=pd_scalar_maxs,
                   tensor_point_data_names=pd_tensor_names,
                   has_point_global_ids=point_ids, has_cell_global_ids=cell_ids,
                   min_coords=coords.min(axis=0), max_coords=coords.max(axis=0) )

def check( vtk_input_file: str, options: Options ) -> Result:
    """Read the mesh from *vtk_input_file* and compute its statistics."""
    loaded_mesh = vtk_utils.read_mesh( vtk_input_file )
    return __check( loaded_mesh, options )
44 changes: 44 additions & 0 deletions geosx_mesh_doctor/checks/vtk_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,21 @@
vtkXMLUnstructuredGridWriter,
)

from vtkmodules.vtkCommonDataModel import (
VTK_HEXAGONAL_PRISM,
VTK_HEXAHEDRON,
VTK_PENTAGONAL_PRISM,
VTK_PYRAMID,
VTK_TETRA,
VTK_VOXEL,
VTK_WEDGE,
VTK_TRIANGLE,
VTK_QUAD,
VTK_PIXEL,
VTK_LINE,
VTK_VERTEX,
)


@dataclass( frozen=True )
class VtkOutput:
Expand Down Expand Up @@ -138,3 +153,32 @@ def write_mesh( mesh: vtkUnstructuredGrid, vtk_output: VtkOutput ) -> int:
logging.critical( f"Could not find the appropriate VTK writer for extension \"{file_extension}\". Dying..." )
sys.exit( 1 )
return 0 if success_code else 2 # the Write member function return 1 in case of success, 0 otherwise.

# Human readable names of the VTK cell type ids this tool knows about.
# Uses the VTK_* constants imported at the top of this file instead of the
# magic literals the original matched on, so the ids cannot silently drift.
# (A match/case on bare names would *bind* them, not compare, hence a dict.)
_VTK_TYPE_NAMES = {
    VTK_VERTEX: 'Vertex',
    VTK_LINE: 'Line',
    VTK_TRIANGLE: 'Triangle',
    VTK_PIXEL: 'Pixel',
    VTK_QUAD: 'Quad',
    VTK_TETRA: 'Tetra',
    VTK_VOXEL: 'Voxel',
    VTK_HEXAHEDRON: 'Hex',
    VTK_WEDGE: 'Wedge',
    VTK_PYRAMID: 'Pyramid',
    VTK_PENTAGONAL_PRISM: 'Pentagonal prism',
    VTK_HEXAGONAL_PRISM: 'Hexagonal Prism',
}

def vtkid_to_string(id: int) -> str:
    """Return a human readable name for the VTK cell type *id*,
    or 'Unknown type' when the id is not recognized."""
    return _VTK_TYPE_NAMES.get(id, 'Unknown type')
2 changes: 2 additions & 0 deletions geosx_mesh_doctor/parsing/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
NON_CONFORMAL = "non_conformal"
SELF_INTERSECTING_ELEMENTS = "self_intersecting_elements"
SUPPORTED_ELEMENTS = "supported_elements"
# Sub-command names of the mesh statistics and field addition checks.
MESH_STATS = "mesh_stats"
ADD_FIELDS = "add_fields"


@dataclass( frozen=True )
Expand Down
36 changes: 36 additions & 0 deletions geosx_mesh_doctor/parsing/add_fields_parsing.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import logging

from checks.add_fields import Options, Result


from . import vtk_output_parsing, ADD_FIELDS

# Command line flag names of the add_fields sub-command.
__SUPPORT = "support"
__NAME = "name"
__SOURCE = "source"

def fill_subparser( subparsers ) -> None:
    """Register the add_fields sub-command and its options on *subparsers*."""
    # The help strings had f-prefixes without placeholders; plain strings now.
    p = subparsers.add_parser( ADD_FIELDS,
                               help="Add cell or point data to a mesh." )
    p.add_argument( '--' + __SUPPORT,
                    type=str,
                    required=True,
                    help="[string]: Where to define field (point/cell)." )
    p.add_argument( '--' + __NAME,
                    type=str,
                    required=True,
                    help="[string]: Name of the field to add." )
    p.add_argument( '--' + __SOURCE,
                    type=str,
                    required=True,
                    help="[string]: Where field data to add comes from (function, mesh)." )
    vtk_output_parsing.fill_vtk_output_subparser( p )

def convert( parsed_options ) -> Options:
    """Build the add_fields Options from the parsed command line arguments."""
    return Options( support=parsed_options[__SUPPORT],
                    field_name=parsed_options[__NAME],
                    source=parsed_options[__SOURCE],
                    out_vtk=vtk_output_parsing.convert( parsed_options ) )

def display_results( options: Options, result: Result ):
    """Log an error when the add_fields check reported a failure."""
    # result.info is a plain bool; test truthiness rather than `!= True`.
    if not result.info:
        logging.error( "Field addition failed" )
40 changes: 40 additions & 0 deletions geosx_mesh_doctor/parsing/mesh_stats_parsing.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import logging

from checks.mesh_stats import Options, Result

from . import MESH_STATS

def fill_subparser( subparsers ) -> None:
    """Register the mesh_stats sub-command (it takes no specific options)."""
    # The original bound the parser to an unused local and used a
    # placeholder-free f-string; both removed.
    subparsers.add_parser( MESH_STATS,
                           help="Outputs basic properties of a mesh." )

def convert( parsed_options ) -> Options:
    """Build the mesh_stats Options from the parsed command line arguments.

    The check does not read any option yet, so a placeholder value is used.
    """
    return Options( info="test" )

def display_results( options: Options, result: Result ):
    """Log the mesh statistics gathered by the mesh_stats check."""
    logging.info( f"The mesh has {result.num_elements} elements and {result.num_nodes} nodes." )
    logging.info( f"There are {result.num_cell_types} different types of cell in the mesh:" )
    # The parallel lists share an ordering (see Result), so zip them instead
    # of indexing with range(len(...)).
    for cell_type, count in zip(result.cell_types, result.cell_type_counts):
        logging.info( f"\t {cell_type} \t ({count} cells)" )

    logging.info( f"The domain is contained in {result.min_coords[0]} <= x <= {result.max_coords[0]}")
    logging.info( f" {result.min_coords[1]} <= y <= {result.max_coords[1]}")
    logging.info( f" {result.min_coords[2]} <= z <= {result.max_coords[2]}")

    logging.info( f"Does the mesh have global point ids: {result.has_point_global_ids}" )
    logging.info( f"Does the mesh have global cell ids: {result.has_cell_global_ids}" )

    logging.info( f"There are {len(result.scalar_cell_data_names)} scalar fields on the cells:" )
    for name, data_min, data_max in zip(result.scalar_cell_data_names,
                                        result.scalar_cell_data_mins,
                                        result.scalar_cell_data_maxs):
        logging.info( f"\t {name} \t min = {data_min} \t max = {data_max}" )
    logging.info( f"There are {len(result.tensor_cell_data_names)} vector/tensor fields on the cells:" )
    for name in result.tensor_cell_data_names:
        logging.info( f"\t {name}" )
    logging.info( f"There are {len(result.scalar_point_data_names)} scalar fields on the points:" )
    for name, data_min, data_max in zip(result.scalar_point_data_names,
                                        result.scalar_point_data_mins,
                                        result.scalar_point_data_maxs):
        logging.info( f"\t {name} \t min = {data_min} \t max = {data_max}" )
    logging.info( f"There are {len(result.tensor_point_data_names)} vector/tensor fields on the points:" )
    for name in result.tensor_point_data_names:
        logging.info( f"\t {name}" )
Loading
Loading