Introduce a CLI using argparse; YAML is no longer required.
YuXHe15 committed Jul 5, 2023
1 parent 09da4f6 commit a7927df
Showing 26 changed files with 704 additions and 229 deletions.
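The CLI entry point itself is not among the hunks rendered below, but the reworked BaseWorkflow.__init__ now consumes an argparse namespace and reads at least args.yaml_dir, args.step_dir, args.edit and args.remove. A minimal sketch of such a parser, assuming these attribute names and a hypothetical entry-point module (the commit's actual flag names may differ), could look like this:

import argparse

def build_parser() -> argparse.ArgumentParser:
    # Hypothetical parser mirroring the attributes BaseWorkflow.data_init reads;
    # the flag names are assumptions, not necessarily the commit's actual CLI.
    parser = argparse.ArgumentParser(description="amworkflow command-line interface")
    parser.add_argument("--yaml-dir", dest="yaml_dir", default=None,
                        help="optional YAML parameter file (no longer required)")
    parser.add_argument("--step-dir", dest="step_dir", default=None,
                        help="optional STEP file (.stp/.step) to import")
    parser.add_argument("--edit", action="store_true",
                        help="edit an existing model profile in the database")
    parser.add_argument("--remove", action="store_true",
                        help="remove an existing model profile from the database")
    return parser

if __name__ == "__main__":
    args = build_parser().parse_args()
    # BaseWorkflow(args) then routes on which of these inputs is present.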
9 changes: 9 additions & 0 deletions .gitignore
@@ -179,3 +179,12 @@ test_main

# pyocc-utils lib
pythonocc-utils

#db_files
/amworkflow/src/infrastructure/database/files

#downloaded_files
usecases/param_wall
!usecases/param_wall/dodo.py
!usecases/param_wall/test1.py
!usecases/param_wall/param_wall.py
106 changes: 13 additions & 93 deletions amworkflow/src/constants/data_model.py
@@ -3,98 +3,18 @@
from polyfactory.factories.pydantic_factory import ModelFactory
from amworkflow.src.utils.parser import yaml_parser
from amworkflow.src.constants.enums import Directory as D
class BatchParameter(BaseModel):
isbatch: bool

class BaseGeometry(BaseModel):
radius: Optional[float] = None
length: Optional[float] = None
endpoint: float
num: int
class MapParamModel(object):
def __init__(self, data: list, label: list):
self.data = data
self.label = label
self.mapping()

@validator("*")
def validate_length_and_radius(cls, value, values):
if 'length' in values and 'radius' in values:
if values['length'] is not None and values['radius'] is not None:
raise ValueError("Only one of length or radius should have a value, or both should be None.")
elif values['length'] is None and values['radius'] is None:
raise ValueError("Either length or radius should have a value, but not both should be None.")

# elif values['length'] is not None:
# if values['length'] > values['endpoint']:
# raise ValueError("length should not be larger than endpoint.")

# elif values['radius'] is not None:
# if values['radius'] > values['endpoint']:
# raise ValueError("radius should not be larger than endpoint.")
return value

class BaseMeshLayer(BaseModel):
config: bool
num: float

class WallGeometryParameter(BaseModel):
length: BaseGeometry
height: BaseGeometry
width: BaseGeometry
radius: BaseGeometry

class STLParameter(BaseModel):
linear_deflection: float
angular_deflection: float

class MeshParameter(BaseModel):
layer_num: BaseMeshLayer
layer_thickness: BaseMeshLayer
mesh_size_factor: PositiveFloat


class WallParam(BaseModel):
batch_parameter: BatchParameter
mesh_parameter: MeshParameter
geometry_parameter: WallGeometryParameter
stl_parameter: STLParameter

class DB_WallGeometryFile(BaseModel):
batch_num: Optional[PositiveInt]
withCurve: Optional[bool]
length: Optional[PositiveFloat]
width: Optional[PositiveFloat]
height: Optional[PositiveFloat]
radius: Optional[float]
linear_deflection: Optional[PositiveFloat]
angular_deflection: Optional[PositiveFloat]
filename: Optional[str]
stl_hashname: Optional[constr(max_length=32, min_length=32)]

class DB_XdmfFile(BaseModel):
xdmf_hashname: Optional[constr(max_length=32, min_length=32)]
mesh_size_factor: Optional[PositiveFloat]
layer_thickness: Optional[ PositiveFloat]
layer_num: Optional[PositiveInt]
batch_num: Optional[PositiveInt]
stl_hashname: Optional[constr(max_length=32, min_length=32)]
filename: Optional[str]

class DB_H5File(BaseModel):
h5_hashname: Optional[constr(max_length=32, min_length=32)]
batch_num: Optional[PositiveInt]
xdmf_hashname: Optional[constr(max_length=32, min_length=32)]
filename: Optional[str]

class DBFactory(ModelFactory[DB_WallGeometryFile]):
__model__ = DB_WallGeometryFile

# data = yaml_parser(D.USECASE_PATH_PARAMWALL_PATH.value, "test1.yaml")
# # print(data)
# parseData = WallParam(**data)
# for key, item in parseData.geometry_parameter:
# if key == "with_curve":
# print(key, item)
# continue
# # try: print(key, item.length, item.endpoint)
# # except: print(key, item.radius, item.endpoint)
# print(key, item.radius, item.length)

# db_model = DBFactory.build()
# print(db_model.dict())
def mapping(self):
for ind, lbl in enumerate(self.label):
setattr(self, lbl, self.data[ind])

# lab = ["a", "b", "c"]
# val = [2, 3, 1]
# dat = MapParamModel(val, lab)
# print(dat.a)
14 changes: 13 additions & 1 deletion amworkflow/src/constants/enums.py
@@ -14,4 +14,16 @@ class Directory(Enum):

class Timestamp(Enum):
YY_MM_DD_HH_MM_SS = "%y%m%d%H%M%S"
YYYY_MM_DD_HH_MM = "%Y%m%d%H%M"

class ParameterLabel(Enum):
GEOM_PARAM = "geometry_parameter"
ENDPOINT = "endpoint"
STARTPOINT = "startpoint"
NUM = "num"
BATCH_PARAM = "batch_parameter"
IS_BATCH = "isbatch"
MESH_PARAM = "mesh_parameter"
STL_PARAM = "stl_param"
MDL_PROF = "model_profile"
MDL_NAME = "model_name"
30 changes: 29 additions & 1 deletion amworkflow/src/constants/exceptions.py
@@ -1,2 +1,30 @@
import numpy as np
class DimensionViolationException(Exception):
pass

class GmshUseBeforeInitializedException(Exception):
def __init__(self, message = "Gmsh must be initialized first!"):
self.message = message
super().__init__(message)

class DimensionInconsistencyException(Exception):
def __init__(self, arr_a, arr_b):
self.dim_a = np.array(arr_a).shape[0]
self.dim_b = np.array(arr_b).shape[0]
self.message = f"Dimensions are inconsistent. Got Dim A {self.dim_a}, Dim B {self.dim_b}."
super().__init__(self.message)

class NoDataInDatabaseException(Exception):
def __init__(self, item: str):
self.message = f"{item} not found."
super().__init__(self.message)

class InvalidFileFormatException(Exception):
def __init__(self, item: str):
self.message = f"Invalid file format {item} imported."
super().__init__(self.message)

class InsufficientDataException(Exception):
def __init__(self):
self.message = "Insufficient Data, exit."
super().__init__(self.message)
143 changes: 110 additions & 33 deletions amworkflow/src/core/workflow.py
@@ -1,57 +1,142 @@
from OCC.Core.TopoDS import TopoDS_Shape
import yaml
import os
from amworkflow.src.constants.enums import Directory as D
from amworkflow.src.constants.enums import ParameterLabel as P
from amworkflow.src.constants.data_model import WallParam, DB_WallGeometryFile, DB_XdmfFile, DB_H5File
from amworkflow.src.utils.parser import yaml_parser
import gmsh
from amworkflow.src.infrastructure.database.models.model import XdmfFile, H5File, FEResult, SliceFile, GCode
from amworkflow.src.infrastructure.database.cruds.crud import insert_data
from amworkflow.src.infrastructure.database.models.model import XdmfFile, H5File, FEResult, SliceFile, GCode, ModelProfile, ModelParameter
from amworkflow.src.infrastructure.database.cruds.crud import insert_data, query_multi_data
from amworkflow.src.geometries.mesher import mesher, get_geom_pointer
from amworkflow.src.utils.writer import mesh_writer
from amworkflow.src.utils.permutator import simple_permutator
from amworkflow.src.utils.writer import namer, stl_writer, batch_num_creator
from amworkflow.tests.test import dimension_check
import numpy as np
from amworkflow.src.utils.download import downloader
from amworkflow.src.constants.data_model import MapParamModel
from amworkflow.src.utils.sanity_check import path_valid_check
import copy
from amworkflow.src.constants.exceptions import NoDataInDatabaseException, InsufficientDataException
from amworkflow.src.utils.reader import get_filename

class BaseWorkflow(object):
def __init__(self, yaml_dir: str, filename: str, data_model: callable, geom_db_model: callable = None, geom_db_data_model: callable = None, db : bool = True):
self.yaml_dir = yaml_dir
self.yaml_filename = filename
self.data_model = data_model
def __init__(self, args):
self.args = args
self.yaml_dir = self.args.yaml_dir
self.step_dir = self.args.step_dir
self.yaml_parser = yaml_parser
self.data = self.data_model(**yaml_parser(self.yaml_dir, self.yaml_filename))
self.isbatch = self.data.batch_parameter.isbatch
self.data: dict
self.namer = namer
self.model_name: str
self.model_hashname = self.namer(name_type="hex")
self.label = {}
self.geom_pointer: int
self.mpm = MapParamModel
self.shape = []
self.mesh_result = []
self.name_list = []
self.parm_list = []
self.permutation: np.ndarray
self.hashname_list = []
self.mesh_name_list = []
self.mesh_hashname_list = []
self.db = db
self.db = True
self.start_vector = []
self.end_vector = []
self.num_vector = []
self.title = []
self.db_data_collection = {}
self.is_curve_list = []
self.batch_num = None
self.geom_db_model = geom_db_model
self.geom_db_data_model = geom_db_data_model
self.namer = namer
pass
self.data_init()

def data_init(self):
if self.model_name != None:
result = query_multi_data(ModelProfile, by_name=self.model_name, column_name="model_name", target_column_name="model_name")
if self.model_name in result:
indicator = (0,0)
if self.args.edit:
indicator = (0,1)
else:
if self.args.remove:
indicator = (0,2)
else:
indicator = (0,3)
else:
indicator = (0,4)
else:
if self.step_dir != None:
path_valid_check(self.step_dir, format=["stp", "step"])
stp_filename = get_filename(self.step_dir)
result = query_multi_data(ModelProfile, by_name=stp_filename, column_name="model_name", target_column_name="model_name")
if stp_filename in result:
indicator = (1,0)
else:
indicator = (1,1)
else:
if self.yaml_dir != None:
path_valid_check(self.yaml_dir, format=["yml", "yaml"])
self.data = yaml_parser(self.yaml_dir)
if "model_name" in self.data["model_profile"]:
self.model_name = self.data["model_profile"]["model_name"]
indicator = (2,0)
else: raise InsufficientDataException()
else:
raise InsufficientDataException()

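# indicator[0] records which input was supplied: 0 = model name, 1 = STEP file, 2 = YAML file.
# indicator[1] refines the action: for a model name, 1 = edit, 2 = remove, 3 = load as-is,
# 4 = name not in the database (create a new profile); for a STEP file, 0 = already stored, 1 = new file.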
match indicator[0]:
case 1:
#TODO: read the step file stored in the database and convert it to an OCC representation.
match indicator[1]:
case 1:
#TODO: convert the file to an OCC representation and store the file and info to db.
pass
case 2:
for lbl in self.data.keys():
if lbl == "geometry_parameter":
for key, item in self.data[lbl].items():
self.start_vector.append(item[P.STARTPOINT.value])
self.end_vector.append(item[P.ENDPOINT.value])
self.num_vector.append(item[P.NUM.value])
self.title.append(copy.copy(key))
self.start_vector = np.array(self.start_vector)
self.end_vector = np.array(self.end_vector)
self.num_vector = np.array(self.num_vector)
self.label[lbl] = self.title
else:
for key, item in self.data[lbl].items():
self.label[lbl].append(key)

case 0:
self.query_list = query_multi_data(table = ModelParameter,
by_name= self.model_name,
column_name="model_name")
self.param_type = {row["param_type"]: {} for row in self.query_list}
self.param_list = [(rows["param_name"],rows["param_type"]) for rows in self.query_list]
self.data = self.param_type.copy()
for pair in self.param_list:
for p_type, p_value in self.data.items():
if pair[1] == p_type:
p_value[pair[0]] = None
match indicator[1]:
case 1:
#TODO: compare differences between inputs and loaded parameters, replace and add new parameters.
pass
case 2:
#TODO: remove certain model profile from database
pass
case 3:
#TODO: do nothing, fill data into the loaded model.
pass
case 4:
#TODO: Create a new model profile with given parameters.
pass

def create(self) -> None:
'''
create the real entity of the geometry, then prepare necessary info for sequential process.
create the real entity of the geometry, then prepare necessary info for sequential processes.
Geometries created by this method should be placed in self.shape.
'''
self.data_assign()
self.permutation = self.permutator()
dimension_check(self.permutation)
is_start_vector = False
self.batch_num = batch_num_creator()
@@ -113,6 +198,11 @@ def mesh(self):
filename=self.hashname_list[index],
output_filename = mesh_hashname,
format="xdmf")
# mesh_writer(item=model,
# directory=D.USECASE_PATH_PARAMWALL_PATH.value,
# filename=self.hashname_list[index],
# output_filename = mesh_hashname,
# format="msh")
db_model_xdmf = DB_XdmfFile()
db_model_h5 = DB_H5File()
db_model_xdmf.xdmf_hashname = mesh_hashname
@@ -132,6 +222,7 @@ def mesh(self):
h5_collection = self.db_data_collection["mesh"]["h5"]
xdmf_collection.append(db_model_xdmf.dict())
h5_collection.append(db_model_h5.dict())
gmsh.finalize()
if self.db:
self.db_insert(XdmfFile, xdmf_collection)
self.db_insert(H5File, h5_collection)
Expand All @@ -148,21 +239,7 @@ def gcode():
'''
pass

def data_assign(self):
for ind, item in self.data.geometry_parameter:
if item.length != None :
self.start_vector.append(item.length)
elif item.radius != None:
self.start_vector.append(item.radius)
self.is_curve_list.append(True)
else:
self.start_vector.append(0)
self.end_vector.append(item.endpoint)
self.num_vector.append(item.num)
self.title.append(copy.copy(ind))
self.start_vector = np.array(self.start_vector)
self.end_vector = np.array(self.end_vector)
self.num_vector = np.array(self.num_vector)


def permutator(self):
_, perm = simple_permutator(start_point=self.start_vector,
