diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml index 2a13c2380..4e8b53d4d 100644 --- a/.github/workflows/coverity.yml +++ b/.github/workflows/coverity.yml @@ -13,7 +13,7 @@ jobs: - uses: cvmfs-contrib/github-action-cvmfs@v3 - uses: aidasoft/run-lcg-view@v4 with: - coverity-cmake-command: 'cmake -DCMAKE_CXX_STANDARD=17 -DENABLE_SIO=ON -DUSE_EXTERNAL_CATCH2=OFF ..' + coverity-cmake-command: 'cmake -DCMAKE_CXX_STANDARD=17 -DENABLE_SIO=ON -DENABLE_JULIA=ON -DUSE_EXTERNAL_CATCH2=OFF ..' coverity-project: 'AIDASoft%2Fpodio' coverity-project-token: ${{ secrets.PODIO_COVERITY_TOKEN }} github-pat: ${{ secrets.READ_COVERITY_IMAGE }} diff --git a/.github/workflows/edm4hep.yaml b/.github/workflows/edm4hep.yaml index 188c56f64..76d970d63 100644 --- a/.github/workflows/edm4hep.yaml +++ b/.github/workflows/edm4hep.yaml @@ -44,6 +44,7 @@ jobs: cd $STARTDIR/podio mkdir build && cd build cmake -DENABLE_SIO=ON \ + -DENABLE_JULIA=ON \ -DCMAKE_INSTALL_PREFIX=../install \ -DCMAKE_CXX_STANDARD=17 \ -DCMAKE_CXX_FLAGS=" -fdiagnostics-color=always -Werror -Wno-error=deprecated-declarations " \ diff --git a/.github/workflows/key4hep.yml b/.github/workflows/key4hep.yml index 5135b4741..9ae74d422 100644 --- a/.github/workflows/key4hep.yml +++ b/.github/workflows/key4hep.yml @@ -28,6 +28,7 @@ jobs: mkdir build install cd build cmake -DENABLE_SIO=ON \ + -DENABLE_JULIA=OFF \ -DCMAKE_INSTALL_PREFIX=../install \ -DCMAKE_CXX_STANDARD=17 \ -DCMAKE_CXX_FLAGS=" -fdiagnostics-color=always -Werror -Wno-error=deprecated-declarations " \ diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index d22047cbf..b15ada282 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -32,6 +32,7 @@ jobs: mkdir build cd build cmake .. -DENABLE_SIO=ON \ + -DENABLE_JULIA=ON \ -DENABLE_RNTUPLE=ON \ -DCMAKE_CXX_STANDARD=17 \ -DCMAKE_CXX_FLAGS=" -fdiagnostics-color=always -Werror "\ diff --git a/.github/workflows/sanitizers.yaml b/.github/workflows/sanitizers.yaml index f730c287e..537cf7c57 100644 --- a/.github/workflows/sanitizers.yaml +++ b/.github/workflows/sanitizers.yaml @@ -43,11 +43,15 @@ jobs: -DCMAKE_CXX_FLAGS=" -fdiagnostics-color=always " \ -DUSE_EXTERNAL_CATCH2=OFF \ -DENABLE_SIO=ON \ + -DENABLE_JULIA=ON \ -G Ninja .. 
echo "::endgroup::" echo "::group::Build" ninja -k0 echo "::endgroup::" + echo "::group::Julia StaticArrays Package Install" + julia -e 'import Pkg; Pkg.add("StaticArrays")' + echo "::endgroup" echo "::group::Run tests" ctest --output-on-failure - echo "::endgroup::" + echo "::endgroup::" \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c181cba67..5cd382e26 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -37,6 +37,7 @@ jobs: mkdir build install cd build cmake -DENABLE_SIO=ON \ + -DENABLE_JULIA=ON \ -DENABLE_RNTUPLE=${{ matrix.RNTUPLE }} \ -DCMAKE_INSTALL_PREFIX=../install \ -DCMAKE_CXX_STANDARD=${{ matrix.CXX_STANDARD }} \ @@ -48,9 +49,12 @@ jobs: echo "::group::Build" ninja -k0 echo "::endgroup" + echo "::group::Julia StaticArrays Package Install" + julia -e 'import Pkg; Pkg.add("StaticArrays")' + echo "::endgroup" echo "::group::Run tests" ctest --output-on-failure echo "::endgroup::" echo "::group::Install" ninja install - echo "::endgroup::" + echo "::endgroup::" \ No newline at end of file diff --git a/.github/workflows/ubuntu.yml b/.github/workflows/ubuntu.yml index 9814ca7cb..2d7f68aac 100644 --- a/.github/workflows/ubuntu.yml +++ b/.github/workflows/ubuntu.yml @@ -27,6 +27,7 @@ jobs: mkdir build install cd build cmake -DENABLE_SIO=${{ matrix.sio }} \ + -DENABLE_JULIA=ON \ -DCMAKE_INSTALL_PREFIX=../install \ -DCMAKE_CXX_STANDARD=17 \ -DCMAKE_CXX_FLAGS=" -fdiagnostics-color=always -Werror -Wno-error=deprecated-declarations " \ @@ -38,9 +39,12 @@ jobs: echo "::group::Build" ninja -k0 echo "::endgroup" + echo "::group::Julia StaticArrays Package Install" + julia -e 'import Pkg; Pkg.add("StaticArrays")' + echo "::endgroup" echo "::group::Run tests" ctest --output-on-failure echo "::endgroup::" echo "::group::Install" ninja install - echo "::endgroup::" + echo "::endgroup::" \ No newline at end of file diff --git a/.gitignore b/.gitignore index 3315983f2..5c074cd6e 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,9 @@ install tests/src tests/datamodel tests/extension_model +tests/datamodeljulia +tests/unittests/Project.toml +tests/unittests/Manifest.toml # Python *pyc diff --git a/CMakeLists.txt b/CMakeLists.txt index b9bca5cd0..bc30d6539 100755 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -70,6 +70,7 @@ option(ENABLE_SIO "Build SIO I/O support" OFF) option(PODIO_RELAX_PYVER "Do not require exact python version match with ROOT" OFF) option(ENABLE_RNTUPLE "Build with support for the new ROOT NTtuple format" OFF) option(PODIO_USE_CLANG_FORMAT "Use clang-format to format the code" OFF) +option(ENABLE_JULIA "Enable Julia support. When enabled, Julia datamodels will be generated, and Julia tests will run." OFF) #--- Declare ROOT dependency --------------------------------------------------- diff --git a/README.md b/README.md index 58c003219..fbf5bd10d 100755 --- a/README.md +++ b/README.md @@ -162,6 +162,7 @@ The generation script has the following additional options: - `--clangformat` (`-c`): Apply clang-format after file creation (uses [option `-style=file`](https://clang.llvm.org/docs/ClangFormatStyleOptions.html) with llvm as backup style), needs clang-format in `$PATH`. 
- `--quiet` (`-q`): Suppress all print out to STDOUT - `--dryrun` (`-d`): Only run the generation logic and validate yaml, do not write files to disk +- `--lang` (`-l`): Specify the programming language (default: cpp), choices: cpp, julia ## Running tests After compilation you can run rudimentary tests with diff --git a/cmake/podioMacros.cmake b/cmake/podioMacros.cmake index cf8124d18..9ac7bd3af 100644 --- a/cmake/podioMacros.cmake +++ b/cmake/podioMacros.cmake @@ -128,13 +128,15 @@ set_property(CACHE PODIO_USE_CLANG_FORMAT PROPERTY STRINGS AUTO ON OFF) # passed directly to podio_class_generator.py and validated there # Default is ROOT # SCHEMA_EVOLUTION OPTIONAL: The path to the yaml file declaring the necessary schema evolution -# ) +# LANG OPTIONAL: The programming language choice +# Default is cpp +# ) # # Note that the create_${datamodel} target will always be called, but if the YAML_FILE has not changed # this is essentially a no-op, and should not cause re-compilation. #--------------------------------------------------------------------------------------------------- function(PODIO_GENERATE_DATAMODEL datamodel YAML_FILE RETURN_HEADERS RETURN_SOURCES) - CMAKE_PARSE_ARGUMENTS(ARG "" "OLD_DESCRIPTION;OUTPUT_FOLDER;UPSTREAM_EDM;SCHEMA_EVOLUTION" "IO_BACKEND_HANDLERS" ${ARGN}) + CMAKE_PARSE_ARGUMENTS(ARG "" "OLD_DESCRIPTION;OUTPUT_FOLDER;UPSTREAM_EDM;SCHEMA_EVOLUTION" "IO_BACKEND_HANDLERS;LANG" ${ARGN}) IF(NOT ARG_OUTPUT_FOLDER) SET(ARG_OUTPUT_FOLDER ${CMAKE_CURRENT_SOURCE_DIR}) ENDIF() @@ -153,6 +155,13 @@ function(PODIO_GENERATE_DATAMODEL datamodel YAML_FILE RETURN_HEADERS RETURN_SOUR SET(ARG_IO_BACKEND_HANDLERS "ROOT") ENDIF() + # Check if the LANG argument is specified and set the language accordingly. + IF(ARG_LANG) + SET(LANGUAGE_ARG "--lang=${ARG_LANG}") + ELSE() + SET(LANGUAGE_ARG "--lang=cpp") # Default to C++ + ENDIF() + SET(SCHEMA_EVOLUTION_ARG "") IF (ARG_SCHEMA_EVOLUTION) SET(SCHEMA_EVOLUTION_ARG "--evolution_file=${ARG_SCHEMA_EVOLUTION}") @@ -201,7 +210,7 @@ function(PODIO_GENERATE_DATAMODEL datamodel YAML_FILE RETURN_HEADERS RETURN_SOUR message(STATUS "Creating '${datamodel}' datamodel") # we need to boostrap the data model, so this has to be executed in the cmake run execute_process( - COMMAND ${Python_EXECUTABLE} ${podio_PYTHON_DIR}/podio_class_generator.py ${CLANG_FORMAT_ARG} ${OLD_DESCRIPTION_ARG} ${SCHEMA_EVOLUTION_ARG} ${UPSTREAM_EDM_ARG} ${YAML_FILE} ${ARG_OUTPUT_FOLDER} ${datamodel} ${ARG_IO_BACKEND_HANDLERS} + COMMAND ${Python_EXECUTABLE} ${podio_PYTHON_DIR}/podio_class_generator.py ${CLANG_FORMAT_ARG} ${OLD_DESCRIPTION_ARG} ${SCHEMA_EVOLUTION_ARG} ${UPSTREAM_EDM_ARG} ${YAML_FILE} ${ARG_OUTPUT_FOLDER} ${datamodel} ${ARG_IO_BACKEND_HANDLERS} ${LANGUAGE_ARG} WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} RESULT_VARIABLE podio_generate_command_retval ) diff --git a/doc/templates.md b/doc/templates.md index 8fc6d8c27..8397359a9 100644 --- a/doc/templates.md +++ b/doc/templates.md @@ -32,6 +32,9 @@ They are broadly split along the classes that are generated for each datatype or | `CollectionData.{h,cc}.jinja2` | The classes managing the collection storage (not user facing!) 
| `[<package>/]<datatype-name>CollectionData.h`, `src/<datatype-name>CollectionData.cc` |
 | `selection.xml.jinja2` | The `selection.xml` file that is necessary for generating a root dictionary for the generated datamodel | `src/selection.xml` |
 | `SIOBlock.{h,cc}.jinja2` | The SIO blocks that are necessary for the SIO backend | `[<package>/]<datatype-name>SIOBlock.h`, `src/<datatype-name>SIOBlock.cc` |
+| `MutableStruct.jl.jinja2` | The mutable struct definitions of components and datatypes for Julia | `[<package>/]<component-name>Struct.jl`, `[<package>/]<datatype-name>Struct.jl` |
+| `ParentModule.jl.jinja2` | The constructor and collection definitions of the components and datatypes of the data model, contained in a single module named after the package name | `[<package>/]<package-name>.jl` |
+
 
 The presence of a `[<package>]` subdirectory for the header files is controlled by the `includeSubfolder` option in the yaml definition file.
@@ -55,7 +58,7 @@ The main entry point to the generation is the `process` method which essentially
 The following gives an overview of the information that is available from the dictionary that is passed to the templates from the `ClassGenerator`.
 Each (top level) key in this dict is directly available as a variable in the Jinja2 templates, e.g.
 ```python
-component['include'] = # list of includes
+component['includes'] = # list of includes
 ```
 will become available as
 ```jinja2
@@ -123,6 +126,8 @@ In principle all members are accessible in the templates, however, the most impo
 | `getter_name` | Method for generating the correct name for getter functions, depending on the `getSyntax` option in the yaml definition file. |
 | `setter_name` | Method for generating the correct name for setter functions, depending on the `getSyntax` option in the yaml definition file and on whether the member is a relation or not |
 | `signature` | The signature of a data member that can be used in function signatures, corresponds to `{{ full_type }} {{ name }}` |
+| `jl_imports` | The imports required by the Julia type, e.g. `using StaticArrays` for `MVector` members |
+| `julia_type` | The equivalent Julia type for the C++ type |
 
 ### `DataType`
 Defined in [`python/generator_utils.py`](/python/generator_utils.py).
@@ -134,3 +139,20 @@ The available fields are
 | `bare_type` | The type without the namespace |
 | `namespace` | The (potentially empty) namespace |
 | `full_type` | The fully qualified type, corresponding to `{{ namespace }}::{{ bare_type }}`. |
+
+### Julia code generation
+Julia code generation is an experimental feature.
+The builtin C++ types are mapped to Julia types as follows:
+| cpp | julia |
+|-------------|--------------------------------------------------------------------------------|
+| `bool` | `Bool` |
+| `char` | `Char` |
+| `short` | `Int16` |
+| `int` | `Int32` |
+| `unsigned int` | `UInt32` |
+| `float` | `Float32` |
+| `double` | `Float64` |
+| `long` | `Int64` |
+| `unsigned long` | `UInt64` |
+| `long long` | `Int64` |
+| `unsigned long long` | `UInt64` |
diff --git a/python/podio_class_generator.py b/python/podio_class_generator.py
index 14d96f606..eb688c6a6 100755
--- a/python/podio_class_generator.py
+++ b/python/podio_class_generator.py
@@ -28,6 +28,14 @@ Used {yamlfile} to create {nclasses} classes in {installdir}/
   Read instructions in the README.md to run your first example!
 """
+REPORT_TEXT_JULIA = """
+  Julia code generation is an experimental feature.
+  Warning: ExtraCode and MutableExtraCode will be ignored during Julia code generation.
+  PODIO Data Model
+  ================
+  Used {yamlfile} to create {nfiles} julia files in {installdir}/
+  Read instructions in the README.md to run your first example!
+""" def get_clang_format(): @@ -80,11 +88,13 @@ class IncludeFrom(IntEnum): class ClassGenerator: """The entry point for reading a datamodel definition and generating the necessary source code from it.""" - def __init__(self, yamlfile, install_dir, package_name, io_handlers, verbose, dryrun, + # pylint: disable=too-many-arguments + def __init__(self, yamlfile, install_dir, package_name, io_handlers, proglang, verbose, dryrun, upstream_edm, old_description, evolution_file): self.install_dir = install_dir self.package_name = package_name self.io_handlers = io_handlers + self.proglang = proglang self.verbose = verbose self.dryrun = dryrun self.yamlfile = yamlfile @@ -122,22 +132,43 @@ def __init__(self, yamlfile, install_dir, package_name, io_handlers, verbose, dr self.generated_files = [] self.any_changes = False + def _process_parent_module(self, datamodel): + """Process parent module of julia that contains constructor definitions + of components and datatypes""" + self._fill_templates("ParentModule", datamodel) + def process(self): """Run the actual generation""" self.process_schema_evolution() + datamodel = {} + datamodel['class'] = DataType(self.package_name.capitalize()) + datamodel['upstream_edm'] = self.upstream_edm + datamodel['upstream_edm_name'] = '' + if self.upstream_edm: + datamodel['upstream_edm_name'] = self.upstream_edm.options["includeSubfolder"].split("/")[-2].capitalize() + datamodel['components'] = [] + datamodel['datatypes'] = [] for name, component in self.datamodel.components.items(): - self._process_component(name, component) + datamodel['components'].append(self._process_component(name, component)) + for name, datatype in self.datamodel.datatypes.items(): - self._process_datatype(name, datatype) + datamodel['datatypes'].append(self._process_datatype(name, datatype)) - self._write_edm_def_file() + datamodel['static_arrays_import'] = self._has_static_arrays_import(datamodel['components'] + datamodel['datatypes']) + datamodel['includes'] = self._sort_components_and_datatypes(datamodel['components'] + datamodel['datatypes']) - if 'ROOT' in self.io_handlers: - self.prepare_iorules() - self._create_selection_xml() + if self.proglang == "julia": + self._process_parent_module(datamodel) - self._write_cmake_lists_file() + if self.proglang == "cpp": + self._write_edm_def_file() + + if 'ROOT' in self.io_handlers: + self.prepare_iorules() + self._create_selection_xml() + + self._write_cmake_lists_file() self.print_report() @@ -177,11 +208,16 @@ def print_report(self): """Print a summary report about the generated code""" if not self.verbose: return - - nclasses = 5 * len(self.datamodel.datatypes) + len(self.datamodel.components) - text = REPORT_TEXT.format(yamlfile=self.yamlfile, - nclasses=nclasses, - installdir=self.install_dir) + if self.proglang == "julia": + nfiles = len(self.datamodel.datatypes) + len(self.datamodel.components) + 1 + text = REPORT_TEXT_JULIA.format(yamlfile=self.yamlfile, + nfiles=nfiles, + installdir=self.install_dir) + if self.proglang == "cpp": + nclasses = 5 * len(self.datamodel.datatypes) + len(self.datamodel.components) + text = REPORT_TEXT.format(yamlfile=self.yamlfile, + nclasses=nclasses, + installdir=self.install_dir) for summaryline in text.splitlines(): print(summaryline) @@ -201,13 +237,13 @@ def _eval_template(self, template, data, old_schema_data=None): def _write_file(self, name, content): """Write the content to file. 
Dispatch to the correct directory depending on whether it is a header or a .cc file.""" - if name.endswith("h"): + if name.endswith("h") or name.endswith("jl"): fullname = os.path.join(self.install_dir, self.package_name, name) else: fullname = os.path.join(self.install_dir, "src", name) if not self.dryrun: self.generated_files.append(fullname) - if self.clang_format: + if self.clang_format and not name.endswith('jl'): with subprocess.Popen(self.clang_format, stdin=subprocess.PIPE, stdout=subprocess.PIPE) as cfproc: content = cfproc.communicate(input=content.encode())[0].decode() @@ -227,13 +263,17 @@ def get_fn_format(tmpl): 'Obj': 'Obj', 'SIOBlock': 'SIOBlock', 'Collection': 'Collection', - 'CollectionData': 'CollectionData'} + 'CollectionData': 'CollectionData', + 'MutableStruct': 'Struct' + } return f'{prefix.get(tmpl, "")}{{name}}{postfix.get(tmpl, "")}.{{end}}' endings = { 'Data': ('h',), - 'PrintInfo': ('h',) + 'PrintInfo': ('h',), + 'MutableStruct': ('jl',), + 'ParentModule': ('jl',), }.get(template_base, ('h', 'cc')) fn_templates = [] @@ -251,7 +291,6 @@ def _fill_templates(self, template_base, data, old_schema_data=None): data['package_name'] = self.package_name data['use_get_syntax'] = self.get_syntax data['incfolder'] = self.incfolder - for filename, template in self._get_filenames_templates(template_base, data['class'].bare_type): self._write_file(filename, self._eval_template(template, data, old_schema_data)) @@ -259,10 +298,10 @@ def _process_component(self, name, component): """Process one component""" # Make a copy here and add the preprocessing steps to that such that the # original definition can be left untouched + # pylint: disable=too-many-nested-blocks component = deepcopy(component) includes = set() includes.update(*(m.includes for m in component['Members'])) - for member in component['Members']: if not (member.is_builtin or member.is_builtin_array): includes.add(self._build_include(member)) @@ -271,23 +310,32 @@ def _process_component(self, name, component): component['includes'] = self._sort_includes(includes) component['class'] = DataType(name) - self._fill_templates('Component', component) - - # Add potentially older schema for schema evolution - # based on ROOT capabilities for now - if name in self.root_schema_dict: - schema_evolutions = self.root_schema_dict[name] - component = copy.deepcopy(component) - for schema_evolution in schema_evolutions: - if isinstance(schema_evolution, RenamedMember): - for member in component['Members']: - if member.name == schema_evolution.member_name_new: - member.name = schema_evolution.member_name_old - component['class'] = DataType(name + self.old_schema_version) - else: - raise NotImplementedError + component['upstream_edm'] = self.upstream_edm + component['upstream_edm_name'] = '' + if self.upstream_edm: + component['upstream_edm_name'] = self.upstream_edm.options["includeSubfolder"].split("/")[-2].capitalize() + + if self.proglang == "cpp": self._fill_templates('Component', component) - self.root_schema_component_names.add(name + self.old_schema_version) + # Add potentially older schema for schema evolution + # based on ROOT capabilities for now + if name in self.root_schema_dict: + schema_evolutions = self.root_schema_dict[name] + component = copy.deepcopy(component) + for schema_evolution in schema_evolutions: + if isinstance(schema_evolution, RenamedMember): + for member in component['Members']: + if member.name == schema_evolution.member_name_new: + member.name = schema_evolution.member_name_old + component['class'] = 
DataType(name + self.old_schema_version) + else: + raise NotImplementedError + self._fill_templates('Component', component) + self.root_schema_component_names.add(name + self.old_schema_version) + + if self.proglang == "julia": + self._fill_templates('MutableStruct', component) + return component @staticmethod def _replace_component_in_paths(oldname, newname, paths): @@ -305,46 +353,51 @@ def _process_datatype(self, name, definition): """Process one datatype""" datatype = self._preprocess_datatype(name, definition) - # ROOT schema evolution preparation - # Compute and prepare the potential schema evolution parts - schema_evolution_datatype = copy.deepcopy(datatype) - needs_schema_evolution = False - for member in schema_evolution_datatype['Members']: - if member.is_array: - if member.array_type in self.root_schema_dict: - needs_schema_evolution = True - self._replace_component_in_paths(member.array_type, member.array_type + self.old_schema_version, - schema_evolution_datatype['includes_data']) - member.full_type = member.full_type.replace(member.array_type, member.array_type + self.old_schema_version) - member.array_type = member.array_type + self.old_schema_version + if self.proglang == "cpp": + # ROOT schema evolution preparation + # Compute and prepare the potential schema evolution parts + schema_evolution_datatype = copy.deepcopy(datatype) + needs_schema_evolution = False + for member in schema_evolution_datatype['Members']: + if member.is_array: + if member.array_type in self.root_schema_dict: + needs_schema_evolution = True + self._replace_component_in_paths(member.array_type, member.array_type + self.old_schema_version, + schema_evolution_datatype['includes_data']) + member.full_type = member.full_type.replace(member.array_type, member.array_type + self.old_schema_version) + member.array_type = member.array_type + self.old_schema_version + else: + if member.full_type in self.root_schema_dict: + needs_schema_evolution = True + # prepare the ROOT I/O rule + self._replace_component_in_paths(member.full_type, member.full_type + self.old_schema_version, + schema_evolution_datatype['includes_data']) + member.full_type = member.full_type + self.old_schema_version + member.bare_type = member.bare_type + self.old_schema_version + + if needs_schema_evolution: + print(f" Preparing explicit schema evolution for {name}") + schema_evolution_datatype['class'].bare_type = schema_evolution_datatype['class'].bare_type + self.old_schema_version # noqa + self._fill_templates('Data', schema_evolution_datatype) + self.root_schema_datatype_names.add(name + self.old_schema_version) + self._fill_templates('Collection', datatype, schema_evolution_datatype) else: - if member.full_type in self.root_schema_dict: - needs_schema_evolution = True - # prepare the ROOT I/O rule - self._replace_component_in_paths(member.full_type, member.full_type + self.old_schema_version, - schema_evolution_datatype['includes_data']) - member.full_type = member.full_type + self.old_schema_version - member.bare_type = member.bare_type + self.old_schema_version - - if needs_schema_evolution: - print(f" Preparing explicit schema evolution for {name}") - schema_evolution_datatype['class'].bare_type = schema_evolution_datatype['class'].bare_type + self.old_schema_version # noqa - self._fill_templates('Data', schema_evolution_datatype) - self.root_schema_datatype_names.add(name + self.old_schema_version) - self._fill_templates('Collection', datatype, schema_evolution_datatype) - else: + self._fill_templates('Collection', datatype) + + 
self._fill_templates('Data', datatype) + self._fill_templates('Object', datatype) + self._fill_templates('MutableObject', datatype) + self._fill_templates('Obj', datatype) self._fill_templates('Collection', datatype) + self._fill_templates('CollectionData', datatype) - self._fill_templates('Data', datatype) - self._fill_templates('Object', datatype) - self._fill_templates('MutableObject', datatype) - self._fill_templates('Obj', datatype) - self._fill_templates('Collection', datatype) - self._fill_templates('CollectionData', datatype) + if 'SIO' in self.io_handlers: + self._fill_templates('SIOBlock', datatype) - if 'SIO' in self.io_handlers: - self._fill_templates('SIOBlock', datatype) + if self.proglang == "julia": + self._fill_templates('MutableStruct', datatype) + return datatype def prepare_iorules(self): """Prepare the IORules to be put in the Reflex dictionary""" @@ -368,11 +421,19 @@ def prepare_iorules(self): else: raise NotImplementedError(f"Schema evolution for {schema_change} not yet implemented.") + @staticmethod + def _get_julia_params(datatype): + """Get the relations as parameteric types for MutableStructs""" + params = set() + for relation in datatype['OneToManyRelations'] + datatype['OneToOneRelations']: + if not relation.is_builtin: + params.add((relation.bare_type, relation.full_type)) + return list(params) + def _preprocess_for_obj(self, datatype): """Do the preprocessing that is necessary for the Obj classes""" fwd_declarations = defaultdict(list) includes, includes_cc = set(), set() - for relation in datatype['OneToOneRelations']: if relation.full_type != datatype['class'].full_type: fwd_declarations[relation.namespace].append(relation.bare_type) @@ -500,10 +561,14 @@ def _preprocess_datatype(self, name, definition): data = deepcopy(definition) data['class'] = DataType(name) data['includes_data'] = self._get_member_includes(definition["Members"]) + data['params_jl'] = sorted(self._get_julia_params(data), key=lambda x: x[0]) + data['upstream_edm'] = self.upstream_edm + data['upstream_edm_name'] = '' + if self.upstream_edm: + data['upstream_edm_name'] = self.upstream_edm.options["includeSubfolder"].split("/")[-2].capitalize() self._preprocess_for_class(data) self._preprocess_for_obj(data) self._preprocess_for_collection(data) - return data def _write_edm_def_file(self): @@ -612,6 +677,65 @@ def _build_include_for_class(self, classname, include_from: IncludeFrom) -> str: # the generated code) return '' + @staticmethod + def _sort_components_and_datatypes(data): + """Sorts a list of components and datatypes based on dependencies, ensuring that components and datatypes + with no dependencies or dependencies on built-in types come first. 
The function performs + topological sorting using Kahn's algorithm.""" + # Create a dictionary to store dependencies + dependencies = {} + bare_types_mapping = {} + + for component_data in data: + full_type = component_data['class'].full_type + bare_type = component_data['class'].bare_type + bare_types_mapping[full_type] = bare_type + dependencies[full_type] = set() + + # Check dependencies in 'Members' + if 'Members' in component_data: + for member_data in component_data['Members']: + member_full_type = member_data.full_type + if not member_data.is_builtin and not member_data.is_builtin_array: + dependencies[full_type].add(member_full_type) + + # Check dependencies in 'VectorMembers' + if 'VectorMembers' in component_data: + for vector_member_data in component_data['VectorMembers']: + vector_member_full_type = vector_member_data.full_type + if not vector_member_data.is_builtin and not vector_member_data.is_builtin_array: + dependencies[full_type].add(vector_member_full_type) + + # Perform topological sorting using Kahn's algorithm + sorted_components = [] + while dependencies: + ready = {component for component, deps in dependencies.items() if not deps} + if not ready: + sorted_components.extend(bare_types_mapping[component] for component in dependencies) + break + + for component in ready: + del dependencies[component] + sorted_components.append(bare_types_mapping[component]) + + for deps in dependencies.values(): + deps -= ready + + # Return the Sorted Components (bare_types) + return sorted_components + + @staticmethod + def _has_static_arrays_import(data): + """Checks if any member within a list of components and datatypes contains the import statement + 'using StaticArrays' in its jl_imports. Returns True if found in any member, otherwise False.""" + for component_data in data: + members_data = component_data.get('Members', []) + for member_data in members_data: + jl_imports = member_data.jl_imports + if 'using StaticArrays' in jl_imports: + return True + return False + def _sort_includes(self, includes): """Sort the includes in order to try to have the std includes at the bottom""" package_includes = sorted(i for i in includes if self.package_name in i) @@ -653,15 +777,18 @@ def read_upstream_edm(name_path): import argparse # pylint: disable=invalid-name # before 2.5.0 pylint is too strict with the naming here parser = argparse.ArgumentParser(description='Given a description yaml file this script generates ' - 'the necessary c++ files in the target directory') + 'the necessary c++ or julia files in the target directory') parser.add_argument('description', help='yaml file describing the datamodel') parser.add_argument('targetdir', help='Target directory where the generated data classes will be put. ' 'Header files will be put under //*.h. ' - 'Source files will be put under /src/*.cc') + 'Source files will be put under /src/*.cc. 
' + 'Julia files will be put under //*.jl.') parser.add_argument('packagename', help='Name of the package.') parser.add_argument('iohandlers', choices=['ROOT', 'SIO'], nargs='+', help='The IO backend specific code that should be generated') + parser.add_argument('-l', '--lang', choices=['cpp', 'julia'], default='cpp', + help='Specify the programming language (default: cpp)') parser.add_argument('-q', '--quiet', dest='verbose', action='store_false', default=True, help='Don\'t write a report to screen') parser.add_argument('-d', '--dryrun', action='store_true', default=False, @@ -689,7 +816,7 @@ def read_upstream_edm(name_path): if not os.path.exists(directory): os.makedirs(directory) - gen = ClassGenerator(args.description, args.targetdir, args.packagename, args.iohandlers, + gen = ClassGenerator(args.description, args.targetdir, args.packagename, args.iohandlers, proglang=args.lang, verbose=args.verbose, dryrun=args.dryrun, upstream_edm=args.upstream_edm, old_description=args.old_description, evolution_file=args.evolution_file) if args.clangformat: diff --git a/python/podio_gen/generator_utils.py b/python/podio_gen/generator_utils.py index 84e4adb16..c225bc16b 100644 --- a/python/podio_gen/generator_utils.py +++ b/python/podio_gen/generator_utils.py @@ -29,6 +29,38 @@ def _prefix_name(name, prefix): return name +def get_julia_type(cpp_type, is_array=False, array_type=None, array_size=None): + """Parse the given c++ type to a Julia type""" + builtin_types_map = {"int": "Int32", "float": "Float32", "double": "Float64", + "bool": "Bool", "long": "Int64", "unsigned int": "UInt32", + "unsigned long": "UInt64", "char": "Char", "short": "Int16", + "long long": "Int64", "unsigned long long": "UInt64"} + # is a global type as described in test_MemberParser.py #L121 + # check for cpp_type=None as cpp_type can be None in case of array members + if cpp_type and cpp_type.startswith("::"): + cpp_type = cpp_type[2:] + if cpp_type in builtin_types_map: + return builtin_types_map[cpp_type] + + if not is_array: + if cpp_type.startswith('std::'): + cpp_type = cpp_type[5:] + if cpp_type in ALLOWED_FIXED_WIDTH_TYPES: + regex_string = re.split("(u|)int(8|16|32|64)_t", cpp_type) + cpp_type = regex_string[1].upper() + "Int" + regex_string[2] + return cpp_type + + else: + array_type = get_julia_type(array_type) + if '::' in array_type: + array_type = array_type.split('::')[1] + if array_type not in builtin_types_map.values(): + array_type = array_type + 'Struct' + return f"MVector{{{array_size}, {array_type}}}" + + return cpp_type + + class DefinitionError(Exception): """Exception raised by the ClassDefinitionValidator for invalid definitions. Mainly here to distinguish it from plain exceptions that are otherwise raised. 
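For orientation, here is a small standalone sketch (not part of the change) of how the new `get_julia_type` helper maps a few representative C++ types. It assumes podio's `python/` directory is on `PYTHONPATH`; the expected values mirror the assertions added to `test_MemberParser.py` further down.

```python
# Illustrative only: exercise get_julia_type() on a few representative C++ types.
# Assumes podio's python/ directory is on PYTHONPATH (module path taken from the
# diff header above: python/podio_gen/generator_utils.py).
from podio_gen.generator_utils import get_julia_type

# builtin and fixed-width scalar types
print(get_julia_type("int"))            # -> Int32
print(get_julia_type("unsigned long"))  # -> UInt64
print(get_julia_type("std::int16_t"))   # -> Int16

# std::array members become mutable fixed-size StaticArrays vectors
print(get_julia_type(None, is_array=True, array_type="int", array_size=4))
# -> MVector{4, Int32}

# arrays of non-builtin types refer to the generated <Name>Struct definitions
print(get_julia_type(None, is_array=True, array_type="::GlobalType", array_size=1))
# -> MVector{1, GlobalTypeStruct}
```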
@@ -97,12 +129,14 @@ def __init__(self, name, **kwargs): self.is_array = False # ensure that this will break somewhere if requested but not set self.namespace, self.bare_type = None, None + self.julia_type = None self.array_namespace, self.array_bare_type = None, None self.array_type = kwargs.pop('array_type', None) self.array_size = kwargs.pop('array_size', None) self.includes = set() + self.jl_imports = set() if kwargs: raise ValueError(f"Unused kwargs in MemberVariable: {list(kwargs.keys())}") @@ -123,6 +157,7 @@ def __init__(self, name, **kwargs): self.full_type = rf'std::array<{self.array_type}, {self.array_size}>' self.includes.add('#include ') + self.jl_imports.add('using StaticArrays') self.is_builtin = self.full_type in BUILTIN_TYPES @@ -151,6 +186,9 @@ def __init__(self, name, **kwargs): else: self.namespace, self.bare_type = _get_namespace_class(self.full_type) + self.julia_type = get_julia_type(self.bare_type, is_array=self.is_array, + array_type=self.array_type, array_size=self.array_size) + @property def docstring(self): """Docstring to be used in code generation""" diff --git a/python/podio_gen/test_MemberParser.py b/python/podio_gen/test_MemberParser.py index b860dc833..f0d352dc1 100644 --- a/python/podio_gen/test_MemberParser.py +++ b/python/podio_gen/test_MemberParser.py @@ -21,21 +21,25 @@ def test_parse_valid(self): # pylint: disable=too-many-statements self.assertEqual(parsed.name, r'someFloat') self.assertEqual(parsed.description, r'with an additional comment') self.assertTrue(parsed.default_val is None) + self.assertEqual(parsed.julia_type, r'Float32') parsed = parser.parse(r'float float2 // with numbers') self.assertEqual(parsed.full_type, r'float') self.assertEqual(parsed.name, r'float2') self.assertEqual(parsed.description, r'with numbers') + self.assertEqual(parsed.julia_type, r'Float32') parsed = parser.parse(r' float spacefloat // whitespace everywhere ') self.assertEqual(parsed.full_type, r'float') self.assertEqual(parsed.name, r'spacefloat') self.assertEqual(parsed.description, 'whitespace everywhere') + self.assertEqual(parsed.julia_type, r'Float32') parsed = parser.parse(r'int snake_case // snake case') self.assertEqual(parsed.full_type, r'int') self.assertEqual(parsed.name, r'snake_case') self.assertEqual(parsed.description, r'snake case') + self.assertEqual(parsed.julia_type, r'Int32') parsed = parser.parse(r'std::string mixed_UglyCase_12 // who wants this') self.assertEqual(parsed.full_type, r'std::string') @@ -47,11 +51,13 @@ def test_parse_valid(self): # pylint: disable=too-many-statements self.assertEqual(parsed.full_type, r'unsigned long long') self.assertEqual(parsed.name, r'uVar') self.assertEqual(parsed.description, r'an unsigned long variable') + self.assertEqual(parsed.julia_type, r'UInt64') parsed = parser.parse(r'unsigned int uInt // an unsigned integer') self.assertEqual(parsed.full_type, r'unsigned int') self.assertEqual(parsed.name, r'uInt') self.assertEqual(parsed.description, r'an unsigned integer') + self.assertEqual(parsed.julia_type, r'UInt32') # Fixed width integers in their various forms that they can be spelled out # and be considered valid in our case @@ -60,24 +66,28 @@ def test_parse_valid(self): # pylint: disable=too-many-statements self.assertEqual(parsed.name, r'qualified') self.assertEqual(parsed.description, r'qualified fixed width ints work') self.assertTrue(parsed.is_builtin) + self.assertEqual(parsed.julia_type, r'Int16') parsed = parser.parse(r'std::uint64_t bits // fixed width integer types should work') 
self.assertEqual(parsed.full_type, r'std::uint64_t') self.assertEqual(parsed.name, r'bits') self.assertEqual(parsed.description, r'fixed width integer types should work') self.assertTrue(parsed.is_builtin) + self.assertEqual(parsed.julia_type, r'UInt64') parsed = parser.parse(r'int32_t fixedInt // fixed width signed integer should work') self.assertEqual(parsed.full_type, r'std::int32_t') self.assertEqual(parsed.name, r'fixedInt') self.assertEqual(parsed.description, r'fixed width signed integer should work') self.assertTrue(parsed.is_builtin) + self.assertEqual(parsed.julia_type, r'Int32') parsed = parser.parse(r'uint16_t fixedUInt // fixed width unsigned int with 16 bits') self.assertEqual(parsed.full_type, r'std::uint16_t') self.assertEqual(parsed.name, r'fixedUInt') self.assertEqual(parsed.description, r'fixed width unsigned int with 16 bits') self.assertTrue(parsed.is_builtin) + self.assertEqual(parsed.julia_type, r'UInt16') # an array definition with space everywhere it is allowed parsed = parser.parse(r' std::array < double , 4 > someArray // a comment ') @@ -89,17 +99,20 @@ def test_parse_valid(self): # pylint: disable=too-many-statements self.assertEqual(int(parsed.array_size), 4) self.assertEqual(parsed.array_type, r'double') self.assertTrue(parsed.default_val is None) + self.assertEqual(parsed.julia_type, r'MVector{4, Float64}') # an array definition as terse as possible parsed = parser.parse(r'std::array<int, 2>anArray//with a comment') self.assertEqual(parsed.full_type, r'std::array<int, 2>') self.assertEqual(parsed.name, r'anArray') self.assertEqual(parsed.description, r'with a comment') + self.assertEqual(parsed.julia_type, r'MVector{2, Int32}') parsed = parser.parse('::TopLevelNamespaceType aValidType // hopefully') self.assertEqual(parsed.full_type, '::TopLevelNamespaceType') self.assertEqual(parsed.name, r'aValidType') self.assertEqual(parsed.description, 'hopefully') + self.assertEqual(parsed.julia_type, r'TopLevelNamespaceType') parsed = parser.parse(r'std::array<::GlobalType, 1> anArray // with a top level type') self.assertEqual(parsed.full_type, r'std::array<::GlobalType, 1>') @@ -107,6 +120,7 @@ def test_parse_valid(self): # pylint: disable=too-many-statements self.assertEqual(parsed.description, r'with a top level type') self.assertTrue(not parsed.is_builtin_array) self.assertEqual(parsed.array_type, r'::GlobalType') + self.assertEqual(parsed.julia_type, r'MVector{1, GlobalTypeStruct}') parsed = parser.parse(r'std::array<std::int16_t, 42> fixedWidthArray // a fixed width type array') self.assertEqual(parsed.full_type, r'std::array<std::int16_t, 42>') @@ -114,6 +128,7 @@ def test_parse_valid(self): # pylint: disable=too-many-statements self.assertEqual(parsed.description, r'a fixed width type array') self.assertTrue(parsed.is_builtin_array) self.assertEqual(parsed.array_type, r'std::int16_t') + self.assertEqual(parsed.julia_type, r'MVector{42, Int16}') parsed = parser.parse(r'std::array<uint32_t, 42> fixedWidthArray // a fixed width type array without namespace') self.assertEqual(parsed.full_type, r'std::array<std::uint32_t, 42>') @@ -121,6 +136,7 @@ def test_parse_valid(self): # pylint: disable=too-many-statements self.assertEqual(parsed.description, r'a fixed width type array without namespace') self.assertTrue(parsed.is_builtin_array) self.assertEqual(parsed.array_type, r'std::uint32_t') + self.assertEqual(parsed.julia_type, r'MVector{42, UInt32}') def test_parse_valid_default_value(self): """Test that member variables can be parsed correctly if they have a user @@ -226,22 +242,33 @@ def test_parse_valid_no_description(self): parsed =
parser.parse('unsigned long long aLongWithoutDescription', False) self.assertEqual(parsed.full_type, 'unsigned long long') self.assertEqual(parsed.name, 'aLongWithoutDescription') + self.assertEqual(parsed.julia_type, r'UInt64') parsed = parser.parse('std::array unDescribedArray', False) self.assertEqual(parsed.full_type, 'std::array') self.assertEqual(parsed.name, 'unDescribedArray') self.assertEqual(parsed.array_type, 'unsigned long') self.assertTrue(parsed.is_builtin_array) + self.assertEqual(parsed.julia_type, r'MVector{123, UInt64}') + + parsed = parser.parse('std::array p [mm]', False) + self.assertEqual(parsed.full_type, 'std::array') + self.assertEqual(parsed.name, 'p') + self.assertEqual(parsed.array_type, 'int') + self.assertTrue(parsed.is_builtin_array) + self.assertEqual(parsed.julia_type, r'MVector{4, Int32}') parsed = parser.parse('unsigned long longWithReallyStupidName', False) self.assertEqual(parsed.full_type, 'unsigned long') self.assertEqual(parsed.name, 'longWithReallyStupidName') + self.assertEqual(parsed.julia_type, r'UInt64') parsed = parser.parse('NonBuiltIn aType // descriptions are not ignored even though they are not required', False) self.assertEqual(parsed.full_type, 'NonBuiltIn') self.assertEqual(parsed.name, 'aType') self.assertEqual(parsed.description, 'descriptions are not ignored even though they are not required') self.assertTrue(not parsed.is_builtin) + self.assertEqual(parsed.julia_type, r'NonBuiltIn') def test_parse_unit(self): """Test that units are properly parsed""" diff --git a/python/templates/CMakeLists.txt b/python/templates/CMakeLists.txt index 76b36354b..83a52996a 100644 --- a/python/templates/CMakeLists.txt +++ b/python/templates/CMakeLists.txt @@ -22,4 +22,8 @@ set(PODIO_TEMPLATES ${CMAKE_CURRENT_LIST_DIR}/macros/iterator.jinja2 ${CMAKE_CURRENT_LIST_DIR}/macros/sioblocks.jinja2 ${CMAKE_CURRENT_LIST_DIR}/macros/utils.jinja2 + + ${CMAKE_CURRENT_LIST_DIR}/MutableStruct.jl.jinja2 + ${CMAKE_CURRENT_LIST_DIR}/ParentModule.jl.jinja2 + ${CMAKE_CURRENT_LIST_DIR}/macros/julia_helpers.jinja2 ) diff --git a/python/templates/MutableStruct.jl.jinja2 b/python/templates/MutableStruct.jl.jinja2 new file mode 100644 index 000000000..302ac35af --- /dev/null +++ b/python/templates/MutableStruct.jl.jinja2 @@ -0,0 +1,37 @@ +{% import "macros/julia_helpers.jinja2" as julia_helpers %} +mutable struct {{ class.bare_type }}Struct{{ julia_helpers.julia_parameters(params_jl,"T" ) }} +{% for member in Members %} + {% if member.is_array %} + {% if not member.is_builtin_array and upstream_edm and (member.array_type in upstream_edm.components or member.array_type in upstream_edm.datatypes) %} + {{ member.name }}::MVector{{ '{' }}{{ member.array_size }}, {{ upstream_edm_name }}.{{ member.array_bare_type }}Struct{{ '}' }} + {% else %} + {{ member.name }}::{{ member.julia_type }} + {% endif %} + {% elif member.is_builtin %} + {{ member.name }}::{{ member.julia_type }} + {% else %} + {% if upstream_edm and (member.full_type in upstream_edm.components or member.full_type in upstream_edm.datatypes) %} + {{ member.name }}::{{ upstream_edm_name }}.{{ member.julia_type }}Struct + {% else %} + {{ member.name }}::{{ member.julia_type }}Struct + {% endif %} + {% endif %} +{% endfor %} +{% for relation in OneToManyRelations %} + {{ relation.name }}::Vector{ {{ relation.julia_type }}T } +{% endfor %} +{% for relation in OneToOneRelations %} + {{ relation.name }}::Union{Nothing, {{ relation.julia_type }}T } +{% endfor %} +{% for member in VectorMembers %} + {% if member.is_builtin %} + 
{{ member.name }}::Vector{ {{ member.julia_type }} } + {% else %} + {% if upstream_edm and (member.full_type in upstream_edm.components or member.full_type in upstream_edm.datatypes) %} + {{ member.name }}::Vector{ {{ upstream_edm_name }}.{{ member.julia_type }}Struct } + {% else %} + {{ member.name }}::Vector{ {{ member.julia_type }}Struct } + {% endif %} + {% endif %} +{% endfor %} +end diff --git a/python/templates/ParentModule.jl.jinja2 b/python/templates/ParentModule.jl.jinja2 new file mode 100644 index 000000000..827f46cf5 --- /dev/null +++ b/python/templates/ParentModule.jl.jinja2 @@ -0,0 +1,157 @@ +{% import "macros/julia_helpers.jinja2" as julia_helpers %} +{% if upstream_edm %} +include("{{ upstream_edm.options["includeSubfolder"] }}{{ upstream_edm_name }}.jl") +{% endif %} + +module {{ class.bare_type }} +{% for component in components %} +export {{ component['class'].bare_type }} +{% endfor %} +{% for datatype in datatypes %} +export {{ datatype['class'].bare_type }} +export {{ datatype['class'].bare_type }}Collection +{% endfor %} + +{% if upstream_edm %} +import ..{{ upstream_edm_name }} +{% endif %} + +{% if static_arrays_import %} +try + using StaticArrays +catch + import Pkg + Pkg.activate(@__DIR__) + Pkg.add("StaticArrays") + using StaticArrays +end +{% endif %} +{% for sort_include in includes %} +include("{{ sort_include }}Struct.jl") +{% endfor %} + +{% for component in components %} +function {{ component['class'].bare_type }}( +{% for member in component['Members'] %} + {% if member.is_array %} + {% if member.is_builtin_array %} + {{ member.name }}::{{ member.julia_type }} = {{ member.julia_type }}(undef), + {% else %} + {% if upstream_edm and (member.array_type in upstream_edm.components or member.array_type in upstream_edm.datatypes) %} + {{ member.name }}::MVector{{ '{' }}{{ member.array_size }}, {{ upstream_edm_name }}.{{ member.array_bare_type }}Struct{{ '}' }} = MVector{{ '{' }}{{ member.array_size }}, {{ upstream_edm_name }}.{{ member.array_bare_type }}Struct{{ '}' }}(undef), + {% else %} + {{ member.name }}::{{ member.julia_type }} = {{ member.julia_type }}(undef), + {% endif %} + {% endif %} + {% elif member.is_builtin %} + {{ member.name }}::{{ julia_helpers.classify_data_type(member.julia_type) }} = {{ member.julia_type }}(0), + {% else %} + {% if upstream_edm and (member.full_type in upstream_edm.components or member.full_type in upstream_edm.datatypes) %} + {{ member.name }}::{{ upstream_edm_name }}.{{ member.julia_type }}Struct = {{ upstream_edm_name }}.{{ member.julia_type }}(), + {% else %} + {{ member.name }}::{{ member.julia_type }}Struct = {{ member.julia_type }}(), + {% endif %} + {% endif %} +{% endfor %} +) + return {{ component['class'].bare_type }}Struct{{ julia_helpers.julia_parameters(component['params_jl'], "Struct", upstream_edm, upstream_edm_name) }}( + {% for member in component['Members'] %} + {% if member.is_builtin %} + {{ member.julia_type }}({{ member.name }}), + {% else %} + {{ member.name }}, + {% endif %} + {% endfor %} + ) +end + +{% endfor %} + +{% for datatype in datatypes %} +function {{ datatype['class'].bare_type }}( +{% for member in datatype['Members'] %} + {% if member.is_array %} + {% if member.is_builtin_array %} + {{ member.name }}::{{ member.julia_type }} = {{ member.julia_type }}(undef), + {% else %} + {% if upstream_edm and (member.array_type in upstream_edm.components or member.array_type in upstream_edm.datatypes) %} + {{ member.name }}::MVector{{ '{' }}{{ member.array_size }}, {{ upstream_edm_name }}.{{ 
member.array_bare_type }}Struct{{ '}' }} = MVector{{ '{' }}{{ member.array_size }}, {{ upstream_edm_name }}.{{ member.array_bare_type }}Struct{{ '}' }}(undef), + {% else %} + {{ member.name }}::{{ member.julia_type }} = {{ member.julia_type }}(undef), + {% endif %} + {% endif %} + {% elif member.is_builtin %} + {{ member.name }}::{{ julia_helpers.classify_data_type(member.julia_type) }} = {{ member.julia_type }}(0), + {% else %} + {% if upstream_edm and (member.full_type in upstream_edm.components or member.full_type in upstream_edm.datatypes) %} + {{ member.name }}::{{ upstream_edm_name }}.{{ member.julia_type }}Struct = {{ upstream_edm_name }}.{{ member.julia_type }}(), + {% else %} + {{ member.name }}::{{ member.julia_type }}Struct = {{ member.julia_type }}(), + {% endif %} + {% endif %} +{% endfor %} +{% for relation in datatype['OneToManyRelations'] %} + {% if upstream_edm and (relation.full_type in upstream_edm.components or relation.full_type in upstream_edm.datatypes) %} + {{ relation.name }}::Vector{ {{ upstream_edm_name }}.{{ relation.julia_type }}Struct } = Vector{ {{ upstream_edm_name }}.{{ relation.julia_type }}Struct }(), + {% else %} + {{ relation.name }}::Vector{ {{ relation.julia_type }}Struct } = Vector{ {{ relation.julia_type }}Struct }(), + {% endif %} +{% endfor %} +{% for relation in datatype['OneToOneRelations'] %} + {% if upstream_edm and (relation.full_type in upstream_edm.components or relation.full_type in upstream_edm.datatypes) %} + {{ relation.name }}::Union{Nothing, {{ upstream_edm_name }}.{{ relation.julia_type }}Struct } = nothing, + {% else %} + {{ relation.name }}::Union{Nothing, {{ relation.julia_type }}Struct } = nothing, + {% endif %} +{% endfor %} +{% for member in datatype['VectorMembers'] %} + {% if member.is_builtin %} + {{ member.name }}::Vector{ {{ member.name }}T } = Vector{ {{ member.julia_type }} }([]), + {% else %} + {% if upstream_edm and (member.full_type in upstream_edm.components or member.full_type in upstream_edm.datatypes) %} + {{ member.name }}::Vector{ {{ upstream_edm_name }}.{{ member.julia_type }}Struct } = Vector{ {{ upstream_edm_name }}.{{ member.julia_type }}Struct }([]), + {% else %} + {{ member.name }}::Vector{ {{ member.julia_type }}Struct } = Vector{ {{ member.julia_type }}Struct }([]), + {% endif %} + {% endif %} +{% endfor %} +{% set structure_start = '{' %} +{% set structure_end = '}' %} +) {% if datatype['VectorMembers']|selectattr('is_builtin')|list -%} + where {{ structure_start -}} +{% for member in datatype['VectorMembers'] %} +{% if member.is_builtin %} +{{ member.name }}T<:{{ julia_helpers.classify_data_type(member.julia_type) }}{% if not loop.last %},{% endif %} +{% endif %} +{% endfor -%} +{{ structure_end }} +{% endif %} + + return {{ datatype['class'].bare_type }}Struct{{ julia_helpers.julia_parameters(datatype['params_jl'], "Struct", upstream_edm, upstream_edm_name) }}( + {% for member in datatype['Members'] %} + {% if member.is_builtin %} + {{ member.julia_type }}({{ member.name }}), + {% else %} + {{ member.name }}, + {% endif %} + {% endfor %} + {% for relation in datatype['OneToManyRelations'] %} + {{ relation.name }}, + {% endfor %} + {% for relation in datatype['OneToOneRelations'] %} + {{ relation.name }}, + {% endfor %} + {% for member in datatype['VectorMembers'] %} + {% if member.is_builtin %} + convert(Vector{ {{ member.julia_type }} }, {{ member.name }}), + {% else %} + {{ member.name }}, + {% endif %} + {% endfor %} + ) +end + +{{ datatype['class'].bare_type }}Collection = Vector{ {{ 
datatype['class'].bare_type }}Struct{{ julia_helpers.julia_parameters(datatype['params_jl'], "Struct", upstream_edm, upstream_edm_name) }} } + +{% endfor %} +end diff --git a/python/templates/macros/julia_helpers.jinja2 b/python/templates/macros/julia_helpers.jinja2 new file mode 100644 index 000000000..c90e8138f --- /dev/null +++ b/python/templates/macros/julia_helpers.jinja2 @@ -0,0 +1,32 @@ +{% macro classify_data_type(data_type) %} + {%- set type_map = { + 'Int8': 'Integer', + 'UInt8': 'Integer', + 'Int16': 'Integer', + 'UInt16': 'Integer', + 'Int32': 'Integer', + 'UInt32': 'Integer', + 'Int64': 'Integer', + 'UInt64': 'Integer', + 'Float16': 'Real', + 'Float32': 'Real', + 'Float64': 'Real' + } -%} + + {{ type_map.get(data_type, data_type) }} +{%- endmacro -%} + + +{% macro julia_parameters(params, suffix, upstream_edm=None, upstream_edm_name=None) %} +{%- if params -%}{ +{%- set comma = joiner(',') -%} +{%- for par in params -%} +{{comma() }} + {%- if upstream_edm and (par[1] in upstream_edm.components or par[1] in upstream_edm.datatypes) %} + {{- upstream_edm_name }}.{{ par[0] }}{{ suffix -}} + {%- else %} + {{- par[0] }}{{ suffix -}} + {%- endif %} +{%- endfor -%} +}{%- endif -%} +{%- endmacro -%} diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 9a1583f47..4132ff19c 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -63,6 +63,34 @@ add_subdirectory(schema_evolution) CREATE_PODIO_TEST(ostream_operator.cpp "") CREATE_PODIO_TEST(write_ascii.cpp "") +if (ENABLE_JULIA) + message(STATUS "Julia Datamodel generation is enabled.") + PODIO_GENERATE_DATAMODEL(datamodeljulia datalayout.yaml headers sources + IO_BACKEND_HANDLERS ${PODIO_IO_HANDLERS} + LANG julia + ) + + PODIO_GENERATE_DATAMODEL(extensionmodeljulia datalayout_extension.yaml ext_headers ext_sources + UPSTREAM_EDM ../../datamodeljulia:datalayout.yaml + IO_BACKEND_HANDLERS ${PODIO_IO_HANDLERS} + OUTPUT_FOLDER ${CMAKE_CURRENT_SOURCE_DIR}/extension_model + LANG julia + ) + + find_program(Julia_EXECUTABLE julia) + if (Julia_EXECUTABLE) + message(STATUS "Found Julia, will add the Julia tests") + add_test(NAME julia-unittests COMMAND julia ${CMAKE_CURRENT_SOURCE_DIR}/unittests/unittest.jl) + set_tests_properties(julia-unittests PROPERTIES + WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR} + ) + else() + message(WARNING "Julia not found. 
Cannot run the Julia tests.") + endif() +else() + message(STATUS "Julia Datamodel generation is disabled.") +endif() + # Customize CTest to potentially disable some of the tests with known problems configure_file(CTestCustom.cmake ${PROJECT_BINARY_DIR}/CTestCustom.cmake @ONLY) diff --git a/tests/unittests/unittest.jl b/tests/unittests/unittest.jl new file mode 100644 index 000000000..990610cc5 --- /dev/null +++ b/tests/unittests/unittest.jl @@ -0,0 +1,122 @@ +# Check if StaticArrays is available, if not, install it locally +try + using StaticArrays +catch + import Pkg + Pkg.activate(@__DIR__) + Pkg.add("StaticArrays") + using StaticArrays +end +# datamodeljulia/Datamodeljulia.jl file included inside extension_model/extensionmodeljulia/Extensionmodeljulia.jl file +include("../extension_model/extensionmodeljulia/Extensionmodeljulia.jl") +using .Datamodeljulia +using .Extensionmodeljulia +using Test +@testset "Julia Bindings" begin + @testset "Relations" begin + + mcp1 = ExampleMC() + mcp1.PDG = 2212 + + mcp2 = ExampleMC() + mcp2.PDG = 2212 + + mcp3 = ExampleMC() + mcp3.PDG = 1 + push!(mcp3.parents,mcp1) + + mcp4 = ExampleMC() + mcp4.PDG = -2 + push!(mcp4.parents,mcp2) + + mcp5 = ExampleMC() + mcp5.PDG = -24 + push!(mcp5.parents,mcp1) + push!(mcp5.parents,mcp2) + + + mcp1.PDG = 12 + mcp2.PDG = 13 + + # passes if values are changed in parents + + @test mcp3.parents[1].PDG == 12 + @test mcp4.parents[1].PDG == 13 + @test mcp5.parents[1].PDG == 12 + @test mcp5.parents[2].PDG == 13 + end + + @testset "Vector Members" begin + + m1 = ExampleWithVectorMember() + m1.count = [1,2,3,4,5] + m1.count[5] = 6 + + @test m1.count[5] == 6 + @test m1.count[1] == 1 + @test m1.count[2] == 2 + @test m1.count[3] == 3 + @test m1.count[4] == 4 + end + + @testset "Cyclic Dependency" begin + + cd1 = ExampleForCyclicDependency1() + cd2 = ExampleForCyclicDependency2() + cd1.ref = cd2 + cd2.ref = cd1 + + @test cd1.ref === cd2 + @test cd2.ref === cd1 + end + + @testset "One To One Relations" begin + + c1 = ExampleCluster() + c1.energy = Float64(5) + + c2 = ExampleWithOneRelation() + c2.cluster = c1 + + @test c2.cluster.energy == Float64(5) + + end + + @testset "Collections" begin + mcp1 = ExampleMC() + mcp1.PDG = 2212 + mcp2 = ExampleMC() + mcp2.PDG = 2212 + mcp3 = ExampleMC() + mcp3.PDG = 1 + push!(mcp3.parents,mcp1) + a = ExampleMCCollection([mcp1,mcp2,mcp3]) + mc1=a[1] + mc2=a[2] + mc3=a[3] + @test mc1.PDG == 2212 + @test mc2.PDG == 2212 + @test mc3.PDG == 1 + @test length(mc3.parents)== 1 + @test mc3.parents[1] == mc1 + end + + @testset "Upstream EDM" begin + # Upstream EDM : Datamodeljulia + s1 = NamespaceStruct(1, 2) + s2 = SimpleStruct(1, 2, 3) + s3 = NotSoSimpleStruct(s2) + + # Extensionmodeljulia + ec1 = ExtComponent(s3, s1) + + @test s1.x == Int32(1) + @test s1.y == Int32(2) + @test s3.data.x == Int32(1) + @test s3.data.y == Int32(2) + @test ec1.nspStruct.x == Int32(1) + @test ec1.nspStruct.y == Int32(2) + @test ec1.aStruct.data.x == Int32(1) + @test ec1.aStruct.data.y == Int32(2) + end +end; \ No newline at end of file