From 2c38510c1292634e61f460c495073d86a4bfe430 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 27 Nov 2023 18:26:45 +0100 Subject: [PATCH 01/97] Check `ctest` output for HDF5-DIAG. (#858) HDF5 errors that aren't promoted to an exception by HighFive will simply print an error message. This means that tests can pass even if there are HDF5 errors. This will run the tests with `ctest --verbose` and check that the output doesn't contain `"HDF5-DIAG"`. --- .github/workflows/ci.yml | 24 ++++++++++++++++++------ include/highfive/H5DataType.hpp | 2 +- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e7f5fca10..04c4ea154 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -80,7 +80,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG # Job testing several versions of hdf5 @@ -118,7 +120,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples @@ -156,7 +160,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples working-directory: ${{github.workspace}}/build/src/examples @@ -199,7 +205,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples working-directory: ${{github.workspace}}/build/src/examples @@ -295,7 +303,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples working-directory: ${{github.workspace}}/build/src/examples @@ -348,4 +358,6 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build shell: bash -l {0} - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG diff --git a/include/highfive/H5DataType.hpp b/include/highfive/H5DataType.hpp index 886107961..b2b1881f3 100644 --- a/include/highfive/H5DataType.hpp +++ b/include/highfive/H5DataType.hpp @@ -70,7 +70,7 @@ class DataType: public Object { /// \brief Returns the length (in bytes) of this type elements /// /// Notice that the size of variable length sequences may have limited applicability - /// given that it refers to the size of the control structure. For info see + /// given that it refers to the size of the control structure. For info see /// https://support.hdfgroup.org/HDF5/doc/RM/RM_H5T.html#Datatype-GetSize size_t getSize() const; From c8bf74afac01f06b225a7460f4b6c8efdbf90851 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Wed, 29 Nov 2023 19:04:26 +0100 Subject: [PATCH 02/97] Typos. 
(#873) --- include/highfive/H5DataType.hpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/include/highfive/H5DataType.hpp b/include/highfive/H5DataType.hpp index b2b1881f3..1c28626bf 100644 --- a/include/highfive/H5DataType.hpp +++ b/include/highfive/H5DataType.hpp @@ -96,7 +96,6 @@ class DataType: public Object { /// /// \brief Check the DataType was default constructed. - /// Such value might represent auto-detection of the datatype from a buffer /// bool empty() const noexcept; @@ -152,7 +151,7 @@ class FixedLengthStringType: public StringType { /// requires `4*n` bytes. /// /// The string padding is subtle, essentially it's just a hint. A - /// nullterminated string is guaranteed to have one `'\0'` which marks the + /// null-terminated string is guaranteed to have one `'\0'` which marks the /// semantic end of the string. The length of the buffer must be at least /// `size` bytes regardless. HDF5 will read or write `size` bytes, /// irrespective of the when the `\0` occurs. From cdf79dfb211bc0dc4b2ebc28edacf356d7f41309 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 30 Nov 2023 10:57:38 +0100 Subject: [PATCH 03/97] Move selection related tests. (#876) Part of reorganizing the tests requires grouping tests. One such group is anything related to selections, e.g. column, row or point selections, hyperslabs, etc. This commit only moves code from one file to the next without any changes. --- tests/unit/CMakeLists.txt | 2 +- tests/unit/test_high_five_selection.cpp | 536 ++++++++++++++++++++++++ tests/unit/tests_high_five_base.cpp | 505 ---------------------- 3 files changed, 537 insertions(+), 506 deletions(-) create mode 100644 tests/unit/test_high_five_selection.cpp diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 3644d117c..cb6752cbc 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -7,7 +7,7 @@ if(MSVC) endif() ## Base tests -foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types) +foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection) add_executable(${test_name} "${test_name}.cpp") target_link_libraries(${test_name} HighFive HighFiveWarnings Catch2::Catch2WithMain) catch_discover_tests(${test_name}) diff --git a/tests/unit/test_high_five_selection.cpp b/tests/unit/test_high_five_selection.cpp new file mode 100644 index 000000000..e3b91e4cc --- /dev/null +++ b/tests/unit/test_high_five_selection.cpp @@ -0,0 +1,536 @@ +/* + * Copyright (c), 2017-2023, Blue Brain Project - EPFL + * + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include "tests_high_five.hpp" + +using namespace HighFive; +using Catch::Matchers::Equals; + +template +void selectionArraySimpleTest() { + typedef typename std::vector Vector; + + std::ostringstream filename; + filename << "h5_rw_select_test_" << typeNameHelper() << "_test.h5"; + + const size_t size_x = 10; + const size_t offset_x = 2, count_x = 5; + + const std::string dataset_name("dset"); + + Vector values(size_x); + + ContentGenerate generator; + std::generate(values.begin(), values.end(), generator); + + // Create a new file using the default property lists. 
+ File file(filename.str(), File::ReadWrite | File::Create | File::Truncate); + + DataSet dataset = file.createDataSet(dataset_name, DataSpace::From(values)); + + dataset.write(values); + + file.flush(); + + // select slice + { + // read it back + Vector result; + std::vector offset{offset_x}; + std::vector size{count_x}; + + Selection slice = dataset.select(offset, size); + + CHECK(slice.getSpace().getDimensions()[0] == size_x); + CHECK(slice.getMemSpace().getDimensions()[0] == count_x); + + slice.read(result); + + CHECK(result.size() == 5); + + for (size_t i = 0; i < count_x; ++i) { + REQUIRE(values[i + offset_x] == result[i]); + } + } + + // select cherry pick + { + // read it back + Vector result; + std::vector ids{1, 3, 4, 7}; + + Selection slice = dataset.select(ElementSet(ids)); + + CHECK(slice.getSpace().getDimensions()[0] == size_x); + CHECK(slice.getMemSpace().getDimensions()[0] == ids.size()); + + slice.read(result); + + CHECK(result.size() == ids.size()); + + for (size_t i = 0; i < ids.size(); ++i) { + const std::size_t id = ids[i]; + REQUIRE(values[id] == result[i]); + } + } +} + +TEST_CASE("selectionArraySimpleString") { + selectionArraySimpleTest(); +} + +TEMPLATE_LIST_TEST_CASE("selectionArraySimple", "[template]", dataset_test_types) { + selectionArraySimpleTest(); +} + +TEST_CASE("selectionByElementMultiDim") { + const std::string file_name("h5_test_selection_multi_dim.h5"); + // Create a 2-dim dataset + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + std::vector dims{3, 3}; + + auto set = file.createDataSet("test", DataSpace(dims), AtomicType()); + int values[3][3] = {{1, 2, 3}, {4, 5, 6}, {7, 8, 9}}; + set.write(values); + + { + int value; + set.select(ElementSet{{1, 1}}).read(value); + CHECK(value == 5); + } + + { + int value[2]; + set.select(ElementSet{0, 0, 2, 2}).read(value); + CHECK(value[0] == 1); + CHECK(value[1] == 9); + } + + { + int value[2]; + set.select(ElementSet{{0, 1}, {1, 2}}).read(value); + CHECK(value[0] == 2); + CHECK(value[1] == 6); + } + + { + SilenceHDF5 silencer; + CHECK_THROWS_AS(set.select(ElementSet{0, 1, 2}), DataSpaceException); + } +} + +template +void columnSelectionTest() { + std::ostringstream filename; + filename << "h5_rw_select_column_test_" << typeNameHelper() << "_test.h5"; + + const size_t x_size = 10; + const size_t y_size = 7; + + const std::string dataset_name("dset"); + + T values[x_size][y_size]; + + ContentGenerate generator; + generate2D(values, x_size, y_size, generator); + + // Create a new file using the default property lists. + File file(filename.str(), File::ReadWrite | File::Create | File::Truncate); + + // Create the data space for the dataset. 
+ std::vector dims{x_size, y_size}; + + DataSpace dataspace(dims); + // Create a dataset with arbitrary type + DataSet dataset = file.createDataSet(dataset_name, dataspace); + + dataset.write(values); + + file.flush(); + + std::vector columns{1, 3, 5}; + + Selection slice = dataset.select(columns); + T result[x_size][3]; + slice.read(result); + + CHECK(slice.getSpace().getDimensions()[0] == x_size); + CHECK(slice.getMemSpace().getDimensions()[0] == x_size); + + for (size_t i = 0; i < 3; ++i) + for (size_t j = 0; j < x_size; ++j) + REQUIRE(result[j][i] == values[j][columns[i]]); +} + +TEMPLATE_LIST_TEST_CASE("columnSelection", "[template]", numerical_test_types) { + columnSelectionTest(); +} + +std::vector> global_indices_2d(const std::vector& offset, + const std::vector& count) { + std::vector> indices; + indices.reserve(count[0] * count[1]); + + for (size_t i = 0; i < count[0]; ++i) { + for (size_t j = 0; j < count[1]; ++j) { + indices.push_back({offset[0] + i, offset[1] + j}); + } + } + + return indices; +} + +std::vector> local_indices_2d(const std::vector& count) { + return global_indices_2d({0ul, 0ul}, count); +} + +std::vector> local_indices_1d(const std::vector& count) { + std::vector> local_indices; + for (size_t i = 0; i < count[0]; ++i) { + local_indices.push_back({i}); + } + + return local_indices; +} + +struct RegularHyperSlabAnswer { + static RegularHyperSlabAnswer createRegular(const std::vector& offset, + const std::vector& count) { + return RegularHyperSlabAnswer{global_indices_2d(offset, count), + local_indices_1d({count[0] * count[1]})}; + } + + // These are the selected indices in the + // outer (larger) array. + std::vector> global_indices; + + // These are the selected indices in the compacted (inner) + // array. + std::vector> local_indices; +}; + +struct RegularHyperSlabTestData { + std::string desc; + HyperSlab slab; + RegularHyperSlabAnswer answer; +}; + +std::vector make_regular_hyperslab_test_data() { + std::vector test_data; + + // The dataset is 10x8, we define the following regular + // hyperslabs: + // x----------------x + // | | + // | x------x e | 1 + // | | a | | + // x-|------|-------x 3 + // | | x-|-------x 4 + // | | | | b | + // | | c-|-------c 5 + // | | b-|-------b 6 + // | | | | c | + // | d----x-d-------x 7 + // | | d | | + // | a------a | 9 + // | | + // ------------------ + // 1 3 4 8 + + std::map slabs; + + slabs["a"] = RegularHyperSlab(/* offset = */ {1ul, 1ul}, + /* count = */ {8ul, 3ul}); + + slabs["b"] = RegularHyperSlab(/* offset = */ {4ul, 3ul}, + /* count = */ {2ul, 5ul}); + + slabs["c"] = RegularHyperSlab(/* offset = */ {5ul, 3ul}, + /* count = */ {2ul, 5ul}); + + slabs["d"] = RegularHyperSlab(/* offset = */ {7ul, 1ul}, + /* count = */ {2ul, 3ul}); + + slabs["e"] = RegularHyperSlab(/* offset = */ {0ul, 0ul}, + /* count = */ {3ul, 8ul}); + + // Union, regular + auto slab_bc_union = HyperSlab(slabs["b"]) | slabs["c"]; + auto answer_bc_union = RegularHyperSlabAnswer::createRegular({4ul, 3ul}, {3ul, 5ul}); + test_data.push_back({"b | c", slab_bc_union, answer_bc_union}); + + // Intersection, always regular + auto slab_ab_cut = HyperSlab(slabs["a"]) & slabs["b"]; + auto answer_ab_cut = RegularHyperSlabAnswer::createRegular({4ul, 3ul}, {2ul, 1ul}); + test_data.push_back({"a & b", slab_ab_cut, answer_ab_cut}); + + // Intersection, always regular + auto slab_bc_cut = HyperSlab(slabs["b"]) & slabs["c"]; + auto answer_bc_cut = RegularHyperSlabAnswer::createRegular({5ul, 3ul}, {1ul, 5ul}); + test_data.push_back({"b & c", slab_bc_cut, 
answer_bc_cut}); + + // Xor, regular + auto slab_ad_xor = HyperSlab(slabs["a"]) ^ slabs["d"]; + auto answer_ad_xor = RegularHyperSlabAnswer::createRegular({1ul, 1ul}, {6ul, 3ul}); + test_data.push_back({"a ^ b", slab_ad_xor, answer_ad_xor}); + + // (not b) and c, regular + auto slab_bc_nota = HyperSlab(slabs["b"]).notA(slabs["c"]); + auto answer_bc_nota = RegularHyperSlabAnswer::createRegular({6ul, 3ul}, {1ul, 5ul}); + test_data.push_back({"b notA a", slab_bc_nota, answer_bc_nota}); + + // (not c) and b, regular + auto slab_cb_notb = HyperSlab(slabs["c"]).notB(slabs["b"]); + auto answer_cb_notb = RegularHyperSlabAnswer::createRegular({6ul, 3ul}, {1ul, 5ul}); + test_data.push_back({"c notB b", slab_cb_notb, answer_cb_notb}); + + return test_data; +} + +template +File setupHyperSlabFile(T (&values)[x_size][y_size], + const std::string& filename, + const std::string& dataset_name) { + ContentGenerate generator; + generate2D(values, x_size, y_size, generator); + + // Create a new file using the default property lists. + File file(filename, File::ReadWrite | File::Create | File::Truncate); + + // Create the data space for the dataset. + std::vector dims{x_size, y_size}; + + DataSpace dataspace(dims); + // Create a dataset with arbitrary type + DataSet dataset = file.createDataSet(dataset_name, dataspace); + + dataset.write(values); + file.flush(); + + return file; +} + +template +void regularHyperSlabSelectionTest() { + std::ostringstream filename; + filename << "h5_rw_select_regular_hyperslab_test_" << typeNameHelper() << "_test.h5"; + const std::string dataset_name("dset"); + + const size_t x_size = 10; + const size_t y_size = 8; + + T values[x_size][y_size]; + + auto file = setupHyperSlabFile(values, filename.str(), dataset_name); + auto test_cases = make_regular_hyperslab_test_data(); + + for (const auto& test_case: test_cases) { + SECTION(test_case.desc) { + std::vector result; + + file.getDataSet(dataset_name).select(test_case.slab).read(result); + + auto n_selected = test_case.answer.global_indices.size(); + for (size_t i = 0; i < n_selected; ++i) { + const auto ig = test_case.answer.global_indices[i]; + const auto il = test_case.answer.local_indices[i]; + + REQUIRE(result[il[0]] == values[ig[0]][ig[1]]); + } + } + } +} + +TEMPLATE_LIST_TEST_CASE("hyperSlabSelection", "[template]", numerical_test_types) { + regularHyperSlabSelectionTest(); +} + +struct IrregularHyperSlabAnswer { + // These are the selected indices in the outer (larger) array. 
+ std::vector> global_indices; +}; + +struct IrregularHyperSlabTestData { + std::string desc; + HyperSlab slab; + IrregularHyperSlabAnswer answer; +}; + +std::vector make_irregular_hyperslab_test_data() { + // The dataset is 10x8, with two regular hyperslabs: + // x----------------x + // | | + // | bbbb | + // | bbbb | + // | aaaabb | + // | aaaabb | + // | bbbb | + // | bbbb | + // | | + // | | + // | | + // | | + // ------------------ + + auto slabs = std::map{}; + slabs["a"] = RegularHyperSlab{{2ul, 0ul}, {1ul, 2ul}}; + slabs["b"] = RegularHyperSlab{{1ul, 1ul}, {3ul, 2ul}}; + + std::vector test_data; + + // Union, irregular + auto slab_ab_union = HyperSlab(slabs["a"]) | slabs["b"]; + // clang-format off + auto answer_ab_union = IrregularHyperSlabAnswer{{ + {1ul, 1ul}, {1ul, 2ul}, + {2ul, 0ul}, {2ul, 1ul}, {2ul, 2ul}, + {3ul, 1ul}, {3ul, 2ul} + }}; + // clang-format on + test_data.push_back({"a | b", slab_ab_union, answer_ab_union}); + + // xor, irregular + auto slab_ab_xor = HyperSlab(slabs["a"]) ^ slabs["b"]; + // clang-format off + auto answer_ab_xor = IrregularHyperSlabAnswer{{ + {1ul, 1ul}, {1ul, 2ul}, + {2ul, 0ul}, {2ul, 2ul}, + {3ul, 1ul}, {3ul, 2ul} + }}; + // clang-format on + test_data.push_back({"a xor b", slab_ab_xor, answer_ab_xor}); + + // (not a) and e, irregular + auto slab_ab_nota = HyperSlab(slabs["a"]).notA(slabs["b"]); + // clang-format off + auto answer_ab_nota = IrregularHyperSlabAnswer{{ + {1ul, 1ul}, {1ul, 2ul}, + {2ul, 2ul}, + {3ul, 1ul}, {3ul, 2ul} + }}; + // clang-format on + test_data.push_back({"a nota b", slab_ab_nota, answer_ab_nota}); + + // (not a) and e, irregular + auto slab_ba_notb = HyperSlab(slabs["b"]).notB(slabs["a"]); + // clang-format off + auto answer_ba_notb = IrregularHyperSlabAnswer{{ + {1ul, 1ul}, {1ul, 2ul}, + {2ul, 2ul}, + {3ul, 1ul}, {3ul, 2ul} + }}; + // clang-format on + test_data.push_back({"b notb a", slab_ba_notb, answer_ba_notb}); + + return test_data; +} + +template +void irregularHyperSlabSelectionReadTest() { + std::ostringstream filename; + filename << "h5_write_select_irregular_hyperslab_test_" << typeNameHelper() << "_test.h5"; + + const std::string dataset_name("dset"); + + const size_t x_size = 10; + const size_t y_size = 8; + + T values[x_size][y_size]; + auto file = setupHyperSlabFile(values, filename.str(), dataset_name); + + auto test_cases = make_irregular_hyperslab_test_data(); + + for (const auto& test_case: test_cases) { + SECTION(test_case.desc) { + std::vector result; + + file.getDataSet(dataset_name).select(test_case.slab).read(result); + + auto n_selected = test_case.answer.global_indices.size(); + for (size_t i = 0; i < n_selected; ++i) { + const auto ig = test_case.answer.global_indices[i]; + + REQUIRE(result[i] == values[ig[0]][ig[1]]); + } + } + } +} + +TEMPLATE_LIST_TEST_CASE("irregularHyperSlabSelectionRead", "[template]", numerical_test_types) { + irregularHyperSlabSelectionReadTest(); +} + +template +void irregularHyperSlabSelectionWriteTest() { + std::ostringstream filename; + filename << "h5_write_select_irregular_hyperslab_test_" << typeNameHelper() << "_test.h5"; + + const std::string dataset_name("dset"); + + const size_t x_size = 10; + const size_t y_size = 8; + + T orig_values[x_size][y_size]; + auto file = setupHyperSlabFile(orig_values, filename.str(), dataset_name); + + auto test_cases = make_irregular_hyperslab_test_data(); + + for (const auto& test_case: test_cases) { + SECTION(test_case.desc) { + auto n_selected = test_case.answer.global_indices.size(); + std::vector 
changed_values(n_selected); + ContentGenerate gen; + std::generate(changed_values.begin(), changed_values.end(), gen); + + file.getDataSet(dataset_name).select(test_case.slab).write(changed_values); + + T overwritten_values[x_size][y_size]; + file.getDataSet(dataset_name).read(overwritten_values); + + T expected_values[x_size][y_size]; + for (size_t i = 0; i < x_size; ++i) { + for (size_t j = 0; j < y_size; ++j) { + expected_values[i][j] = orig_values[i][j]; + } + } + + for (size_t i = 0; i < n_selected; ++i) { + const auto ig = test_case.answer.global_indices[i]; + expected_values[ig[0]][ig[1]] = changed_values[i]; + } + + for (size_t i = 0; i < x_size; ++i) { + for (size_t j = 0; j < y_size; ++j) { + REQUIRE(expected_values[i][j] == overwritten_values[i][j]); + } + } + } + } +} + +TEMPLATE_LIST_TEST_CASE("irregularHyperSlabSelectionWrite", "[template]", std::tuple) { + irregularHyperSlabSelectionWriteTest(); +} diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 5cd0554e4..1ba790b3a 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -1112,511 +1112,6 @@ TEST_CASE("datasetOffset") { CHECK(ds_read.getOffset() > 0); } -template -void selectionArraySimpleTest() { - typedef typename std::vector Vector; - - std::ostringstream filename; - filename << "h5_rw_select_test_" << typeNameHelper() << "_test.h5"; - - const size_t size_x = 10; - const size_t offset_x = 2, count_x = 5; - - const std::string dataset_name("dset"); - - Vector values(size_x); - - ContentGenerate generator; - std::generate(values.begin(), values.end(), generator); - - // Create a new file using the default property lists. - File file(filename.str(), File::ReadWrite | File::Create | File::Truncate); - - DataSet dataset = file.createDataSet(dataset_name, DataSpace::From(values)); - - dataset.write(values); - - file.flush(); - - // select slice - { - // read it back - Vector result; - std::vector offset{offset_x}; - std::vector size{count_x}; - - Selection slice = dataset.select(offset, size); - - CHECK(slice.getSpace().getDimensions()[0] == size_x); - CHECK(slice.getMemSpace().getDimensions()[0] == count_x); - - slice.read(result); - - CHECK(result.size() == 5); - - for (size_t i = 0; i < count_x; ++i) { - REQUIRE(values[i + offset_x] == result[i]); - } - } - - // select cherry pick - { - // read it back - Vector result; - std::vector ids{1, 3, 4, 7}; - - Selection slice = dataset.select(ElementSet(ids)); - - CHECK(slice.getSpace().getDimensions()[0] == size_x); - CHECK(slice.getMemSpace().getDimensions()[0] == ids.size()); - - slice.read(result); - - CHECK(result.size() == ids.size()); - - for (size_t i = 0; i < ids.size(); ++i) { - const std::size_t id = ids[i]; - REQUIRE(values[id] == result[i]); - } - } -} - -TEST_CASE("selectionArraySimpleString") { - selectionArraySimpleTest(); -} - -TEMPLATE_LIST_TEST_CASE("selectionArraySimple", "[template]", dataset_test_types) { - selectionArraySimpleTest(); -} - -TEST_CASE("selectionByElementMultiDim") { - const std::string file_name("h5_test_selection_multi_dim.h5"); - // Create a 2-dim dataset - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - std::vector dims{3, 3}; - - auto set = file.createDataSet("test", DataSpace(dims), AtomicType()); - int values[3][3] = {{1, 2, 3}, {4, 5, 6}, {7, 8, 9}}; - set.write(values); - - { - int value; - set.select(ElementSet{{1, 1}}).read(value); - CHECK(value == 5); - } - - { - int value[2]; - set.select(ElementSet{0, 0, 2, 2}).read(value); - 
CHECK(value[0] == 1); - CHECK(value[1] == 9); - } - - { - int value[2]; - set.select(ElementSet{{0, 1}, {1, 2}}).read(value); - CHECK(value[0] == 2); - CHECK(value[1] == 6); - } - - { - SilenceHDF5 silencer; - CHECK_THROWS_AS(set.select(ElementSet{0, 1, 2}), DataSpaceException); - } -} - -template -void columnSelectionTest() { - std::ostringstream filename; - filename << "h5_rw_select_column_test_" << typeNameHelper() << "_test.h5"; - - const size_t x_size = 10; - const size_t y_size = 7; - - const std::string dataset_name("dset"); - - T values[x_size][y_size]; - - ContentGenerate generator; - generate2D(values, x_size, y_size, generator); - - // Create a new file using the default property lists. - File file(filename.str(), File::ReadWrite | File::Create | File::Truncate); - - // Create the data space for the dataset. - std::vector dims{x_size, y_size}; - - DataSpace dataspace(dims); - // Create a dataset with arbitrary type - DataSet dataset = file.createDataSet(dataset_name, dataspace); - - dataset.write(values); - - file.flush(); - - std::vector columns{1, 3, 5}; - - Selection slice = dataset.select(columns); - T result[x_size][3]; - slice.read(result); - - CHECK(slice.getSpace().getDimensions()[0] == x_size); - CHECK(slice.getMemSpace().getDimensions()[0] == x_size); - - for (size_t i = 0; i < 3; ++i) - for (size_t j = 0; j < x_size; ++j) - REQUIRE(result[j][i] == values[j][columns[i]]); -} - -TEMPLATE_LIST_TEST_CASE("columnSelection", "[template]", numerical_test_types) { - columnSelectionTest(); -} - -std::vector> global_indices_2d(const std::vector& offset, - const std::vector& count) { - std::vector> indices; - indices.reserve(count[0] * count[1]); - - for (size_t i = 0; i < count[0]; ++i) { - for (size_t j = 0; j < count[1]; ++j) { - indices.push_back({offset[0] + i, offset[1] + j}); - } - } - - return indices; -} - -std::vector> local_indices_2d(const std::vector& count) { - return global_indices_2d({0ul, 0ul}, count); -} - -std::vector> local_indices_1d(const std::vector& count) { - std::vector> local_indices; - for (size_t i = 0; i < count[0]; ++i) { - local_indices.push_back({i}); - } - - return local_indices; -} - -struct RegularHyperSlabAnswer { - static RegularHyperSlabAnswer createRegular(const std::vector& offset, - const std::vector& count) { - return RegularHyperSlabAnswer{global_indices_2d(offset, count), - local_indices_1d({count[0] * count[1]})}; - } - - // These are the selected indices in the - // outer (larger) array. - std::vector> global_indices; - - // These are the selected indices in the compacted (inner) - // array. 
- std::vector> local_indices; -}; - -struct RegularHyperSlabTestData { - std::string desc; - HyperSlab slab; - RegularHyperSlabAnswer answer; -}; - -std::vector make_regular_hyperslab_test_data() { - std::vector test_data; - - // The dataset is 10x8, we define the following regular - // hyperslabs: - // x----------------x - // | | - // | x------x e | 1 - // | | a | | - // x-|------|-------x 3 - // | | x-|-------x 4 - // | | | | b | - // | | c-|-------c 5 - // | | b-|-------b 6 - // | | | | c | - // | d----x-d-------x 7 - // | | d | | - // | a------a | 9 - // | | - // ------------------ - // 1 3 4 8 - - std::map slabs; - - slabs["a"] = RegularHyperSlab(/* offset = */ {1ul, 1ul}, - /* count = */ {8ul, 3ul}); - - slabs["b"] = RegularHyperSlab(/* offset = */ {4ul, 3ul}, - /* count = */ {2ul, 5ul}); - - slabs["c"] = RegularHyperSlab(/* offset = */ {5ul, 3ul}, - /* count = */ {2ul, 5ul}); - - slabs["d"] = RegularHyperSlab(/* offset = */ {7ul, 1ul}, - /* count = */ {2ul, 3ul}); - - slabs["e"] = RegularHyperSlab(/* offset = */ {0ul, 0ul}, - /* count = */ {3ul, 8ul}); - - // Union, regular - auto slab_bc_union = HyperSlab(slabs["b"]) | slabs["c"]; - auto answer_bc_union = RegularHyperSlabAnswer::createRegular({4ul, 3ul}, {3ul, 5ul}); - test_data.push_back({"b | c", slab_bc_union, answer_bc_union}); - - // Intersection, always regular - auto slab_ab_cut = HyperSlab(slabs["a"]) & slabs["b"]; - auto answer_ab_cut = RegularHyperSlabAnswer::createRegular({4ul, 3ul}, {2ul, 1ul}); - test_data.push_back({"a & b", slab_ab_cut, answer_ab_cut}); - - // Intersection, always regular - auto slab_bc_cut = HyperSlab(slabs["b"]) & slabs["c"]; - auto answer_bc_cut = RegularHyperSlabAnswer::createRegular({5ul, 3ul}, {1ul, 5ul}); - test_data.push_back({"b & c", slab_bc_cut, answer_bc_cut}); - - // Xor, regular - auto slab_ad_xor = HyperSlab(slabs["a"]) ^ slabs["d"]; - auto answer_ad_xor = RegularHyperSlabAnswer::createRegular({1ul, 1ul}, {6ul, 3ul}); - test_data.push_back({"a ^ b", slab_ad_xor, answer_ad_xor}); - - // (not b) and c, regular - auto slab_bc_nota = HyperSlab(slabs["b"]).notA(slabs["c"]); - auto answer_bc_nota = RegularHyperSlabAnswer::createRegular({6ul, 3ul}, {1ul, 5ul}); - test_data.push_back({"b notA a", slab_bc_nota, answer_bc_nota}); - - // (not c) and b, regular - auto slab_cb_notb = HyperSlab(slabs["c"]).notB(slabs["b"]); - auto answer_cb_notb = RegularHyperSlabAnswer::createRegular({6ul, 3ul}, {1ul, 5ul}); - test_data.push_back({"c notB b", slab_cb_notb, answer_cb_notb}); - - return test_data; -} - -template -File setupHyperSlabFile(T (&values)[x_size][y_size], - const std::string& filename, - const std::string& dataset_name) { - ContentGenerate generator; - generate2D(values, x_size, y_size, generator); - - // Create a new file using the default property lists. - File file(filename, File::ReadWrite | File::Create | File::Truncate); - - // Create the data space for the dataset. 
- std::vector dims{x_size, y_size}; - - DataSpace dataspace(dims); - // Create a dataset with arbitrary type - DataSet dataset = file.createDataSet(dataset_name, dataspace); - - dataset.write(values); - file.flush(); - - return file; -} - -template -void regularHyperSlabSelectionTest() { - std::ostringstream filename; - filename << "h5_rw_select_regular_hyperslab_test_" << typeNameHelper() << "_test.h5"; - const std::string dataset_name("dset"); - - const size_t x_size = 10; - const size_t y_size = 8; - - T values[x_size][y_size]; - - auto file = setupHyperSlabFile(values, filename.str(), dataset_name); - auto test_cases = make_regular_hyperslab_test_data(); - - for (const auto& test_case: test_cases) { - SECTION(test_case.desc) { - std::vector result; - - file.getDataSet(dataset_name).select(test_case.slab).read(result); - - auto n_selected = test_case.answer.global_indices.size(); - for (size_t i = 0; i < n_selected; ++i) { - const auto ig = test_case.answer.global_indices[i]; - const auto il = test_case.answer.local_indices[i]; - - REQUIRE(result[il[0]] == values[ig[0]][ig[1]]); - } - } - } -} - -TEMPLATE_LIST_TEST_CASE("hyperSlabSelection", "[template]", numerical_test_types) { - regularHyperSlabSelectionTest(); -} - -struct IrregularHyperSlabAnswer { - // These are the selected indices in the outer (larger) array. - std::vector> global_indices; -}; - -struct IrregularHyperSlabTestData { - std::string desc; - HyperSlab slab; - IrregularHyperSlabAnswer answer; -}; - -std::vector make_irregular_hyperslab_test_data() { - // The dataset is 10x8, with two regular hyperslabs: - // x----------------x - // | | - // | bbbb | - // | bbbb | - // | aaaabb | - // | aaaabb | - // | bbbb | - // | bbbb | - // | | - // | | - // | | - // | | - // ------------------ - - auto slabs = std::map{}; - slabs["a"] = RegularHyperSlab{{2ul, 0ul}, {1ul, 2ul}}; - slabs["b"] = RegularHyperSlab{{1ul, 1ul}, {3ul, 2ul}}; - - std::vector test_data; - - // Union, irregular - auto slab_ab_union = HyperSlab(slabs["a"]) | slabs["b"]; - // clang-format off - auto answer_ab_union = IrregularHyperSlabAnswer{{ - {1ul, 1ul}, {1ul, 2ul}, - {2ul, 0ul}, {2ul, 1ul}, {2ul, 2ul}, - {3ul, 1ul}, {3ul, 2ul} - }}; - // clang-format on - test_data.push_back({"a | b", slab_ab_union, answer_ab_union}); - - // xor, irregular - auto slab_ab_xor = HyperSlab(slabs["a"]) ^ slabs["b"]; - // clang-format off - auto answer_ab_xor = IrregularHyperSlabAnswer{{ - {1ul, 1ul}, {1ul, 2ul}, - {2ul, 0ul}, {2ul, 2ul}, - {3ul, 1ul}, {3ul, 2ul} - }}; - // clang-format on - test_data.push_back({"a xor b", slab_ab_xor, answer_ab_xor}); - - // (not a) and e, irregular - auto slab_ab_nota = HyperSlab(slabs["a"]).notA(slabs["b"]); - // clang-format off - auto answer_ab_nota = IrregularHyperSlabAnswer{{ - {1ul, 1ul}, {1ul, 2ul}, - {2ul, 2ul}, - {3ul, 1ul}, {3ul, 2ul} - }}; - // clang-format on - test_data.push_back({"a nota b", slab_ab_nota, answer_ab_nota}); - - // (not a) and e, irregular - auto slab_ba_notb = HyperSlab(slabs["b"]).notB(slabs["a"]); - // clang-format off - auto answer_ba_notb = IrregularHyperSlabAnswer{{ - {1ul, 1ul}, {1ul, 2ul}, - {2ul, 2ul}, - {3ul, 1ul}, {3ul, 2ul} - }}; - // clang-format on - test_data.push_back({"b notb a", slab_ba_notb, answer_ba_notb}); - - return test_data; -} - -template -void irregularHyperSlabSelectionReadTest() { - std::ostringstream filename; - filename << "h5_write_select_irregular_hyperslab_test_" << typeNameHelper() << "_test.h5"; - - const std::string dataset_name("dset"); - - const size_t x_size = 10; - const 
size_t y_size = 8; - - T values[x_size][y_size]; - auto file = setupHyperSlabFile(values, filename.str(), dataset_name); - - auto test_cases = make_irregular_hyperslab_test_data(); - - for (const auto& test_case: test_cases) { - SECTION(test_case.desc) { - std::vector result; - - file.getDataSet(dataset_name).select(test_case.slab).read(result); - - auto n_selected = test_case.answer.global_indices.size(); - for (size_t i = 0; i < n_selected; ++i) { - const auto ig = test_case.answer.global_indices[i]; - - REQUIRE(result[i] == values[ig[0]][ig[1]]); - } - } - } -} - -TEMPLATE_LIST_TEST_CASE("irregularHyperSlabSelectionRead", "[template]", numerical_test_types) { - irregularHyperSlabSelectionReadTest(); -} - -template -void irregularHyperSlabSelectionWriteTest() { - std::ostringstream filename; - filename << "h5_write_select_irregular_hyperslab_test_" << typeNameHelper() << "_test.h5"; - - const std::string dataset_name("dset"); - - const size_t x_size = 10; - const size_t y_size = 8; - - T orig_values[x_size][y_size]; - auto file = setupHyperSlabFile(orig_values, filename.str(), dataset_name); - - auto test_cases = make_irregular_hyperslab_test_data(); - - for (const auto& test_case: test_cases) { - SECTION(test_case.desc) { - auto n_selected = test_case.answer.global_indices.size(); - std::vector changed_values(n_selected); - ContentGenerate gen; - std::generate(changed_values.begin(), changed_values.end(), gen); - - file.getDataSet(dataset_name).select(test_case.slab).write(changed_values); - - T overwritten_values[x_size][y_size]; - file.getDataSet(dataset_name).read(overwritten_values); - - T expected_values[x_size][y_size]; - for (size_t i = 0; i < x_size; ++i) { - for (size_t j = 0; j < y_size; ++j) { - expected_values[i][j] = orig_values[i][j]; - } - } - - for (size_t i = 0; i < n_selected; ++i) { - const auto ig = test_case.answer.global_indices[i]; - expected_values[ig[0]][ig[1]] = changed_values[i]; - } - - for (size_t i = 0; i < x_size; ++i) { - for (size_t j = 0; j < y_size; ++j) { - REQUIRE(expected_values[i][j] == overwritten_values[i][j]); - } - } - } - } -} - -TEMPLATE_LIST_TEST_CASE("irregularHyperSlabSelectionWrite", "[template]", std::tuple) { - irregularHyperSlabSelectionWriteTest(); -} template void attribute_scalar_rw() { From 6cc408d7a14f2192be573e114429527bb7bed2d9 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 30 Nov 2023 11:02:31 +0100 Subject: [PATCH 04/97] Ensure elements in `std::array` are allocated. (#877) A bug in `inspected::prepare` left the elements of an `std::array` as default allocated. This fails when putting an `std::vector` inside an `std::array`, because the `std::vector`s need to be allocated with the appropriate length. 
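
For context, a minimal self-contained sketch of why the allocation matters; the
`prepare_array` helper below is only a stand-in for the idea, not the library's
`inspector` interface, and it assumes `dims` holds {array extent, inner length}:

    #include <array>
    #include <cstddef>
    #include <vector>

    // Stand-in: forward the trailing dimension to every element so that the
    // nested std::vector elements are allocated before raw element access.
    template <class T, std::size_t N>
    void prepare_array(std::array<std::vector<T>, N>& val,
                       const std::vector<std::size_t>& dims) {
        for (auto& e: val) {
            e.resize(dims[1]);
        }
    }

    int main() {
        std::array<std::vector<double>, 6> buffer;  // elements start out empty
        prepare_array(buffer, {6, 5});              // without this, buffer[i][j] has no storage
        buffer[3][4] = 1.0;                         // now safe
    }
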
--- include/highfive/bits/H5Inspector_misc.hpp | 7 ++++- tests/unit/test_all_types.cpp | 35 ++++++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index 05ed6bc3e..98da8affd 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -514,12 +514,17 @@ struct inspector> { return compute_total_size(dims); } - static void prepare(type& /* val */, const std::vector& dims) { + static void prepare(type& val, const std::vector& dims) { if (dims[0] > N) { std::ostringstream os; os << "Size of std::array (" << N << ") is too small for dims (" << dims[0] << ")."; throw DataSpaceException(os.str()); } + + std::vector next_dims(dims.begin() + 1, dims.end()); + for (auto&& e: val) { + inspector::prepare(e, next_dims); + } } static hdf5_type* data(type& val) { diff --git a/tests/unit/test_all_types.cpp b/tests/unit/test_all_types.cpp index 9dd72a869..23c8a27b3 100644 --- a/tests/unit/test_all_types.cpp +++ b/tests/unit/test_all_types.cpp @@ -174,6 +174,41 @@ TEMPLATE_TEST_CASE("Scalar in std::vector", "[Types]", bool, std::st } } +TEMPLATE_TEST_CASE("Scalar in std::array", "[Types]", bool, std::string) { + const std::string file_name("rw_dataset_array_vector" + typeNameHelper() + ".h5"); + const std::string dataset_name("dset"); + std::array, 6> t1; + for (auto& tt: t1) { + tt = std::vector(5); + } + + { + // Create a new file using the default property lists. + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + // Create the dataset + DataSet dataset = file.createDataSet( + dataset_name, + {6, 5}, + create_datatype< + typename details::inspector>>::base_type>()); + + // Write into the initial part of the dataset + dataset.write(t1); + } + + // read it back + { + File file(file_name, File::ReadOnly); + + std::array, 6> value; + DataSet dataset = file.getDataSet("/" + dataset_name); + dataset.read(value); + CHECK(t1 == value); + CHECK(value.size() == 6); + } +} + #if HIGHFIVE_CXX_STD >= 17 TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector", "[Types]", std::vector, std::byte) { const std::string file_name("rw_dataset_vector_" + typeNameHelper() + ".h5"); From 4d199f1937a799d2cb94fd29bb95f4a092f89e77 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 30 Nov 2023 18:42:12 +0100 Subject: [PATCH 05/97] Improve `details::Boolean` compatibility checks. (#880) Instead of comparing values, use `std::memcmp` to compare bytes. 
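
As a rough illustration of the byte-level check, assuming the usual one-byte
`bool` holding 0 or 1 (which is precisely the property being verified); the
`Boolean` enum below is a stand-in, not the library's `details::Boolean`:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    enum class Boolean : std::uint8_t { False = 0, True = 1 };  // assumed layout

    int main() {
        bool as_bool[2] = {true, false};
        Boolean as_enum[2] = {Boolean::True, Boolean::False};

        // Compare bit patterns, not values: this is what reinterpreting a
        // bool buffer as an enum buffer (and vice versa) relies on.
        static_assert(sizeof(as_bool) == sizeof(as_enum), "layouts must match");
        assert(std::memcmp(as_bool, as_enum, sizeof(as_bool)) == 0);
    }
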
--- tests/unit/tests_high_five_base.cpp | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 1ba790b3a..65ac00368 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -9,6 +9,7 @@ #include #include #include +#include #include #include #include @@ -2439,10 +2440,24 @@ TEST_CASE("HighFiveReadType") { TEST_CASE("DirectWriteBool") { SECTION("Basic compatibility") { - using IntType = typename std::underlying_type::type; CHECK(sizeof(bool) == sizeof(details::Boolean)); - CHECK(true == static_cast(details::Boolean::HighFiveTrue)); - CHECK(false == static_cast(details::Boolean::HighFiveFalse)); + + auto n_bytes = 2 * sizeof(details::Boolean); + + auto* const enum_ptr = (details::Boolean*) malloc(n_bytes); + memset(enum_ptr, 187, n_bytes); + enum_ptr[0] = details::Boolean::HighFiveTrue; + enum_ptr[1] = details::Boolean::HighFiveFalse; + + auto* const bool_ptr = (bool*) malloc(n_bytes); + memset(bool_ptr, 68, n_bytes); + bool_ptr[0] = true; + bool_ptr[1] = false; + + CHECK(std::memcmp(bool_ptr, enum_ptr, n_bytes) == 0); + + free(enum_ptr); + free(bool_ptr); } auto file = File("rw_bool_from_ptr.h5", File::Truncate); From 30a9d14af234b79a0f1e490600d6f7120a6892c0 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 30 Nov 2023 18:42:43 +0100 Subject: [PATCH 06/97] Use `-Werror` during CI. (#878) * Use `-Werror` during CI. ... and fix `-Wshadow` warning. --- .github/build.sh | 1 + CMake/HighFiveWarnings.cmake | 9 ++++++++- CMakeLists.txt | 1 + include/highfive/bits/H5Converter_misc.hpp | 4 ++-- tests/unit/tests_high_five_base.cpp | 1 + 5 files changed, 13 insertions(+), 3 deletions(-) diff --git a/.github/build.sh b/.github/build.sh index 4cdad7e3f..959f0570c 100644 --- a/.github/build.sh +++ b/.github/build.sh @@ -5,6 +5,7 @@ cmake --version set -x export HIGHFIVE_BUILD=$GITHUB_WORKSPACE/build cmake -B $HIGHFIVE_BUILD -S $GITHUB_WORKSPACE \ + -DHIGHFIVE_HAS_WERROR=On \ -DCMAKE_BUILD_TYPE=$BUILD_TYPE \ -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR \ "${CMAKE_OPTIONS[@]}" diff --git a/CMake/HighFiveWarnings.cmake b/CMake/HighFiveWarnings.cmake index 8e8ec2201..16896b648 100644 --- a/CMake/HighFiveWarnings.cmake +++ b/CMake/HighFiveWarnings.cmake @@ -22,7 +22,6 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" -Wformat=2 -Wconversion -Wsign-conversion - -Wno-error=deprecated-declarations ) if(NOT CMAKE_CXX_COMPILER_ID MATCHES "Intel") @@ -33,4 +32,12 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" -Wdouble-promotion ) endif() + + if(HIGHFIVE_HAS_WERROR) + target_compile_options(HighFiveWarnings + INTERFACE + -Werror + -Wno-error=deprecated-declarations + ) + endif() endif() diff --git a/CMakeLists.txt b/CMakeLists.txt index af274d9e2..d592f2d66 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -37,6 +37,7 @@ option(HIGHFIVE_BUILD_DOCS "Enable documentation building" ON) option(HIGHFIVE_VERBOSE "Set logging level to verbose." OFF) option(HIGHFIVE_GLIBCXX_ASSERTIONS "Enable bounds check for STL." OFF) option(HIGHFIVE_HAS_CONCEPTS "Print readable compiler errors w/ C++20 concepts" ON) +option(HIGHFIVE_HAS_WERROR "Convert warnings to errors." OFF) # Controls if HighFive classes are friends of each other. 
# diff --git a/include/highfive/bits/H5Converter_misc.hpp b/include/highfive/bits/H5Converter_misc.hpp index 00749d1b6..ed387702f 100644 --- a/include/highfive/bits/H5Converter_misc.hpp +++ b/include/highfive/bits/H5Converter_misc.hpp @@ -179,9 +179,9 @@ struct StringBuffer { /// internal buffer as needed. /// /// The `length` is the length of the string in bytes. - void assign(char const* data, size_t length, StringPadding padding) { + void assign(char const* data, size_t length, StringPadding pad) { if (buffer.isVariableLengthString()) { - if (padding == StringPadding::NullTerminated) { + if (pad == StringPadding::NullTerminated) { buffer.variable_length_pointers[i] = data; } else { buffer.variable_length_buffer[i] = std::string(data, length); diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 65ac00368..da15b0a90 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -20,6 +20,7 @@ #include #include + #include #include #include From 73dc5d9322ec72952fa1fc6fb8b336243971990e Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 4 Dec 2023 08:24:47 +0100 Subject: [PATCH 07/97] Move data type related to separate file. (#881) --- tests/unit/CMakeLists.txt | 2 +- tests/unit/tests_high_five_base.cpp | 378 --------------------- tests/unit/tests_high_five_data_type.cpp | 410 +++++++++++++++++++++++ 3 files changed, 411 insertions(+), 379 deletions(-) create mode 100644 tests/unit/tests_high_five_data_type.cpp diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index cb6752cbc..b8943067f 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -7,7 +7,7 @@ if(MSVC) endif() ## Base tests -foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection) +foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type) add_executable(${test_name} "${test_name}.cpp") target_link_libraries(${test_name} HighFive HighFiveWarnings Catch2::Catch2WithMain) catch_discover_tests(${test_name}) diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index da15b0a90..6f1953f4d 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -679,25 +679,6 @@ TEST_CASE("Test simple listings") { } } -TEST_CASE("Simple test for type equality") { - AtomicType d_var; - AtomicType size_var; - AtomicType d_var_test; - AtomicType size_var_cpy(size_var); - AtomicType int_var; - AtomicType uint_var; - - // check different type matching - CHECK(d_var == d_var_test); - CHECK(d_var != size_var); - - // check type copy matching - CHECK(size_var_cpy == size_var); - - // check sign change not matching - CHECK(int_var != uint_var); -} - TEST_CASE("StringType") { SECTION("enshrine-defaults") { auto fixed_length = FixedLengthStringType(32, StringPadding::SpacePadded); @@ -2080,365 +2061,6 @@ TEST_CASE("HighFiveLinkCreationOrderProperty") { } } -struct CSL1 { - int m1; - int m2; - int m3; -}; - -struct CSL2 { - CSL1 csl1; -}; - -CompoundType create_compound_csl1() { - auto t2 = AtomicType(); - CompoundType t1({{"m1", AtomicType{}}, {"m2", AtomicType{}}, {"m3", t2}}); - - return t1; -} - -CompoundType create_compound_csl2() { - CompoundType t1 = create_compound_csl1(); - - CompoundType t2({{"csl1", t1}}); - - return t2; -} - -HIGHFIVE_REGISTER_TYPE(CSL1, create_compound_csl1) -HIGHFIVE_REGISTER_TYPE(CSL2, 
create_compound_csl2) - -TEST_CASE("HighFiveCompounds") { - const std::string file_name("compounds_test.h5"); - const std::string dataset_name1("/a"); - const std::string dataset_name2("/b"); - - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - - auto t3 = AtomicType(); - CompoundType t1 = create_compound_csl1(); - t1.commit(file, "my_type"); - - CompoundType t2 = create_compound_csl2(); - t2.commit(file, "my_type2"); - - { // Not nested - auto dataset = file.createDataSet(dataset_name1, DataSpace(2), t1); - - std::vector csl = {{1, 1, 1}, {2, 3, 4}}; - dataset.write(csl); - - file.flush(); - - std::vector result; - dataset.select({0}, {2}).read(result); - - CHECK(result.size() == 2); - CHECK(result[0].m1 == 1); - CHECK(result[0].m2 == 1); - CHECK(result[0].m3 == 1); - CHECK(result[1].m1 == 2); - CHECK(result[1].m2 == 3); - CHECK(result[1].m3 == 4); - } - - { // Nested - auto dataset = file.createDataSet(dataset_name2, DataSpace(2), t2); - - std::vector csl = {{{1, 1, 1}, {2, 3, 4}}}; - dataset.write(csl); - - file.flush(); - std::vector result = {{{1, 1, 1}, {2, 3, 4}}}; - dataset.select({0}, {2}).read(result); - - CHECK(result.size() == 2); - CHECK(result[0].csl1.m1 == 1); - CHECK(result[0].csl1.m2 == 1); - CHECK(result[0].csl1.m3 == 1); - CHECK(result[1].csl1.m1 == 2); - CHECK(result[1].csl1.m2 == 3); - CHECK(result[1].csl1.m3 == 4); - } - - // Test the constructor from hid - CompoundType t1_from_hid(t1); - CHECK(t1 == t1_from_hid); - - CompoundType t2_from_hid(t2); - CHECK(t2 == t2_from_hid); - - // Back from a DataType - CHECK_NOTHROW(CompoundType(DataType(t1_from_hid))); - CHECK_THROWS(CompoundType(AtomicType{})); -} - -struct GrandChild { - uint32_t gcm1; - uint32_t gcm2; - uint32_t gcm3; -}; - -struct Child { - GrandChild grandChild; - uint32_t cm1; -}; - -struct Parent { - uint32_t pm1; - Child child; -}; - -CompoundType create_compound_GrandChild() { - auto t2 = AtomicType(); - CompoundType t1({{"gcm1", AtomicType{}}, - {"gcm2", AtomicType{}}, - { - "gcm3", - t2, - }}); - return t1; -} - -CompoundType create_compound_Child() { - auto nestedType = create_compound_GrandChild(); - return CompoundType{{{ - "grandChild", - nestedType, - }, - {"cm1", AtomicType{}}}}; -} - -CompoundType create_compound_Parent() { - auto nestedType = create_compound_Child(); - return CompoundType{{{"pm1", AtomicType{}}, - { - "child", - nestedType, - }}}; -} - -HIGHFIVE_REGISTER_TYPE(GrandChild, create_compound_GrandChild) -HIGHFIVE_REGISTER_TYPE(Child, create_compound_Child) -HIGHFIVE_REGISTER_TYPE(Parent, create_compound_Parent) - -TEST_CASE("HighFiveCompoundsNested") { - const std::string file_name("nested_compounds_test.h5"); - const std::string dataset_name("/a"); - - { // Write - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - auto type = create_compound_Parent(); - - auto dataset = file.createDataSet(dataset_name, DataSpace(2), type); - CHECK(dataset.getDataType().getSize() == 20); - - std::vector csl = {Parent{1, Child{GrandChild{1, 1, 1}, 1}}, - Parent{2, Child{GrandChild{3, 4, 5}, 6}}}; - dataset.write(csl); - } - - { // Read - File file(file_name, File::ReadOnly); - std::vector result; - auto dataset = file.getDataSet(dataset_name); - CHECK(dataset.getDataType().getSize() == 20); - dataset.select({0}, {2}).read(result); - - CHECK(result.size() == 2); - CHECK(result[0].pm1 == 1); - CHECK(result[0].child.grandChild.gcm1 == 1); - CHECK(result[0].child.grandChild.gcm2 == 1); - CHECK(result[0].child.grandChild.gcm3 == 1); - CHECK(result[0].child.cm1 
== 1); - CHECK(result[1].pm1 == 2); - CHECK(result[1].child.grandChild.gcm1 == 3); - CHECK(result[1].child.grandChild.gcm2 == 4); - CHECK(result[1].child.grandChild.gcm3 == 5); - CHECK(result[1].child.cm1 == 6); - } -} - -template -struct Record { - double d = 3.14; - int i = 42; - char s[N]; -}; - -template -void fill(Record& r) { - constexpr char ref[] = "123456789a123456789b123456789c123456789d123456789e123456789f"; - std::copy(ref, ref + N - 1, r.s); - r.s[N - 1] = '\0'; -} - -template -CompoundType rec_t() { - using RecN = Record; - return {{"d", create_datatype()}, - {"i", create_datatype()}, - {"s", create_datatype()}}; -} - -HIGHFIVE_REGISTER_TYPE(Record<4>, rec_t<4>) -HIGHFIVE_REGISTER_TYPE(Record<8>, rec_t<8>) -HIGHFIVE_REGISTER_TYPE(Record<9>, rec_t<9>) - -template -void save(File& f) { - const size_t numRec = 2; - std::vector> recs(numRec); - fill(recs[0]); - fill(recs[1]); - auto dataset = f.createDataSet>("records" + std::to_string(N), DataSpace::From(recs)); - dataset.write(recs); -} - -template -std::string check(File& f) { - const size_t numRec = 2; - std::vector> recs(numRec); - f.getDataSet("records" + std::to_string(N)).read(recs); - return std::string(recs[0].s); -} - -TEST_CASE("HighFiveCompoundsSeveralPadding") { - const std::string file_name("padded_compounds_test.h5"); - - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - { // Write - // 4 have been choose because no padding - // /* offset | size */ type = struct Record<4> { - // /* 0 | 8 */ double d; - // /* 8 | 4 */ int i; - // /* 12 | 4 */ char s[4]; - // total size (bytes): 16 - CHECK_NOTHROW(save<4>(file)); - // 8 have been choose because there is a padding - // /* offset | size */ type = struct Record<8> { - // /* 0 | 8 */ double d; - // /* 8 | 4 */ int i; - // /* 12 | 8 */ char s[8]; - // /* XXX 4-byte padding */ - // total size (bytes): 24 - CHECK_NOTHROW(save<8>(file)); - // 9 have been choose because there should not be a padding on 9 - // /* offset | size */ type = struct Record<9> { - // /* 0 | 8 */ double d; - // /* 8 | 4 */ int i; - // /* 12 | 9 */ char s[9]; - // /* XXX 3-byte padding */ - // total size (bytes): 24 - CHECK_NOTHROW(save<9>(file)); - } - - { // Read - CHECK(check<4>(file) == std::string("123")); - CHECK(check<8>(file) == std::string("1234567")); - CHECK(check<9>(file) == std::string("12345678")); - } -} - -enum Position { - highfive_first = 1, - highfive_second = 2, - highfive_third = 3, - highfive_last = -1, -}; - -enum class Direction : signed char { - Forward = 1, - Backward = -1, - Left = -2, - Right = 2, -}; - -EnumType create_enum_position() { - return {{"highfive_first", Position::highfive_first}, - {"highfive_second", Position::highfive_second}, - {"highfive_third", Position::highfive_third}, - {"highfive_last", Position::highfive_last}}; -} -HIGHFIVE_REGISTER_TYPE(Position, create_enum_position) - -EnumType create_enum_direction() { - return {{"Forward", Direction::Forward}, - {"Backward", Direction::Backward}, - {"Left", Direction::Left}, - {"Right", Direction::Right}}; -} -HIGHFIVE_REGISTER_TYPE(Direction, create_enum_direction) - -TEST_CASE("HighFiveEnum") { - const std::string file_name("enum_test.h5"); - const std::string dataset_name1("/a"); - const std::string dataset_name2("/b"); - - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - - { // Unscoped enum - auto e1 = create_enum_position(); - e1.commit(file, "Position"); - - auto dataset = file.createDataSet(dataset_name1, DataSpace(1), e1); - 
dataset.write(Position::highfive_first); - - file.flush(); - - Position result; - dataset.select(ElementSet({0})).read(result); - - CHECK(result == Position::highfive_first); - } - - { // Scoped enum - auto e1 = create_enum_direction(); - e1.commit(file, "Direction"); - - auto dataset = file.createDataSet(dataset_name2, DataSpace(5), e1); - std::vector robot_moves({Direction::Backward, - Direction::Forward, - Direction::Forward, - Direction::Left, - Direction::Left}); - dataset.write(robot_moves); - - file.flush(); - - std::vector result; - dataset.read(result); - - CHECK(result[0] == Direction::Backward); - CHECK(result[1] == Direction::Forward); - CHECK(result[2] == Direction::Forward); - CHECK(result[3] == Direction::Left); - CHECK(result[4] == Direction::Left); - } -} - -TEST_CASE("HighFiveReadType") { - const std::string file_name("readtype_test.h5"); - const std::string datatype_name1("my_type"); - const std::string datatype_name2("position"); - - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - - CompoundType t1 = create_compound_csl1(); - t1.commit(file, datatype_name1); - - CompoundType t2 = file.getDataType(datatype_name1); - - auto t3 = create_enum_position(); - t3.commit(file, datatype_name2); - - DataType t4 = file.getDataType(datatype_name2); - - CHECK(t2 == t1); - CHECK(t4 == t3); -} - - TEST_CASE("DirectWriteBool") { SECTION("Basic compatibility") { CHECK(sizeof(bool) == sizeof(details::Boolean)); diff --git a/tests/unit/tests_high_five_data_type.cpp b/tests/unit/tests_high_five_data_type.cpp new file mode 100644 index 000000000..b40050f21 --- /dev/null +++ b/tests/unit/tests_high_five_data_type.cpp @@ -0,0 +1,410 @@ +/* + * Copyright (c), 2017-2023, Blue Brain Project - EPFL + * + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +#include +#include +#include + +#include +#include "tests_high_five.hpp" + +using namespace HighFive; +using Catch::Matchers::Equals; + +TEST_CASE("Simple test for type equality") { + AtomicType d_var; + AtomicType size_var; + AtomicType d_var_test; + AtomicType size_var_cpy(size_var); + AtomicType int_var; + AtomicType uint_var; + + // check different type matching + CHECK(d_var == d_var_test); + CHECK(d_var != size_var); + + // check type copy matching + CHECK(size_var_cpy == size_var); + + // check sign change not matching + CHECK(int_var != uint_var); +} + + +struct CSL1 { + int m1; + int m2; + int m3; +}; + +struct CSL2 { + CSL1 csl1; +}; + +CompoundType create_compound_csl1() { + auto t2 = AtomicType(); + CompoundType t1({{"m1", AtomicType{}}, {"m2", AtomicType{}}, {"m3", t2}}); + + return t1; +} + +CompoundType create_compound_csl2() { + CompoundType t1 = create_compound_csl1(); + + CompoundType t2({{"csl1", t1}}); + + return t2; +} + +HIGHFIVE_REGISTER_TYPE(CSL1, create_compound_csl1) +HIGHFIVE_REGISTER_TYPE(CSL2, create_compound_csl2) + +TEST_CASE("HighFiveCompounds") { + const std::string file_name("compounds_test.h5"); + const std::string dataset_name1("/a"); + const std::string dataset_name2("/b"); + + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + auto t3 = AtomicType(); + CompoundType t1 = create_compound_csl1(); + t1.commit(file, "my_type"); + + CompoundType t2 = create_compound_csl2(); + t2.commit(file, "my_type2"); + + { // Not nested + auto dataset = file.createDataSet(dataset_name1, DataSpace(2), t1); + + std::vector csl = {{1, 1, 1}, {2, 3, 4}}; + dataset.write(csl); + + file.flush(); + + std::vector result; + dataset.select({0}, {2}).read(result); + + CHECK(result.size() == 2); + CHECK(result[0].m1 == 1); + CHECK(result[0].m2 == 1); + CHECK(result[0].m3 == 1); + CHECK(result[1].m1 == 2); + CHECK(result[1].m2 == 3); + CHECK(result[1].m3 == 4); + } + + { // Nested + auto dataset = file.createDataSet(dataset_name2, DataSpace(2), t2); + + std::vector csl = {{{1, 1, 1}, {2, 3, 4}}}; + dataset.write(csl); + + file.flush(); + std::vector result = {{{1, 1, 1}, {2, 3, 4}}}; + dataset.select({0}, {2}).read(result); + + CHECK(result.size() == 2); + CHECK(result[0].csl1.m1 == 1); + CHECK(result[0].csl1.m2 == 1); + CHECK(result[0].csl1.m3 == 1); + CHECK(result[1].csl1.m1 == 2); + CHECK(result[1].csl1.m2 == 3); + CHECK(result[1].csl1.m3 == 4); + } + + // Test the constructor from hid + CompoundType t1_from_hid(t1); + CHECK(t1 == t1_from_hid); + + CompoundType t2_from_hid(t2); + CHECK(t2 == t2_from_hid); + + // Back from a DataType + CHECK_NOTHROW(CompoundType(DataType(t1_from_hid))); + CHECK_THROWS(CompoundType(AtomicType{})); +} + +struct GrandChild { + uint32_t gcm1; + uint32_t gcm2; + uint32_t gcm3; +}; + +struct Child { + GrandChild grandChild; + uint32_t cm1; +}; + +struct Parent { + uint32_t pm1; + Child child; +}; + +CompoundType create_compound_GrandChild() { + auto t2 = AtomicType(); + CompoundType t1({{"gcm1", AtomicType{}}, + {"gcm2", AtomicType{}}, + { + "gcm3", + t2, + }}); + return t1; +} + +CompoundType create_compound_Child() { + auto nestedType = create_compound_GrandChild(); + return CompoundType{{{ + "grandChild", + nestedType, + }, + {"cm1", AtomicType{}}}}; +} + +CompoundType 
create_compound_Parent() { + auto nestedType = create_compound_Child(); + return CompoundType{{{"pm1", AtomicType{}}, + { + "child", + nestedType, + }}}; +} + +HIGHFIVE_REGISTER_TYPE(GrandChild, create_compound_GrandChild) +HIGHFIVE_REGISTER_TYPE(Child, create_compound_Child) +HIGHFIVE_REGISTER_TYPE(Parent, create_compound_Parent) + +TEST_CASE("HighFiveCompoundsNested") { + const std::string file_name("nested_compounds_test.h5"); + const std::string dataset_name("/a"); + + { // Write + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + auto type = create_compound_Parent(); + + auto dataset = file.createDataSet(dataset_name, DataSpace(2), type); + CHECK(dataset.getDataType().getSize() == 20); + + std::vector csl = {Parent{1, Child{GrandChild{1, 1, 1}, 1}}, + Parent{2, Child{GrandChild{3, 4, 5}, 6}}}; + dataset.write(csl); + } + + { // Read + File file(file_name, File::ReadOnly); + std::vector result; + auto dataset = file.getDataSet(dataset_name); + CHECK(dataset.getDataType().getSize() == 20); + dataset.select({0}, {2}).read(result); + + CHECK(result.size() == 2); + CHECK(result[0].pm1 == 1); + CHECK(result[0].child.grandChild.gcm1 == 1); + CHECK(result[0].child.grandChild.gcm2 == 1); + CHECK(result[0].child.grandChild.gcm3 == 1); + CHECK(result[0].child.cm1 == 1); + CHECK(result[1].pm1 == 2); + CHECK(result[1].child.grandChild.gcm1 == 3); + CHECK(result[1].child.grandChild.gcm2 == 4); + CHECK(result[1].child.grandChild.gcm3 == 5); + CHECK(result[1].child.cm1 == 6); + } +} + +template +struct Record { + double d = 3.14; + int i = 42; + char s[N]; +}; + +template +void fill(Record& r) { + constexpr char ref[] = "123456789a123456789b123456789c123456789d123456789e123456789f"; + std::copy(ref, ref + N - 1, r.s); + r.s[N - 1] = '\0'; +} + +template +CompoundType rec_t() { + using RecN = Record; + return {{"d", create_datatype()}, + {"i", create_datatype()}, + {"s", create_datatype()}}; +} + +HIGHFIVE_REGISTER_TYPE(Record<4>, rec_t<4>) +HIGHFIVE_REGISTER_TYPE(Record<8>, rec_t<8>) +HIGHFIVE_REGISTER_TYPE(Record<9>, rec_t<9>) + +template +void save(File& f) { + const size_t numRec = 2; + std::vector> recs(numRec); + fill(recs[0]); + fill(recs[1]); + auto dataset = f.createDataSet>("records" + std::to_string(N), DataSpace::From(recs)); + dataset.write(recs); +} + +template +std::string check(File& f) { + const size_t numRec = 2; + std::vector> recs(numRec); + f.getDataSet("records" + std::to_string(N)).read(recs); + return std::string(recs[0].s); +} + +TEST_CASE("HighFiveCompoundsSeveralPadding") { + const std::string file_name("padded_compounds_test.h5"); + + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + { // Write + // 4 have been choose because no padding + // /* offset | size */ type = struct Record<4> { + // /* 0 | 8 */ double d; + // /* 8 | 4 */ int i; + // /* 12 | 4 */ char s[4]; + // total size (bytes): 16 + CHECK_NOTHROW(save<4>(file)); + // 8 have been choose because there is a padding + // /* offset | size */ type = struct Record<8> { + // /* 0 | 8 */ double d; + // /* 8 | 4 */ int i; + // /* 12 | 8 */ char s[8]; + // /* XXX 4-byte padding */ + // total size (bytes): 24 + CHECK_NOTHROW(save<8>(file)); + // 9 have been choose because there should not be a padding on 9 + // /* offset | size */ type = struct Record<9> { + // /* 0 | 8 */ double d; + // /* 8 | 4 */ int i; + // /* 12 | 9 */ char s[9]; + // /* XXX 3-byte padding */ + // total size (bytes): 24 + CHECK_NOTHROW(save<9>(file)); + } + + { // Read + CHECK(check<4>(file) == 
std::string("123")); + CHECK(check<8>(file) == std::string("1234567")); + CHECK(check<9>(file) == std::string("12345678")); + } +} + +enum Position { + highfive_first = 1, + highfive_second = 2, + highfive_third = 3, + highfive_last = -1, +}; + +enum class Direction : signed char { + Forward = 1, + Backward = -1, + Left = -2, + Right = 2, +}; + +EnumType create_enum_position() { + return {{"highfive_first", Position::highfive_first}, + {"highfive_second", Position::highfive_second}, + {"highfive_third", Position::highfive_third}, + {"highfive_last", Position::highfive_last}}; +} +HIGHFIVE_REGISTER_TYPE(Position, create_enum_position) + +EnumType create_enum_direction() { + return {{"Forward", Direction::Forward}, + {"Backward", Direction::Backward}, + {"Left", Direction::Left}, + {"Right", Direction::Right}}; +} +HIGHFIVE_REGISTER_TYPE(Direction, create_enum_direction) + +TEST_CASE("HighFiveEnum") { + const std::string file_name("enum_test.h5"); + const std::string dataset_name1("/a"); + const std::string dataset_name2("/b"); + + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + { // Unscoped enum + auto e1 = create_enum_position(); + e1.commit(file, "Position"); + + auto dataset = file.createDataSet(dataset_name1, DataSpace(1), e1); + dataset.write(Position::highfive_first); + + file.flush(); + + Position result; + dataset.select(ElementSet({0})).read(result); + + CHECK(result == Position::highfive_first); + } + + { // Scoped enum + auto e1 = create_enum_direction(); + e1.commit(file, "Direction"); + + auto dataset = file.createDataSet(dataset_name2, DataSpace(5), e1); + std::vector robot_moves({Direction::Backward, + Direction::Forward, + Direction::Forward, + Direction::Left, + Direction::Left}); + dataset.write(robot_moves); + + file.flush(); + + std::vector result; + dataset.read(result); + + CHECK(result[0] == Direction::Backward); + CHECK(result[1] == Direction::Forward); + CHECK(result[2] == Direction::Forward); + CHECK(result[3] == Direction::Left); + CHECK(result[4] == Direction::Left); + } +} + +TEST_CASE("HighFiveReadType") { + const std::string file_name("readtype_test.h5"); + const std::string datatype_name1("my_type"); + const std::string datatype_name2("position"); + + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + CompoundType t1 = create_compound_csl1(); + t1.commit(file, datatype_name1); + + CompoundType t2 = file.getDataType(datatype_name1); + + auto t3 = create_enum_position(); + t3.commit(file, datatype_name2); + + DataType t4 = file.getDataType(datatype_name2); + + CHECK(t2 == t1); + CHECK(t4 == t3); +} From 859d97b759dbcb3796bf570f0dc2784c30583774 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 08:27:18 +0100 Subject: [PATCH 08/97] Update doxygen-awesome to v2.3.1 (#882) Co-authored-by: github-actions --- doc/doxygen-awesome-css/doxygen-awesome.css | 255 +++++++++++++++----- 1 file changed, 197 insertions(+), 58 deletions(-) diff --git a/doc/doxygen-awesome-css/doxygen-awesome.css b/doc/doxygen-awesome-css/doxygen-awesome.css index 08238977a..ac7f0608e 100644 --- a/doc/doxygen-awesome-css/doxygen-awesome.css +++ b/doc/doxygen-awesome-css/doxygen-awesome.css @@ -80,21 +80,21 @@ html { --toc-max-height: calc(100vh - 2 * var(--spacing-medium) - 85px); /* colors for various content boxes: @warning, @note, @deprecated @bug */ - --warning-color: #f8d1cc; - --warning-color-dark: #b61825; - --warning-color-darker: #75070f; - --note-color: 
#faf3d8; - --note-color-dark: #f3a600; - --note-color-darker: #5f4204; - --todo-color: #e4f3ff; - --todo-color-dark: #1879C4; - --todo-color-darker: #274a5c; + --warning-color: #faf3d8; + --warning-color-dark: #f3a600; + --warning-color-darker: #5f4204; + --note-color: #e4f3ff; + --note-color-dark: #1879C4; + --note-color-darker: #274a5c; + --todo-color: #e4dafd; + --todo-color-dark: #5b2bdd; + --todo-color-darker: #2a0d72; --deprecated-color: #ecf0f3; --deprecated-color-dark: #5b6269; --deprecated-color-darker: #43454a; - --bug-color: #e4dafd; - --bug-color-dark: #5b2bdd; - --bug-color-darker: #2a0d72; + --bug-color: #f8d1cc; + --bug-color-dark: #b61825; + --bug-color-darker: #75070f; --invariant-color: #d8f1e3; --invariant-color-dark: #44b86f; --invariant-color-darker: #265532; @@ -169,6 +169,8 @@ html { --webkit-scrollbar-size: 7px; --webkit-scrollbar-padding: 4px; --webkit-scrollbar-color: var(--separator-color); + + --animation-duration: .12s } @media screen and (max-width: 767px) { @@ -208,21 +210,21 @@ html { --blockquote-background: #222325; --blockquote-foreground: #7e8c92; - --warning-color: #2e1917; - --warning-color-dark: #ad2617; - --warning-color-darker: #f5b1aa; - --note-color: #3b2e04; - --note-color-dark: #f1b602; - --note-color-darker: #ceb670; - --todo-color: #163750; - --todo-color-dark: #1982D2; - --todo-color-darker: #dcf0fa; + --warning-color: #3b2e04; + --warning-color-dark: #f1b602; + --warning-color-darker: #ceb670; + --note-color: #163750; + --note-color-dark: #1982D2; + --note-color-darker: #dcf0fa; + --todo-color: #2a2536; + --todo-color-dark: #7661b3; + --todo-color-darker: #ae9ed6; --deprecated-color: #2e323b; --deprecated-color-dark: #738396; --deprecated-color-darker: #abb0bd; - --bug-color: #2a2536; - --bug-color-dark: #7661b3; - --bug-color-darker: #ae9ed6; + --bug-color: #2e1917; + --bug-color-dark: #ad2617; + --bug-color-darker: #f5b1aa; --invariant-color: #303a35; --invariant-color-dark: #76ce96; --invariant-color-darker: #cceed5; @@ -269,21 +271,21 @@ html.dark-mode { --blockquote-background: #222325; --blockquote-foreground: #7e8c92; - --warning-color: #2e1917; - --warning-color-dark: #ad2617; - --warning-color-darker: #f5b1aa; - --note-color: #3b2e04; - --note-color-dark: #f1b602; - --note-color-darker: #ceb670; - --todo-color: #163750; - --todo-color-dark: #1982D2; - --todo-color-darker: #dcf0fa; + --warning-color: #3b2e04; + --warning-color-dark: #f1b602; + --warning-color-darker: #ceb670; + --note-color: #163750; + --note-color-dark: #1982D2; + --note-color-darker: #dcf0fa; + --todo-color: #2a2536; + --todo-color-dark: #7661b3; + --todo-color-darker: #ae9ed6; --deprecated-color: #2e323b; --deprecated-color-dark: #738396; --deprecated-color-darker: #abb0bd; - --bug-color: #2a2536; - --bug-color-dark: #7661b3; - --bug-color-darker: #ae9ed6; + --bug-color: #2e1917; + --bug-color-dark: #ad2617; + --bug-color-darker: #f5b1aa; --invariant-color: #303a35; --invariant-color-dark: #76ce96; --invariant-color-darker: #cceed5; @@ -316,7 +318,7 @@ body, table, div, p, dl, #nav-tree .label, .title, } h1, h2, h3, h4, h5 { - margin-top: .9em; + margin-top: 1em; font-weight: 600; line-height: initial; } @@ -1174,7 +1176,7 @@ div.toc li a.aboveActive { margin-right: var(--spacing-small); margin-bottom: calc(var(--navigation-font-size) / 4); transform: rotate(-90deg); - transition: transform 0.25s ease-out; + transition: transform var(--animation-duration) ease-out; } div.contents .toc.interactive.open > h3::before { @@ -1231,9 +1233,13 @@ div.fragment, 
pre.fragment { .contents > div.fragment, .textblock > div.fragment, .textblock > pre.fragment, + .textblock > .tabbed > ul > li > div.fragment, + .textblock > .tabbed > ul > li > pre.fragment, .contents > .doxygen-awesome-fragment-wrapper > div.fragment, .textblock > .doxygen-awesome-fragment-wrapper > div.fragment, - .textblock > .doxygen-awesome-fragment-wrapper > pre.fragment { + .textblock > .doxygen-awesome-fragment-wrapper > pre.fragment, + .textblock > .tabbed > ul > li > .doxygen-awesome-fragment-wrapper > div.fragment, + .textblock > .tabbed > ul > li > .doxygen-awesome-fragment-wrapper > pre.fragment { margin: var(--spacing-medium) calc(0px - var(--spacing-large)); border-radius: 0; border-left: 0; @@ -1323,8 +1329,9 @@ div.fragment span.lineno a { color: var(--fragment-link) !important; } -div.fragment .line:first-child .lineno { +div.fragment > .line:first-child .lineno { box-shadow: -999999px 0px 0 999999px var(--fragment-linenumber-background), -999998px 0px 0 999999px var(--fragment-linenumber-border); + background-color: var(--fragment-linenumber-background) !important; } div.line { @@ -1383,8 +1390,8 @@ dl.todo { color: var(--todo-color-darker); } -dl.todo dt { - color: var(--todo-color-dark); +dl.todo dt a { + color: var(--todo-color-dark) !important; } dl.bug dt a { @@ -1866,7 +1873,7 @@ div.dynheader img[src="closed.png"] { display: block; float: left; margin-left: -10px; - transition: transform 0.25s ease-out; + transition: transform var(--animation-duration) ease-out; } table.memberdecls img { @@ -2344,7 +2351,7 @@ doxygen-awesome-dark-mode-toggle { } doxygen-awesome-dark-mode-toggle > svg { - transition: transform .1s ease-in-out; + transition: transform var(--animation-duration) ease-in-out; } doxygen-awesome-dark-mode-toggle:active > svg { @@ -2429,7 +2436,7 @@ a.anchorlink { text-decoration: none; opacity: .15; display: none; - transition: opacity .1s ease-in-out, color .1s ease-in-out; + transition: opacity var(--animation-duration) ease-in-out, color var(--animation-duration) ease-in-out; } a.anchorlink svg { @@ -2453,15 +2460,10 @@ h2:hover a.anchorlink, h1:hover a.anchorlink, h3:hover a.anchorlink, h4:hover a. Optional tab feature */ -.tabbed { - margin: var(--spacing-medium) auto; -} - .tabbed ul { padding-inline-start: 0px; margin: 0; padding: var(--spacing-small) 0; - border-bottom: 1px solid var(--separator-color); } .tabbed li { @@ -2484,24 +2486,46 @@ h2:hover a.anchorlink, h1:hover a.anchorlink, h3:hover a.anchorlink, h4:hover a. 
flex-direction: row; } +@media screen and (max-width: 767px) { + .tabs-overview-container { + margin: 0 calc(0px - var(--spacing-large)); + } + .tabs-overview { + padding: 0 var(--spacing-large) + } +} + .tabs-overview button.tab-button { color: var(--page-foreground-color); margin: 0; border: none; background: transparent; - padding: var(--spacing-small) 0; + padding: calc(var(--spacing-large) / 2) 0; display: inline-block; font-size: var(--page-font-size); cursor: pointer; box-shadow: 0 1px 0 0 var(--separator-color); position: relative; + + -webkit-tap-highlight-color: transparent; +} + +.tabs-overview button.tab-button .tab-title::before { + display: block; + content: attr(title); + font-weight: 600; + height: 0; + overflow: hidden; + visibility: hidden; } .tabs-overview button.tab-button .tab-title { float: left; white-space: nowrap; - padding: var(--spacing-small) var(--spacing-large); + font-weight: normal; + padding: calc(var(--spacing-large) / 2) var(--spacing-large); border-radius: var(--border-radius-medium); + transition: background-color var(--animation-duration) ease-in-out, font-weight var(--animation-duration) ease-in-out; } .tabs-overview button.tab-button:not(:last-child) .tab-title { @@ -2513,18 +2537,133 @@ h2:hover a.anchorlink, h1:hover a.anchorlink, h3:hover a.anchorlink, h4:hover a. box-shadow: none; } -.tabs-overview button.tab-button.active { - color: var(--primary-color); +.tabs-overview button.tab-button.active .tab-title { + font-weight: 600; } -.tabs-overview button.tab-button.active::after { +.tabs-overview button.tab-button::after { content: ''; display: block; position: absolute; - left: 0px; + left: 0; bottom: 0; - right: 0px; - height: 3px; + right: 0; + height: 0; + width: 0%; + margin: 0 auto; border-radius: var(--border-radius-small) var(--border-radius-small) 0 0; background-color: var(--primary-color); + transition: width var(--animation-duration) ease-in-out, height var(--animation-duration) ease-in-out; +} + +.tabs-overview button.tab-button.active::after { + width: 100%; + box-sizing: border-box; + height: 3px; +} + + +/* + Navigation Buttons +*/ + +.section_buttons:not(:empty) { + margin-top: calc(var(--spacing-large) * 3); +} + +.section_buttons table.markdownTable { + display: block; + width: 100%; +} + +.section_buttons table.markdownTable tbody { + display: table !important; + width: 100%; + box-shadow: none; + border-spacing: 10px; +} + +.section_buttons table.markdownTable td { + padding: 0; +} + +.section_buttons table.markdownTable th { + display: none; +} + +.section_buttons table.markdownTable tr.markdownTableHead { + border: none; +} + +.section_buttons tr th, .section_buttons tr td { + background: none; + border: none; + padding: var(--spacing-large) 0 var(--spacing-small); +} + +.section_buttons a { + display: inline-block; + border: 1px solid var(--separator-color); + border-radius: var(--border-radius-medium); + color: var(--page-secondary-foreground-color) !important; + text-decoration: none; + transition: color var(--animation-duration) ease-in-out, background-color var(--animation-duration) ease-in-out; +} + +.section_buttons a:hover { + color: var(--page-foreground-color) !important; + background-color: var(--odd-color); +} + +.section_buttons tr td.markdownTableBodyLeft a { + padding: var(--spacing-medium) var(--spacing-large) var(--spacing-medium) calc(var(--spacing-large) / 2); +} + +.section_buttons tr td.markdownTableBodyRight a { + padding: var(--spacing-medium) calc(var(--spacing-large) / 2) var(--spacing-medium) 
var(--spacing-large); +} + +.section_buttons tr td.markdownTableBodyLeft a::before, +.section_buttons tr td.markdownTableBodyRight a::after { + color: var(--page-secondary-foreground-color) !important; + display: inline-block; + transition: color .08s ease-in-out, transform .09s ease-in-out; +} + +.section_buttons tr td.markdownTableBodyLeft a::before { + content: '〈'; + padding-right: var(--spacing-large); +} + + +.section_buttons tr td.markdownTableBodyRight a::after { + content: '〉'; + padding-left: var(--spacing-large); +} + + +.section_buttons tr td.markdownTableBodyLeft a:hover::before { + color: var(--page-foreground-color) !important; + transform: translateX(-3px); +} + +.section_buttons tr td.markdownTableBodyRight a:hover::after { + color: var(--page-foreground-color) !important; + transform: translateX(3px); +} + +@media screen and (max-width: 450px) { + .section_buttons a { + width: 100%; + box-sizing: border-box; + } + + .section_buttons tr td:nth-of-type(1).markdownTableBodyLeft a { + border-radius: var(--border-radius-medium) 0 0 var(--border-radius-medium); + border-right: none; + } + + .section_buttons tr td:nth-of-type(2).markdownTableBodyRight a { + border-radius: 0 var(--border-radius-medium) var(--border-radius-medium) 0; + } } From 1b50bc113eaed322e3435e197bd57453a0cc7b28 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 4 Dec 2023 10:02:25 +0100 Subject: [PATCH 09/97] Wrap all used H5T functions. (#883) --- include/highfive/H5DataType.hpp | 20 +-- include/highfive/bits/H5Attribute_misc.hpp | 2 +- include/highfive/bits/H5DataType_misc.hpp | 50 +++--- include/highfive/bits/H5Node_traits_misc.hpp | 11 +- include/highfive/bits/H5Slice_traits_misc.hpp | 3 +- include/highfive/bits/h5_wrapper.hpp | 12 ++ include/highfive/bits/h5t_wrapper.hpp | 158 ++++++++++++++++++ 7 files changed, 204 insertions(+), 52 deletions(-) create mode 100644 include/highfive/bits/h5_wrapper.hpp diff --git a/include/highfive/H5DataType.hpp b/include/highfive/H5DataType.hpp index 1c28626bf..0d596965f 100644 --- a/include/highfive/H5DataType.hpp +++ b/include/highfive/H5DataType.hpp @@ -19,6 +19,9 @@ #include "bits/string_padding.hpp" #include "H5PropertyList.hpp" +#include "bits/h5_wrapper.hpp" +#include "bits/h5t_wrapper.hpp" + namespace HighFive { @@ -235,21 +238,16 @@ class CompoundType: public DataType { ss << "hid " << _hid << " does not refer to a compound data type"; throw DataTypeException(ss.str()); } - int result = H5Tget_nmembers(_hid); - if (result < 0) { - throw DataTypeException("Could not get members of compound datatype"); - } - size_t n_members = static_cast(result); + size_t n_members = static_cast(detail::h5t_get_nmembers(_hid)); members.reserve(n_members); for (unsigned i = 0; i < n_members; i++) { - char* name = H5Tget_member_name(_hid, i); - size_t offset = H5Tget_member_offset(_hid, i); - hid_t member_hid = H5Tget_member_type(_hid, i); + char* name = detail::h5t_get_member_name(_hid, i); + size_t offset = detail::h5t_get_member_offset(_hid, i); + hid_t member_hid = detail::h5t_get_member_type(_hid, i); DataType member_type{member_hid}; members.emplace_back(std::string(name), member_type, offset); - if (H5free_memory(name) < 0) { - throw DataTypeException("Could not free names from the compound datatype"); - } + + detail::h5_free_memory(name); } } diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 651678829..939e111fd 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ 
b/include/highfive/bits/H5Attribute_misc.hpp @@ -94,7 +94,7 @@ inline void Attribute::read(T& array) const { if (c == DataTypeClass::VarLen || t.isVariableStr()) { #if H5_VERSION_GE(1, 12, 0) // This one have been created in 1.12.0 - (void) H5Treclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); + (void) detail::h5t_reclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); #else // This one is deprecated since 1.12.0 (void) H5Dvlen_reclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); diff --git a/include/highfive/bits/H5DataType_misc.hpp b/include/highfive/bits/H5DataType_misc.hpp index e579d611d..698d8fa28 100644 --- a/include/highfive/bits/H5DataType_misc.hpp +++ b/include/highfive/bits/H5DataType_misc.hpp @@ -16,7 +16,6 @@ #endif #include -#include #ifdef H5_USE_HALF_FLOAT #include @@ -38,7 +37,7 @@ inline bool DataType::empty() const noexcept { } inline DataTypeClass DataType::getClass() const { - return convert_type_class(H5Tget_class(_hid)); + return convert_type_class(detail::h5t_get_class(_hid)); } inline size_t DataType::getSize() const { @@ -46,7 +45,7 @@ inline size_t DataType::getSize() const { } inline bool DataType::operator==(const DataType& other) const { - return (H5Tequal(_hid, other._hid) > 0); + return detail::h5t_equal(_hid, other._hid) > 0; } inline bool DataType::operator!=(const DataType& other) const { @@ -54,11 +53,7 @@ inline bool DataType::operator!=(const DataType& other) const { } inline bool DataType::isVariableStr() const { - auto var_value = H5Tis_variable_str(_hid); - if (var_value < 0) { - HDF5ErrMapper::ToException("Unable to define datatype size to variable"); - } - return static_cast(var_value); + return detail::h5t_is_variable_str(_hid) > 0; } inline bool DataType::isFixedLenStr() const { @@ -66,7 +61,7 @@ inline bool DataType::isFixedLenStr() const { } inline bool DataType::isReference() const { - return H5Tequal(_hid, H5T_STD_REF_OBJ) > 0; + return detail::h5t_equal(_hid, H5T_STD_REF_OBJ) > 0; } inline StringType DataType::asStringType() const { @@ -183,11 +178,11 @@ template <> inline AtomicType::AtomicType() { _hid = detail::h5t_copy(H5T_NATIVE_FLOAT); // Sign position, exponent position, exponent size, mantissa position, mantissa size - H5Tset_fields(_hid, 15, 10, 5, 0, 10); + detail::h5t_set_fields(_hid, 15, 10, 5, 0, 10); // Total datatype size (in bytes) detail::h5t_set_size(_hid, 2); // Floating point exponent bias - H5Tset_ebias(_hid, 15); + detail::h5t_set_ebias(_hid, 15); } #endif @@ -316,8 +311,8 @@ inline AtomicType::AtomicType() { inline size_t find_first_atomic_member_size(hid_t hid) { // Recursive exit condition - if (H5Tget_class(hid) == H5T_COMPOUND) { - auto number_of_members = H5Tget_nmembers(hid); + if (detail::h5t_get_class(hid) == H5T_COMPOUND) { + auto number_of_members = detail::h5t_get_nmembers(hid); if (number_of_members == -1) { throw DataTypeException("Cannot get members of CompoundType with hid: " + std::to_string(hid)); @@ -327,11 +322,11 @@ inline size_t find_first_atomic_member_size(hid_t hid) { std::to_string(hid)); } - auto member_type = H5Tget_member_type(hid, 0); + auto member_type = detail::h5t_get_member_type(hid, 0); auto size = find_first_atomic_member_size(member_type); - H5Tclose(member_type); + detail::h5t_close(member_type); return size; - } else if (H5Tget_class(hid) == H5T_STRING) { + } else if (detail::h5t_get_class(hid) == H5T_STRING) { return 1; } return detail::h5t_get_size(hid); @@ -391,43 +386,36 @@ inline void CompoundType::create(size_t size) { } // Create 
the HDF5 type - if ((_hid = H5Tcreate(H5T_COMPOUND, size)) < 0) { - HDF5ErrMapper::ToException("Could not create new compound datatype"); - } + _hid = detail::h5t_create(H5T_COMPOUND, size); // Loop over all the members and insert them into the datatype for (const auto& member: members) { - if (H5Tinsert(_hid, member.name.c_str(), member.offset, member.base_type.getId()) < 0) { - HDF5ErrMapper::ToException("Could not add new member to datatype"); - } + detail::h5t_insert(_hid, member.name.c_str(), member.offset, member.base_type.getId()); } } #undef _H5_STRUCT_PADDING inline void CompoundType::commit(const Object& object, const std::string& name) const { - H5Tcommit2(object.getId(), name.c_str(), getId(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + detail::h5t_commit2( + object.getId(), name.c_str(), getId(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); } template inline void EnumType::create() { // Create the HDF5 type - if ((_hid = H5Tenum_create(AtomicType::type>{}.getId())) < 0) { - HDF5ErrMapper::ToException("Could not create new enum datatype"); - } + _hid = detail::h5t_enum_create(AtomicType::type>{}.getId()); // Loop over all the members and insert them into the datatype for (const auto& member: members) { - if (H5Tenum_insert(_hid, member.name.c_str(), &(member.value)) < 0) { - HDF5ErrMapper::ToException( - "Could not add new member to this enum datatype"); - } + detail::h5t_enum_insert(_hid, member.name.c_str(), &(member.value)); } } template inline void EnumType::commit(const Object& object, const std::string& name) const { - H5Tcommit2(object.getId(), name.c_str(), getId(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + detail::h5t_commit2( + object.getId(), name.c_str(), getId(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); } namespace { diff --git a/include/highfive/bits/H5Node_traits_misc.hpp b/include/highfive/bits/H5Node_traits_misc.hpp index 420842da2..d7ab82b61 100644 --- a/include/highfive/bits/H5Node_traits_misc.hpp +++ b/include/highfive/bits/H5Node_traits_misc.hpp @@ -177,14 +177,9 @@ inline Group NodeTraits::getGroup(const std::string& group_name) const template inline DataType NodeTraits::getDataType(const std::string& type_name, const DataTypeAccessProps& accessProps) const { - const auto hid = H5Topen2(static_cast(this)->getId(), - type_name.c_str(), - accessProps.getId()); - if (hid < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to open the datatype \"") + type_name + "\":"); - } - return DataType(hid); + return DataType(detail::h5t_open2(static_cast(this)->getId(), + type_name.c_str(), + accessProps.getId())); } template diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index 7b07c9abf..313343d4e 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -205,7 +205,8 @@ inline void SliceTraits::read(T& array, const DataTransferProps& xfer_ if (c == DataTypeClass::VarLen || t.isVariableStr()) { #if H5_VERSION_GE(1, 12, 0) // This one have been created in 1.12.0 - (void) H5Treclaim(t.getId(), mem_space.getId(), xfer_props.getId(), r.getPointer()); + (void) + detail::h5t_reclaim(t.getId(), mem_space.getId(), xfer_props.getId(), r.getPointer()); #else // This one is deprecated since 1.12.0 (void) H5Dvlen_reclaim(t.getId(), mem_space.getId(), xfer_props.getId(), r.getPointer()); diff --git a/include/highfive/bits/h5_wrapper.hpp b/include/highfive/bits/h5_wrapper.hpp new file mode 100644 index 000000000..cfde5b8e6 --- /dev/null +++ 
b/include/highfive/bits/h5_wrapper.hpp @@ -0,0 +1,12 @@ +#pragma once +#include + +namespace HighFive { +namespace detail { +inline void h5_free_memory(void* mem) { + if (H5free_memory(mem) < 0) { + throw DataTypeException("Could not free memory allocated by HDF5"); + } +} +} // namespace detail +} // namespace HighFive diff --git a/include/highfive/bits/h5t_wrapper.hpp b/include/highfive/bits/h5t_wrapper.hpp index a8be1b52b..f2c7bb098 100644 --- a/include/highfive/bits/h5t_wrapper.hpp +++ b/include/highfive/bits/h5t_wrapper.hpp @@ -1,5 +1,6 @@ #pragma once +#include #include namespace HighFive { @@ -59,6 +60,53 @@ inline void h5t_set_strpad(hid_t hid, H5T_str_t strpad) { } } +inline int h5t_get_nmembers(hid_t hid) { + auto result = H5Tget_nmembers(hid); + + if (result < 0) { + throw DataTypeException("Could not get members of compound datatype"); + } + + return result; +} + +inline char* h5t_get_member_name(hid_t type_id, unsigned membno) { + char* name = H5Tget_member_name(type_id, membno); + if (name == nullptr) { + throw DataTypeException("Failed to get member names of compound datatype"); + } + + return name; +} + + +inline size_t h5t_get_member_offset(hid_t type_id, unsigned membno) { + // Note, this function is peculiar. On failure it returns 0, yet 0 is also + // what's returned on failure. + return H5Tget_member_offset(type_id, membno); +} + +inline hid_t h5t_get_member_type(hid_t type_id, unsigned membno) { + hid_t member_id = H5Tget_member_type(type_id, membno); + + if (member_id < 0) { + throw DataTypeException("Failed to get member type of compound datatype"); + } + + return member_id; +} + +#if H5_VERSION_GE(1, 12, 0) +inline herr_t h5t_reclaim(hid_t type_id, hid_t space_id, hid_t plist_id, void* buf) { + herr_t err = H5Treclaim(type_id, space_id, plist_id, buf); + if (err < 0) { + throw DataTypeException("Failed to reclaim HDF5 internal memory"); + } + + return err; +} +#endif + inline H5T_class_t h5t_get_class(hid_t type_id) { H5T_class_t class_id = H5Tget_class(type_id); if (class_id == H5T_NO_CLASS) { @@ -68,5 +116,115 @@ inline H5T_class_t h5t_get_class(hid_t type_id) { return class_id; } +inline htri_t h5t_equal(hid_t type1_id, hid_t type2_id) { + htri_t equal = H5Tequal(type1_id, type2_id); + if (equal < 0) { + throw DataTypeException("Failed to compare two datatypes"); + } + + return equal; +} + +inline htri_t h5t_is_variable_str(hid_t type_id) { + htri_t is_variable = H5Tis_variable_str(type_id); + if (is_variable < 0) { + HDF5ErrMapper::ToException( + "Failed to check if string is variable length"); + } + return is_variable; +} + +inline herr_t h5t_set_fields(hid_t type_id, + size_t spos, + size_t epos, + size_t esize, + size_t mpos, + size_t msize) { + herr_t err = H5Tset_fields(type_id, spos, epos, esize, mpos, msize); + if (err < 0) { + HDF5ErrMapper::ToException( + "Failed to create custom floating point data type"); + } + return err; +} + +inline herr_t h5t_set_ebias(hid_t type_id, size_t ebias) { + herr_t err = H5Tset_ebias(type_id, ebias); + if (err < 0) { + HDF5ErrMapper::ToException( + "Failed to exponent bias of floating point data type"); + } + + return err; +} + +inline hid_t h5t_create(H5T_class_t type, size_t size) { + hid_t type_id = H5Tcreate(type, size); + if (type_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException("Failed to datatype"); + } + + return type_id; +} + +inline herr_t h5t_insert(hid_t parent_id, const char* name, size_t offset, hid_t member_id) { + herr_t err = H5Tinsert(parent_id, name, offset, member_id); + if (err < 0) { + 
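+        // H5Tinsert reports failure through a negative return value; surface it as a
+        // HighFive exception instead of only leaving a message on the HDF5 error stack.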
HDF5ErrMapper::ToException("Failed to not add new member to datatype"); + } + + return err; +} + +inline herr_t h5t_commit2(hid_t loc_id, + const char* name, + hid_t type_id, + hid_t lcpl_id, + hid_t tcpl_id, + hid_t tapl_id) { + herr_t err = H5Tcommit2(loc_id, name, type_id, lcpl_id, tcpl_id, tapl_id); + if (err < 0) { + HDF5ErrMapper::ToException("Failed to commit datatype"); + } + + return err; +} + +inline herr_t h5t_close(hid_t type_id) { + auto err = H5Tclose(type_id); + if (err < 0) { + HDF5ErrMapper::ToException("Failed to close datatype"); + } + + return err; +} + +inline hid_t h5t_enum_create(hid_t base_id) { + hid_t type_id = H5Tenum_create(base_id); + if (type_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException("Failed to create new enum datatype"); + } + return type_id; +} + +inline herr_t h5t_enum_insert(hid_t type, const char* name, const void* value) { + herr_t err = H5Tenum_insert(type, name, value); + if (err < 0) { + HDF5ErrMapper::ToException( + "Failed to add new member to this enum datatype"); + } + return err; +} + +inline hid_t h5t_open2(hid_t loc_id, const char* name, hid_t tapl_id) { + hid_t datatype_id = H5Topen2(loc_id, name, tapl_id); + if (datatype_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException( + std::string("Unable to open the datatype \"") + name + "\":"); + } + + return datatype_id; +} + } // namespace detail } // namespace HighFive From cb65179d7aa8df1888df1be2042b4b84d8d09bd5 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 4 Dec 2023 11:09:55 +0100 Subject: [PATCH 10/97] Wrap all used H5A functions. (#884) --- .../highfive/bits/H5Annotate_traits_misc.hpp | 59 +++----- include/highfive/bits/H5Attribute_misc.hpp | 20 +-- include/highfive/bits/h5a_wrapper.hpp | 131 ++++++++++++++++++ 3 files changed, 157 insertions(+), 53 deletions(-) create mode 100644 include/highfive/bits/h5a_wrapper.hpp diff --git a/include/highfive/bits/H5Annotate_traits_misc.hpp b/include/highfive/bits/H5Annotate_traits_misc.hpp index 85d2798fe..bf2be8a45 100644 --- a/include/highfive/bits/H5Annotate_traits_misc.hpp +++ b/include/highfive/bits/H5Annotate_traits_misc.hpp @@ -16,6 +16,7 @@ #include "H5Attribute_misc.hpp" #include "H5Iterables_misc.hpp" +#include "h5a_wrapper.hpp" namespace HighFive { @@ -23,16 +24,12 @@ template inline Attribute AnnotateTraits::createAttribute(const std::string& attribute_name, const DataSpace& space, const DataType& dtype) { - auto attr_id = H5Acreate2(static_cast(this)->getId(), - attribute_name.c_str(), - dtype.getId(), - space.getId(), - H5P_DEFAULT, - H5P_DEFAULT); - if (attr_id < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to create the attribute \"") + attribute_name + "\":"); - } + auto attr_id = detail::h5a_create2(static_cast(this)->getId(), + attribute_name.c_str(), + dtype.getId(), + space.getId(), + H5P_DEFAULT, + H5P_DEFAULT); return detail::make_attribute(attr_id); } @@ -57,30 +54,20 @@ inline Attribute AnnotateTraits::createAttribute(const std::string& at template inline void AnnotateTraits::deleteAttribute(const std::string& attribute_name) { - if (H5Adelete(static_cast(this)->getId(), attribute_name.c_str()) < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to delete attribute \"") + attribute_name + "\":"); - } + detail::h5a_delete(static_cast(this)->getId(), attribute_name.c_str()); } template inline Attribute AnnotateTraits::getAttribute(const std::string& attribute_name) const { - const auto attr_id = - H5Aopen(static_cast(this)->getId(), attribute_name.c_str(), H5P_DEFAULT); - if (attr_id < 
0) { - HDF5ErrMapper::ToException( - std::string("Unable to open the attribute \"") + attribute_name + "\":"); - } + const auto attr_id = detail::h5a_open(static_cast(this)->getId(), + attribute_name.c_str(), + H5P_DEFAULT); return detail::make_attribute(attr_id); } template inline size_t AnnotateTraits::getNumberAttributes() const { - int res = H5Aget_num_attrs(static_cast(this)->getId()); - if (res < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to count attributes in existing group or file")); - } + int res = detail::h5a_get_num_attrs(static_cast(this)->getId()); return static_cast(res); } @@ -92,27 +79,19 @@ inline std::vector AnnotateTraits::listAttributeNames() c size_t num_objs = getNumberAttributes(); names.reserve(num_objs); - if (H5Aiterate2(static_cast(this)->getId(), - H5_INDEX_NAME, - H5_ITER_INC, - NULL, - &details::internal_high_five_iterate, - static_cast(&iterateData)) < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to list attributes in group")); - } + detail::h5a_iterate2(static_cast(this)->getId(), + H5_INDEX_NAME, + H5_ITER_INC, + nullptr, + &details::internal_high_five_iterate, + static_cast(&iterateData)); return names; } template inline bool AnnotateTraits::hasAttribute(const std::string& attr_name) const { - int res = H5Aexists(static_cast(this)->getId(), attr_name.c_str()); - if (res < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to check for attribute in group")); - } - return res; + return detail::h5a_exists(static_cast(this)->getId(), attr_name.c_str()) > 0; } } // namespace HighFive diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 939e111fd..7be83cb01 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -14,36 +14,34 @@ #include #include -#include #include #include "../H5DataSpace.hpp" #include "H5Converter_misc.hpp" #include "H5ReadWrite_misc.hpp" #include "H5Utils.hpp" +#include "h5a_wrapper.hpp" namespace HighFive { inline std::string Attribute::getName() const { return details::get_name( - [&](char* buffer, size_t length) { return H5Aget_name(_hid, length, buffer); }); + [&](char* buffer, size_t length) { return detail::h5a_get_name(_hid, length, buffer); }); } inline size_t Attribute::getStorageSize() const { - return static_cast(H5Aget_storage_size(_hid)); + return static_cast(detail::h5a_get_storage_size(_hid)); } inline DataType Attribute::getDataType() const { DataType res; - res._hid = H5Aget_type(_hid); + res._hid = detail::h5a_get_type(_hid); return res; } inline DataSpace Attribute::getSpace() const { DataSpace space; - if ((space._hid = H5Aget_space(_hid)) < 0) { - HDF5ErrMapper::ToException("Unable to get DataSpace out of Attribute"); - } + space._hid = detail::h5a_get_space(_hid); return space; } @@ -107,9 +105,7 @@ inline void Attribute::read(T* array, const DataType& mem_datatype) const { static_assert(!std::is_const::value, "read() requires a non-const structure to read data into"); - if (H5Aread(getId(), mem_datatype.getId(), static_cast(array)) < 0) { - HDF5ErrMapper::ToException("Error during HDF5 Read: "); - } + detail::h5a_read(getId(), mem_datatype.getId(), static_cast(array)); } template @@ -147,9 +143,7 @@ inline void Attribute::write(const T& buffer) { template inline void Attribute::write_raw(const T* buffer, const DataType& mem_datatype) { - if (H5Awrite(getId(), mem_datatype.getId(), buffer) < 0) { - HDF5ErrMapper::ToException("Error during HDF5 Write: "); - } + 
detail::h5a_write(getId(), mem_datatype.getId(), buffer); } template diff --git a/include/highfive/bits/h5a_wrapper.hpp b/include/highfive/bits/h5a_wrapper.hpp new file mode 100644 index 000000000..76f4e56fc --- /dev/null +++ b/include/highfive/bits/h5a_wrapper.hpp @@ -0,0 +1,131 @@ +#pragma once + +#include +#include + +namespace HighFive { +namespace detail { + +inline hid_t h5a_create2(hid_t loc_id, + char const* const attr_name, + hid_t type_id, + hid_t space_id, + hid_t acpl_id, + hid_t aapl_id) { + auto attr_id = H5Acreate2(loc_id, attr_name, type_id, space_id, acpl_id, aapl_id); + if (attr_id < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to create the attribute \"") + attr_name + "\":"); + } + + return attr_id; +} + +inline void h5a_delete(hid_t loc_id, char const* const attr_name) { + if (H5Adelete(loc_id, attr_name) < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to delete attribute \"") + attr_name + "\":"); + } +} + +inline hid_t h5a_open(hid_t loc_id, char const* const attr_name, hid_t aapl_id) { + const auto attr_id = H5Aopen(loc_id, attr_name, aapl_id); + if (attr_id < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to open the attribute \"") + attr_name + "\":"); + } + + return attr_id; +} + + +inline int h5a_get_num_attrs(hid_t loc_id) { + int res = H5Aget_num_attrs(loc_id); + if (res < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to count attributes in existing group or file")); + } + + return res; +} + + +inline void h5a_iterate2(hid_t loc_id, + H5_index_t idx_type, + H5_iter_order_t order, + hsize_t* idx, + H5A_operator2_t op, + void* op_data) { + if (H5Aiterate2(loc_id, idx_type, order, idx, op, op_data) < 0) { + HDF5ErrMapper::ToException(std::string("Failed H5Aiterate2.")); + } +} + +inline int h5a_exists(hid_t obj_id, char const* const attr_name) { + int res = H5Aexists(obj_id, attr_name); + if (res < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to check for attribute in group")); + } + + return res; +} + +inline ssize_t h5a_get_name(hid_t attr_id, size_t buf_size, char* buf) { + ssize_t name_length = H5Aget_name(attr_id, buf_size, buf); + if (name_length < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to get name of attribute")); + } + + return name_length; +} + + +inline hid_t h5a_get_space(hid_t attr_id) { + hid_t attr = H5Aget_space(attr_id); + if (attr < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to get dataspace of attribute")); + } + + return attr; +} + +inline hsize_t h5a_get_storage_size(hid_t attr_id) { + // Docs: + // Returns the amount of storage size allocated for the attribute; + // otherwise returns 0 (zero). 
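+    // A zero return is therefore ambiguous (no stored data or an error), so this
+    // wrapper cannot raise a meaningful exception and simply forwards the value.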
+ return H5Aget_storage_size(attr_id); +} + +inline hid_t h5a_get_type(hid_t attr_id) { + hid_t type_id = H5Aget_type(attr_id); + if (type_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException( + std::string("Unable to get datatype of attribute")); + } + + return type_id; +} + +inline herr_t h5a_read(hid_t attr_id, hid_t type_id, void* buf) { + herr_t err = H5Aread(attr_id, type_id, buf); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to read attribute")); + } + + return err; +} + +inline herr_t h5a_write(hid_t attr_id, hid_t type_id, void const* buf) { + herr_t err = H5Awrite(attr_id, type_id, buf); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to write attribute")); + } + + return err; +} + +} // namespace detail +} // namespace HighFive From be8ed40e8696dddd2e2b9897ed15ee6054450373 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 4 Dec 2023 15:49:21 +0100 Subject: [PATCH 11/97] Wrap all used H5D functions. (#885) --- include/highfive/bits/H5Attribute_misc.hpp | 3 +- include/highfive/bits/H5DataSet_misc.hpp | 21 +-- include/highfive/bits/H5Node_traits_misc.hpp | 31 ++--- include/highfive/bits/H5Slice_traits_misc.hpp | 35 +++-- include/highfive/bits/h5d_wrapper.hpp | 125 ++++++++++++++++++ 5 files changed, 160 insertions(+), 55 deletions(-) create mode 100644 include/highfive/bits/h5d_wrapper.hpp diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 7be83cb01..6ec780433 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -21,6 +21,7 @@ #include "H5ReadWrite_misc.hpp" #include "H5Utils.hpp" #include "h5a_wrapper.hpp" +#include "h5d_wrapper.hpp" namespace HighFive { @@ -95,7 +96,7 @@ inline void Attribute::read(T& array) const { (void) detail::h5t_reclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); #else // This one is deprecated since 1.12.0 - (void) H5Dvlen_reclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); + (void) detail::h5d_vlen_reclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); #endif } } diff --git a/include/highfive/bits/H5DataSet_misc.hpp b/include/highfive/bits/H5DataSet_misc.hpp index 4411b4c0d..4817fe001 100644 --- a/include/highfive/bits/H5DataSet_misc.hpp +++ b/include/highfive/bits/H5DataSet_misc.hpp @@ -14,26 +14,24 @@ #include #include -#include #include +#include "h5d_wrapper.hpp" #include "H5Utils.hpp" namespace HighFive { inline uint64_t DataSet::getStorageSize() const { - return H5Dget_storage_size(_hid); + return detail::h5d_get_storage_size(_hid); } inline DataType DataSet::getDataType() const { - return DataType(H5Dget_type(_hid)); + return DataType(detail::h5d_get_type(_hid)); } inline DataSpace DataSet::getSpace() const { DataSpace space; - if ((space._hid = H5Dget_space(_hid)) < 0) { - HDF5ErrMapper::ToException("Unable to get DataSpace out of DataSet"); - } + space._hid = detail::h5d_get_space(_hid); return space; } @@ -42,11 +40,7 @@ inline DataSpace DataSet::getMemSpace() const { } inline uint64_t DataSet::getOffset() const { - uint64_t addr = H5Dget_offset(_hid); - if (addr == HADDR_UNDEF) { - HDF5ErrMapper::ToException("Cannot get offset of DataSet."); - } - return addr; + return static_cast(detail::h5d_get_offset(_hid)); } inline void DataSet::resize(const std::vector& dims) { @@ -58,10 +52,7 @@ inline void DataSet::resize(const std::vector& dims) { } std::vector real_dims(dims.begin(), dims.end()); - - if (H5Dset_extent(getId(), real_dims.data()) < 0) { 
- HDF5ErrMapper::ToException("Could not resize dataset."); - } + detail::h5d_set_extent(getId(), real_dims.data()); } } // namespace HighFive diff --git a/include/highfive/bits/H5Node_traits_misc.hpp b/include/highfive/bits/H5Node_traits_misc.hpp index d7ab82b61..2cbda126a 100644 --- a/include/highfive/bits/H5Node_traits_misc.hpp +++ b/include/highfive/bits/H5Node_traits_misc.hpp @@ -12,7 +12,6 @@ #include #include -#include #include #include #include @@ -39,18 +38,13 @@ inline DataSet NodeTraits::createDataSet(const std::string& dataset_na bool parents) { LinkCreateProps lcpl; lcpl.add(CreateIntermediateGroup(parents)); - const auto hid = H5Dcreate2(static_cast(this)->getId(), - dataset_name.c_str(), - dtype.getId(), - space.getId(), - lcpl.getId(), - createProps.getId(), - accessProps.getId()); - if (hid < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to create the dataset \"") + dataset_name + "\":"); - } - return DataSet(hid); + return DataSet(detail::h5d_create2(static_cast(this)->getId(), + dataset_name.c_str(), + dtype.getId(), + space.getId(), + lcpl.getId(), + createProps.getId(), + accessProps.getId())); } template @@ -119,14 +113,9 @@ inline DataSet NodeTraits::createDataSet(const std::string& dataset_na template inline DataSet NodeTraits::getDataSet(const std::string& dataset_name, const DataSetAccessProps& accessProps) const { - const auto hid = H5Dopen2(static_cast(this)->getId(), - dataset_name.c_str(), - accessProps.getId()); - if (hid < 0) { - HDF5ErrMapper::ToException(std::string("Unable to open the dataset \"") + - dataset_name + "\":"); - } - return DataSet(hid); + return DataSet(detail::h5d_open2(static_cast(this)->getId(), + dataset_name.c_str(), + accessProps.getId())); } template diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index 313343d4e..3569b5094 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -15,7 +15,7 @@ #include #include -#include +#include "h5d_wrapper.hpp" #include #include "H5ReadWrite_misc.hpp" @@ -209,7 +209,10 @@ inline void SliceTraits::read(T& array, const DataTransferProps& xfer_ detail::h5t_reclaim(t.getId(), mem_space.getId(), xfer_props.getId(), r.getPointer()); #else // This one is deprecated since 1.12.0 - (void) H5Dvlen_reclaim(t.getId(), mem_space.getId(), xfer_props.getId(), r.getPointer()); + (void) detail::h5d_vlen_reclaim(t.getId(), + mem_space.getId(), + xfer_props.getId(), + r.getPointer()); #endif } } @@ -225,14 +228,12 @@ inline void SliceTraits::read(T* array, const auto& slice = static_cast(*this); - if (H5Dread(details::get_dataset(slice).getId(), - mem_datatype.getId(), - details::get_memspace_id(slice), - slice.getSpace().getId(), - xfer_props.getId(), - static_cast(array)) < 0) { - HDF5ErrMapper::ToException("Error during HDF5 Read."); - } + detail::h5d_read(details::get_dataset(slice).getId(), + mem_datatype.getId(), + details::get_memspace_id(slice), + slice.getSpace().getId(), + xfer_props.getId(), + static_cast(array)); } template @@ -281,14 +282,12 @@ inline void SliceTraits::write_raw(const T* buffer, const DataTransferProps& xfer_props) { const auto& slice = static_cast(*this); - if (H5Dwrite(details::get_dataset(slice).getId(), - mem_datatype.getId(), - details::get_memspace_id(slice), - slice.getSpace().getId(), - xfer_props.getId(), - static_cast(buffer)) < 0) { - HDF5ErrMapper::ToException("Error during HDF5 Write: "); - } + detail::h5d_write(details::get_dataset(slice).getId(), 
+ mem_datatype.getId(), + details::get_memspace_id(slice), + slice.getSpace().getId(), + xfer_props.getId(), + static_cast(buffer)); } template diff --git a/include/highfive/bits/h5d_wrapper.hpp b/include/highfive/bits/h5d_wrapper.hpp new file mode 100644 index 000000000..6de7fa5a3 --- /dev/null +++ b/include/highfive/bits/h5d_wrapper.hpp @@ -0,0 +1,125 @@ +#pragma once + +#include +#include + +namespace HighFive { +namespace detail { + + +#if !H5_VERSION_GE(1, 12, 0) +inline herr_t h5d_vlen_reclaim(hid_t type_id, hid_t space_id, hid_t dxpl_id, void* buf) { + herr_t err = H5Dvlen_reclaim(type_id, space_id, dxpl_id, buf); + if (err < 0) { + throw DataSetException("Failed to reclaim HDF5 internal memory"); + } + + return err; +} +#endif + +inline hsize_t h5d_get_storage_size(hid_t dset_id) { + // Docs: + // H5Dget_storage_size() does not differentiate between 0 (zero), the + // value returned for the storage size of a dataset with no stored values, + // and 0 (zero), the value returned to indicate an error. + return H5Dget_storage_size(dset_id); +} + +inline hid_t h5d_get_space(hid_t dset_id) { + hid_t dset = H5Dget_space(dset_id); + if (dset == H5I_INVALID_HID) { + HDF5ErrMapper::ToException( + std::string("Unable to get dataspace of the dataset")); + } + + return dset; +} + +inline hid_t h5d_get_type(hid_t dset_id) { + hid_t type_id = H5Dget_type(dset_id); + if (type_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException( + std::string("Unable to get datatype of the dataset")); + } + + return type_id; +} + +inline herr_t h5d_read(hid_t dset_id, + hid_t mem_type_id, + hid_t mem_space_id, + hid_t file_space_id, + hid_t dxpl_id, + void* buf) { + herr_t err = H5Dread(dset_id, mem_type_id, mem_space_id, file_space_id, dxpl_id, buf); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to read the dataset")); + } + + return err; +} + +inline herr_t h5d_write(hid_t dset_id, + hid_t mem_type_id, + hid_t mem_space_id, + hid_t file_space_id, + hid_t dxpl_id, + const void* buf) { + herr_t err = H5Dwrite(dset_id, mem_type_id, mem_space_id, file_space_id, dxpl_id, buf); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to write the dataset")); + } + + return err; +} + +inline haddr_t h5d_get_offset(hid_t dset_id) { + uint64_t addr = H5Dget_offset(dset_id); + if (addr == HADDR_UNDEF) { + HDF5ErrMapper::ToException("Cannot get offset of DataSet."); + } + return addr; +} + + +inline herr_t h5d_set_extent(hid_t dset_id, const hsize_t size[]) { + herr_t err = H5Dset_extent(dset_id, size); + if (H5Dset_extent(dset_id, size) < 0) { + HDF5ErrMapper::ToException("Could not resize dataset."); + } + + return err; +} + +inline hid_t h5d_create2(hid_t loc_id, + const char* name, + hid_t type_id, + hid_t space_id, + hid_t lcpl_id, + hid_t dcpl_id, + hid_t dapl_id) { + hid_t dataset_id = H5Dcreate2(loc_id, name, type_id, space_id, lcpl_id, dcpl_id, dapl_id); + + if (dataset_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException( + std::string("Failed to create the dataset \"") + name + "\":"); + } + + return dataset_id; +} + +inline hid_t h5d_open2(hid_t loc_id, const char* name, hid_t dapl_id) { + hid_t dataset_id = H5Dopen2(loc_id, name, dapl_id); + + if (dataset_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException(std::string("Unable to open the dataset \"") + + name + "\":"); + } + + return dataset_id; +} + + +} // namespace detail +} // namespace HighFive From dfa380157e217721469aa3862bbebad277fd0df2 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 7 Dec 2023 08:08:27 +0100 
Subject: [PATCH 12/97] Wrap all used H5S functions. (#886) --- include/highfive/bits/H5Dataspace_misc.hpp | 42 ++----- include/highfive/bits/H5Slice_traits.hpp | 20 ++-- include/highfive/bits/H5Slice_traits_misc.hpp | 8 +- include/highfive/bits/h5s_wrapper.hpp | 106 ++++++++++++++++++ 4 files changed, 127 insertions(+), 49 deletions(-) create mode 100644 include/highfive/bits/h5s_wrapper.hpp diff --git a/include/highfive/bits/H5Dataspace_misc.hpp b/include/highfive/bits/H5Dataspace_misc.hpp index 0fdcacefd..03fb4a950 100644 --- a/include/highfive/bits/H5Dataspace_misc.hpp +++ b/include/highfive/bits/H5Dataspace_misc.hpp @@ -17,6 +17,7 @@ #include "H5Utils.hpp" #include "H5Converter_misc.hpp" +#include "h5s_wrapper.hpp" namespace HighFive { @@ -38,9 +39,7 @@ template inline DataSpace::DataSpace(const IT begin, const IT end) { std::vector real_dims(begin, end); - if ((_hid = H5Screate_simple(int(real_dims.size()), real_dims.data(), NULL)) < 0) { - throw DataSpaceException("Impossible to create dataspace"); - } + _hid = detail::h5s_create_simple(int(real_dims.size()), real_dims.data(), nullptr); } inline DataSpace::DataSpace(const std::vector& dims, const std::vector& maxdims) { @@ -57,10 +56,8 @@ inline DataSpace::DataSpace(const std::vector& dims, const std::vector(DataSpace::UNLIMITED), H5S_UNLIMITED); - if ((_hid = H5Screate_simple(int(dims.size()), real_dims.data(), real_maxdims.data())) < 0) { - throw DataSpaceException("Impossible to create dataspace"); - } -} // namespace HighFive + _hid = detail::h5s_create_simple(int(dims.size()), real_dims.data(), real_maxdims.data()); +} inline DataSpace::DataSpace(DataSpace::DataspaceType space_type) { H5S_class_t h5_dataspace_type; @@ -77,53 +74,34 @@ inline DataSpace::DataSpace(DataSpace::DataspaceType space_type) { "dataspace_scalar or dataspace_null"); } - if ((_hid = H5Screate(h5_dataspace_type)) < 0) { - throw DataSpaceException("Unable to create dataspace"); - } + _hid = detail::h5s_create(h5_dataspace_type); } inline DataSpace DataSpace::clone() const { DataSpace res; - if ((res._hid = H5Scopy(_hid)) < 0) { - throw DataSpaceException("Unable to copy dataspace"); - } + res._hid = detail::h5s_copy(_hid); return res; } inline size_t DataSpace::getNumberDimensions() const { - const int ndim = H5Sget_simple_extent_ndims(_hid); - if (ndim < 0) { - HDF5ErrMapper::ToException( - "Unable to get dataspace number of dimensions"); - } - return size_t(ndim); + return static_cast(detail::h5s_get_simple_extent_ndims(_hid)); } inline std::vector DataSpace::getDimensions() const { std::vector dims(getNumberDimensions()); if (!dims.empty()) { - if (H5Sget_simple_extent_dims(_hid, dims.data(), NULL) < 0) { - HDF5ErrMapper::ToException("Unable to get dataspace dimensions"); - } + detail::h5s_get_simple_extent_dims(_hid, dims.data(), nullptr); } return details::to_vector_size_t(std::move(dims)); } inline size_t DataSpace::getElementCount() const { - hssize_t nelements = H5Sget_simple_extent_npoints(_hid); - if (nelements < 0) { - HDF5ErrMapper::ToException( - "Unable to get number of elements in dataspace"); - } - - return static_cast(nelements); + return static_cast(detail::h5s_get_simple_extent_npoints(_hid)); } inline std::vector DataSpace::getMaxDimensions() const { std::vector maxdims(getNumberDimensions()); - if (H5Sget_simple_extent_dims(_hid, NULL, maxdims.data()) < 0) { - HDF5ErrMapper::ToException("Unable to get dataspace dimensions"); - } + detail::h5s_get_simple_extent_dims(_hid, nullptr, maxdims.data()); std::replace(maxdims.begin(), 
maxdims.end(), diff --git a/include/highfive/bits/H5Slice_traits.hpp b/include/highfive/bits/H5Slice_traits.hpp index 52c52713f..c753026c3 100644 --- a/include/highfive/bits/H5Slice_traits.hpp +++ b/include/highfive/bits/H5Slice_traits.hpp @@ -15,6 +15,7 @@ #include "H5Utils.hpp" #include "../H5PropertyList.hpp" +#include "h5s_wrapper.hpp" namespace HighFive { @@ -174,19 +175,14 @@ class HyperSlab { auto space = space_.clone(); for (const auto& sel: selects) { if (sel.op == Op::None) { - H5Sselect_none(space.getId()); + detail::h5s_select_none(space.getId()); } else { - auto error_code = - H5Sselect_hyperslab(space.getId(), - convert(sel.op), - sel.offset.empty() ? nullptr : sel.offset.data(), - sel.stride.empty() ? nullptr : sel.stride.data(), - sel.count.empty() ? nullptr : sel.count.data(), - sel.block.empty() ? nullptr : sel.block.data()); - - if (error_code < 0) { - HDF5ErrMapper::ToException("Unable to select hyperslab"); - } + detail::h5s_select_hyperslab(space.getId(), + convert(sel.op), + sel.offset.empty() ? nullptr : sel.offset.data(), + sel.stride.empty() ? nullptr : sel.stride.data(), + sel.count.empty() ? nullptr : sel.count.data(), + sel.block.empty() ? nullptr : sel.block.data()); } } return space; diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index 3569b5094..4dfb1ea5f 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -16,7 +16,7 @@ #include #include "h5d_wrapper.hpp" -#include +#include "h5s_wrapper.hpp" #include "H5ReadWrite_misc.hpp" #include "H5Converter_misc.hpp" @@ -84,7 +84,7 @@ inline Selection SliceTraits::select(const HyperSlab& hyper_slab) cons auto filespace = slice.getSpace(); filespace = hyper_slab.apply(filespace); - auto n_elements = H5Sget_select_npoints(filespace.getId()); + auto n_elements = detail::h5s_get_select_npoints(filespace.getId()); auto memspace = DataSpace(std::array{size_t(n_elements)}); return detail::make_selection(memspace, filespace, details::get_dataset(slice)); @@ -149,9 +149,7 @@ inline Selection SliceTraits::select(const ElementSet& elements) const data = raw_elements.data(); } - if (H5Sselect_elements(space.getId(), H5S_SELECT_SET, num_elements, data) < 0) { - HDF5ErrMapper::ToException("Unable to select elements"); - } + detail::h5s_select_elements(space.getId(), H5S_SELECT_SET, num_elements, data); return detail::make_selection(DataSpace(num_elements), space, details::get_dataset(slice)); } diff --git a/include/highfive/bits/h5s_wrapper.hpp b/include/highfive/bits/h5s_wrapper.hpp new file mode 100644 index 000000000..32b872b6e --- /dev/null +++ b/include/highfive/bits/h5s_wrapper.hpp @@ -0,0 +1,106 @@ +#pragma once + +#include +namespace HighFive { +namespace detail { + +inline hid_t h5s_create_simple(int rank, const hsize_t dims[], const hsize_t maxdims[]) { + hid_t space_id = H5Screate_simple(rank, dims, maxdims); + if (space_id == H5I_INVALID_HID) { + throw DataSpaceException("Unable to create simple dataspace"); + } + + return space_id; +} + +inline hid_t h5s_create(H5S_class_t type) { + hid_t space_id = H5Screate(type); + + if (space_id == H5I_INVALID_HID) { + throw DataSpaceException("Unable to create dataspace"); + } + + return space_id; +} + +inline hid_t h5s_copy(hid_t space_id) { + hid_t copy_id = H5Scopy(space_id); + + if (copy_id < 0) { + throw DataSpaceException("Unable to copy dataspace"); + } + + return copy_id; +} + +inline herr_t h5s_select_none(hid_t spaceid) { + herr_t err = 
H5Sselect_none(spaceid); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to select None space"); + } + return err; +} + +inline herr_t h5s_select_hyperslab(hid_t space_id, + H5S_seloper_t op, + const hsize_t start[], + const hsize_t stride[], + const hsize_t count[], + const hsize_t block[]) { + herr_t err = H5Sselect_hyperslab(space_id, op, start, stride, count, block); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to select hyperslab"); + } + return err; +} + +inline hssize_t h5s_get_select_npoints(hid_t spaceid) { + hssize_t n_points = H5Sget_select_npoints(spaceid); + if (n_points < 0) { + HDF5ErrMapper::ToException( + "Unable to get number of points in selection"); + } + return n_points; +} + +inline herr_t h5s_select_elements(hid_t space_id, + H5S_seloper_t op, + size_t num_elem, + const hsize_t* coord) { + herr_t err = H5Sselect_elements(space_id, op, num_elem, coord); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to select elements"); + } + return err; +} + +inline int h5s_get_simple_extent_ndims(hid_t space_id) { + int ndim = H5Sget_simple_extent_ndims(space_id); + if (ndim < 0) { + HDF5ErrMapper::ToException( + "Unable to get number of dimensions of dataspace"); + } + return ndim; +} + +inline herr_t h5s_get_simple_extent_dims(hid_t space_id, hsize_t dims[], hsize_t maxdims[]) { + herr_t err = H5Sget_simple_extent_dims(space_id, dims, maxdims); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to get dimensions of dataspace"); + } + return err; +} + +inline hssize_t h5s_get_simple_extent_npoints(hid_t space_id) { + hssize_t nelements = H5Sget_simple_extent_npoints(space_id); + if (nelements < 0) { + HDF5ErrMapper::ToException( + "Unable to get number of elements in dataspace"); + } + + return nelements; +} + + +} // namespace detail +} // namespace HighFive From f419ae3486de4e156f46fa0622c5988f88401bcf Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 7 Dec 2023 11:09:51 +0100 Subject: [PATCH 13/97] Wrap all used H5P functions. 
(#887) --- include/highfive/bits/H5PropertyList_misc.hpp | 165 +++----- include/highfive/bits/h5p_wrapper.hpp | 375 ++++++++++++++++++ 2 files changed, 420 insertions(+), 120 deletions(-) create mode 100644 include/highfive/bits/h5p_wrapper.hpp diff --git a/include/highfive/bits/H5PropertyList_misc.hpp b/include/highfive/bits/H5PropertyList_misc.hpp index cef301e53..55aabe58f 100644 --- a/include/highfive/bits/H5PropertyList_misc.hpp +++ b/include/highfive/bits/H5PropertyList_misc.hpp @@ -8,7 +8,7 @@ */ #pragma once -#include +#include "h5p_wrapper.hpp" namespace HighFive { @@ -64,9 +64,7 @@ inline void PropertyList::_initializeIfNeeded() { if (_hid != H5P_DEFAULT) { return; } - if ((_hid = H5Pcreate(convert_plist_type(T))) < 0) { - HDF5ErrMapper::ToException("Unable to create property list"); - } + _hid = detail::h5p_create(convert_plist_type(T)); } template @@ -95,15 +93,11 @@ inline FileSpaceStrategy::FileSpaceStrategy(H5F_fspace_strategy_t strategy, , _threshold(threshold) {} inline FileSpaceStrategy::FileSpaceStrategy(const FileCreateProps& fcpl) { - if (H5Pget_file_space_strategy(fcpl.getId(), &_strategy, &_persist, &_threshold) < 0) { - HDF5ErrMapper::ToException("Unable to get file space strategy"); - } + detail::h5p_get_file_space_strategy(fcpl.getId(), &_strategy, &_persist, &_threshold); } inline void FileSpaceStrategy::apply(const hid_t list) const { - if (H5Pset_file_space_strategy(list, _strategy, _persist, _threshold) < 0) { - HDF5ErrMapper::ToException("Error setting file space strategy."); - } + detail::h5p_set_file_space_strategy(list, _strategy, _persist, _threshold); } inline H5F_fspace_strategy_t FileSpaceStrategy::getStrategy() const { @@ -122,15 +116,11 @@ inline FileSpacePageSize::FileSpacePageSize(hsize_t page_size) : _page_size(page_size) {} inline void FileSpacePageSize::apply(const hid_t list) const { - if (H5Pset_file_space_page_size(list, _page_size) < 0) { - HDF5ErrMapper::ToException("Error setting file space page size."); - } + detail::h5p_set_file_space_page_size(list, _page_size); } inline FileSpacePageSize::FileSpacePageSize(const FileCreateProps& fcpl) { - if (H5Pget_file_space_page_size(fcpl.getId(), &_page_size) < 0) { - HDF5ErrMapper::ToException("Unable to get file space page size"); - } + detail::h5p_get_file_space_page_size(fcpl.getId(), &_page_size); } inline hsize_t FileSpacePageSize::getPageSize() const { @@ -146,15 +136,11 @@ inline PageBufferSize::PageBufferSize(size_t page_buffer_size, , _min_raw(min_raw_percent) {} inline PageBufferSize::PageBufferSize(const FileAccessProps& plist) { - if (H5Pget_page_buffer_size(plist.getId(), &_page_buffer_size, &_min_meta, &_min_raw) < 0) { - HDF5ErrMapper::ToException("Error setting page buffer size."); - } + detail::h5p_get_page_buffer_size(plist.getId(), &_page_buffer_size, &_min_meta, &_min_raw); } inline void PageBufferSize::apply(const hid_t list) const { - if (H5Pset_page_buffer_size(list, _page_buffer_size, _min_meta, _min_raw) < 0) { - HDF5ErrMapper::ToException("Error setting page buffer size."); - } + detail::h5p_set_page_buffer_size(list, _page_buffer_size, _min_meta, _min_raw); } inline size_t PageBufferSize::getPageBufferSize() const { @@ -178,9 +164,7 @@ inline MPIOFileAccess::MPIOFileAccess(MPI_Comm comm, MPI_Info info) , _info(info) {} inline void MPIOFileAccess::apply(const hid_t list) const { - if (H5Pset_fapl_mpio(list, _comm, _info) < 0) { - HDF5ErrMapper::ToException("Unable to set-up MPIO Driver configuration"); - } + detail::h5p_set_fapl_mpio(list, _comm, _info); } inline void 
MPIOCollectiveMetadata::apply(const hid_t plist) const { @@ -210,9 +194,7 @@ inline bool MPIOCollectiveMetadata::isCollectiveWrite() const { inline void MPIOCollectiveMetadataRead::apply(const hid_t plist) const { - if (H5Pset_all_coll_metadata_ops(plist, collective_) < 0) { - HDF5ErrMapper::ToException("Unable to request collective metadata reads"); - } + detail::h5p_set_all_coll_metadata_ops(plist, collective_); } inline bool MPIOCollectiveMetadataRead::isCollective() const { @@ -220,18 +202,14 @@ inline bool MPIOCollectiveMetadataRead::isCollective() const { } inline MPIOCollectiveMetadataRead::MPIOCollectiveMetadataRead(const FileAccessProps& plist) { - if (H5Pget_all_coll_metadata_ops(plist.getId(), &collective_) < 0) { - HDF5ErrMapper::ToException("Error loading MPI metadata read."); - } + detail::h5p_get_all_coll_metadata_ops(plist.getId(), &collective_); } inline MPIOCollectiveMetadataRead::MPIOCollectiveMetadataRead(bool collective) : collective_(collective) {} inline void MPIOCollectiveMetadataWrite::apply(const hid_t plist) const { - if (H5Pset_coll_metadata_write(plist, collective_) < 0) { - HDF5ErrMapper::ToException("Unable to request collective metadata writes"); - } + detail::h5p_set_coll_metadata_write(plist, collective_); } inline bool MPIOCollectiveMetadataWrite::isCollective() const { @@ -239,9 +217,7 @@ inline bool MPIOCollectiveMetadataWrite::isCollective() const { } inline MPIOCollectiveMetadataWrite::MPIOCollectiveMetadataWrite(const FileAccessProps& plist) { - if (H5Pget_coll_metadata_write(plist.getId(), &collective_) < 0) { - HDF5ErrMapper::ToException("Error loading MPI metadata write."); - } + detail::h5p_get_coll_metadata_write(plist.getId(), &collective_); } inline MPIOCollectiveMetadataWrite::MPIOCollectiveMetadataWrite(bool collective) @@ -254,9 +230,7 @@ inline FileVersionBounds::FileVersionBounds(H5F_libver_t low, H5F_libver_t high) , _high(high) {} inline FileVersionBounds::FileVersionBounds(const FileAccessProps& fapl) { - if (H5Pget_libver_bounds(fapl.getId(), &_low, &_high) < 0) { - HDF5ErrMapper::ToException("Unable to access file version bounds"); - } + detail::h5p_get_libver_bounds(fapl.getId(), &_low, &_high); } inline std::pair FileVersionBounds::getVersion() const { @@ -264,24 +238,18 @@ inline std::pair FileVersionBounds::getVersion() con } inline void FileVersionBounds::apply(const hid_t list) const { - if (H5Pset_libver_bounds(list, _low, _high) < 0) { - HDF5ErrMapper::ToException("Error setting file version bounds"); - } + detail::h5p_set_libver_bounds(list, _low, _high); } inline MetadataBlockSize::MetadataBlockSize(hsize_t size) : _size(size) {} inline MetadataBlockSize::MetadataBlockSize(const FileAccessProps& fapl) { - if (H5Pget_meta_block_size(fapl.getId(), &_size) < 0) { - HDF5ErrMapper::ToException("Unable to access file metadata block size"); - } + detail::h5p_get_meta_block_size(fapl.getId(), &_size); } inline void MetadataBlockSize::apply(const hid_t list) const { - if (H5Pset_meta_block_size(list, _size) < 0) { - HDF5ErrMapper::ToException("Error setting metadata block size"); - } + detail::h5p_set_meta_block_size(list, _size); } inline hsize_t MetadataBlockSize::getSize() const { @@ -289,9 +257,7 @@ inline hsize_t MetadataBlockSize::getSize() const { } inline void EstimatedLinkInfo::apply(const hid_t hid) const { - if (H5Pset_est_link_info(hid, _entries, _length) < 0) { - HDF5ErrMapper::ToException("Error setting estimated link info"); - } + detail::h5p_set_est_link_info(hid, _entries, _length); } inline 
EstimatedLinkInfo::EstimatedLinkInfo(unsigned entries, unsigned length) @@ -299,9 +265,7 @@ inline EstimatedLinkInfo::EstimatedLinkInfo(unsigned entries, unsigned length) , _length(length) {} inline EstimatedLinkInfo::EstimatedLinkInfo(const GroupCreateProps& gcpl) { - if (H5Pget_est_link_info(gcpl.getId(), &_entries, &_length) < 0) { - HDF5ErrMapper::ToException("Unable to access group link size property"); - } + detail::h5p_get_est_link_info(gcpl.getId(), &_entries, &_length); } inline unsigned EstimatedLinkInfo::getEntries() const { @@ -313,9 +277,7 @@ inline unsigned EstimatedLinkInfo::getNameLength() const { } inline void Chunking::apply(const hid_t hid) const { - if (H5Pset_chunk(hid, static_cast(_dims.size()), _dims.data()) < 0) { - HDF5ErrMapper::ToException("Error setting chunk property"); - } + detail::h5p_set_chunk(hid, static_cast(_dims.size()), _dims.data()); } inline Chunking::Chunking(const std::vector& dims) @@ -326,10 +288,8 @@ inline Chunking::Chunking(const std::initializer_list& items) inline Chunking::Chunking(DataSetCreateProps& plist, size_t max_dims) : _dims(max_dims + 1) { - auto n_loaded = H5Pget_chunk(plist.getId(), static_cast(_dims.size()), _dims.data()); - if (n_loaded < 0) { - HDF5ErrMapper::ToException("Error getting chunk size"); - } + auto n_loaded = + detail::h5p_get_chunk(plist.getId(), static_cast(_dims.size()), _dims.data()); if (n_loaded >= static_cast(_dims.size())) { *this = Chunking(plist, 8 * max_dims); @@ -347,22 +307,22 @@ inline Chunking::Chunking(hsize_t item, Args... args) : Chunking(std::vector{item, static_cast(args)...}) {} inline void Deflate::apply(const hid_t hid) const { - if (!H5Zfilter_avail(H5Z_FILTER_DEFLATE) || H5Pset_deflate(hid, _level) < 0) { - HDF5ErrMapper::ToException("Error setting deflate property"); + if (detail::h5z_filter_avail(H5Z_FILTER_DEFLATE) == 0) { + HDF5ErrMapper::ToException("Deflate filter unavailable."); } + + detail::h5p_set_deflate(hid, _level); } inline Deflate::Deflate(unsigned int level) : _level(level) {} inline void Szip::apply(const hid_t hid) const { - if (!H5Zfilter_avail(H5Z_FILTER_SZIP)) { - HDF5ErrMapper::ToException("Error setting szip property"); + if (detail::h5z_filter_avail(H5Z_FILTER_SZIP) == 0) { + HDF5ErrMapper::ToException("SZIP filter unavailable."); } - if (H5Pset_szip(hid, _options_mask, _pixels_per_block) < 0) { - HDF5ErrMapper::ToException("Error setting szip property"); - } + detail::h5p_set_szip(hid, _options_mask, _pixels_per_block); } inline Szip::Szip(unsigned int options_mask, unsigned int pixels_per_block) @@ -378,28 +338,22 @@ inline unsigned Szip::getPixelsPerBlock() const { } inline void Shuffle::apply(const hid_t hid) const { - if (!H5Zfilter_avail(H5Z_FILTER_SHUFFLE)) { - HDF5ErrMapper::ToException("Error setting shuffle property"); + if (detail::h5z_filter_avail(H5Z_FILTER_SHUFFLE) == 0) { + HDF5ErrMapper::ToException("Shuffle filter unavailable."); } - if (H5Pset_shuffle(hid) < 0) { - HDF5ErrMapper::ToException("Error setting shuffle property"); - } + detail::h5p_set_shuffle(hid); } inline AllocationTime::AllocationTime(H5D_alloc_time_t alloc_time) : _alloc_time(alloc_time) {} inline AllocationTime::AllocationTime(const DataSetCreateProps& dcpl) { - if (H5Pget_alloc_time(dcpl.getId(), &_alloc_time) < 0) { - HDF5ErrMapper::ToException("Error getting allocation time"); - } + detail::h5p_get_alloc_time(dcpl.getId(), &_alloc_time); } inline void AllocationTime::apply(hid_t dcpl) const { - if (H5Pset_alloc_time(dcpl, _alloc_time) < 0) { - HDF5ErrMapper::ToException("Error 
setting allocation time"); - } + detail::h5p_set_alloc_time(dcpl, _alloc_time); } inline H5D_alloc_time_t AllocationTime::getAllocationTime() { @@ -407,15 +361,11 @@ inline H5D_alloc_time_t AllocationTime::getAllocationTime() { } inline Caching::Caching(const DataSetCreateProps& dcpl) { - if (H5Pget_chunk_cache(dcpl.getId(), &_numSlots, &_cacheSize, &_w0) < 0) { - HDF5ErrMapper::ToException("Error getting dataset cache parameters"); - } + detail::h5p_get_chunk_cache(dcpl.getId(), &_numSlots, &_cacheSize, &_w0); } inline void Caching::apply(const hid_t hid) const { - if (H5Pset_chunk_cache(hid, _numSlots, _cacheSize, _w0) < 0) { - HDF5ErrMapper::ToException("Error setting dataset cache parameters"); - } + detail::h5p_set_chunk_cache(hid, _numSlots, _cacheSize, _w0); } inline Caching::Caching(const size_t numSlots, const size_t cacheSize, const double w0) @@ -444,10 +394,7 @@ inline CreateIntermediateGroup::CreateIntermediateGroup(const ObjectCreateProps& inline void CreateIntermediateGroup::apply(const hid_t hid) const { - if (H5Pset_create_intermediate_group(hid, _create ? 1 : 0) < 0) { - HDF5ErrMapper::ToException( - "Error setting property for create intermediate groups"); - } + detail::h5p_set_create_intermediate_group(hid, _create ? 1 : 0); } inline CreateIntermediateGroup::CreateIntermediateGroup(const LinkCreateProps& lcpl) { @@ -456,12 +403,7 @@ inline CreateIntermediateGroup::CreateIntermediateGroup(const LinkCreateProps& l inline void CreateIntermediateGroup::fromPropertyList(hid_t hid) { unsigned c_bool = 0; - if (H5Pget_create_intermediate_group(hid, &c_bool) < 0) { - HDF5ErrMapper::ToException( - "Error getting property for create intermediate groups"); - } - - _create = bool(c_bool); + _create = bool(detail::h5p_get_create_intermediate_group(hid, &c_bool)); } inline bool CreateIntermediateGroup::isSet() const { @@ -473,17 +415,13 @@ inline UseCollectiveIO::UseCollectiveIO(bool enable) : _enable(enable) {} inline void UseCollectiveIO::apply(const hid_t hid) const { - if (H5Pset_dxpl_mpio(hid, _enable ? H5FD_MPIO_COLLECTIVE : H5FD_MPIO_INDEPENDENT) < 0) { - HDF5ErrMapper::ToException("Error setting H5Pset_dxpl_mpio."); - } + detail::h5p_set_dxpl_mpio(hid, _enable ? 
H5FD_MPIO_COLLECTIVE : H5FD_MPIO_INDEPENDENT); } inline UseCollectiveIO::UseCollectiveIO(const DataTransferProps& dxpl) { H5FD_mpio_xfer_t collective; - if (H5Pget_dxpl_mpio(dxpl.getId(), &collective) < 0) { - HDF5ErrMapper::ToException("Error getting H5Pset_dxpl_mpio."); - } + detail::h5p_get_dxpl_mpio(dxpl.getId(), &collective); if (collective != H5FD_MPIO_COLLECTIVE && collective != H5FD_MPIO_INDEPENDENT) { throw std::logic_error("H5Pget_dxpl_mpio returned something strange."); @@ -497,9 +435,7 @@ inline bool UseCollectiveIO::isCollective() const { } inline MpioNoCollectiveCause::MpioNoCollectiveCause(const DataTransferProps& dxpl) { - if (H5Pget_mpio_no_collective_cause(dxpl.getId(), &_local_cause, &_global_cause) < 0) { - HDF5ErrMapper::ToException("Failed to check mpio_no_collective_cause."); - } + detail::h5p_get_mpio_no_collective_cause(dxpl.getId(), &_local_cause, &_global_cause); } inline bool MpioNoCollectiveCause::wasCollective() const { @@ -532,16 +468,11 @@ inline unsigned LinkCreationOrder::getFlags() const { } inline void LinkCreationOrder::apply(const hid_t hid) const { - if (H5Pset_link_creation_order(hid, _flags) < 0) { - HDF5ErrMapper::ToException("Error setting LinkCreationOrder."); - } + detail::h5p_set_link_creation_order(hid, _flags); } inline void LinkCreationOrder::fromPropertyList(hid_t hid) { - if (H5Pget_link_creation_order(hid, &_flags) < 0) { - HDF5ErrMapper::ToException( - "Error getting property for link creation order"); - } + detail::h5p_get_link_creation_order(hid, &_flags); } inline AttributePhaseChange::AttributePhaseChange(unsigned max_compact, unsigned min_dense) @@ -549,10 +480,7 @@ inline AttributePhaseChange::AttributePhaseChange(unsigned max_compact, unsigned , _min_dense(min_dense) {} inline AttributePhaseChange::AttributePhaseChange(const GroupCreateProps& gcpl) { - if (H5Pget_attr_phase_change(gcpl.getId(), &_max_compact, &_min_dense) < 0) { - HDF5ErrMapper::ToException( - "Error getting property for attribute phase change"); - } + detail::h5p_get_attr_phase_change(gcpl.getId(), &_max_compact, &_min_dense); } inline unsigned AttributePhaseChange::max_compact() const { @@ -564,10 +492,7 @@ inline unsigned AttributePhaseChange::min_dense() const { } inline void AttributePhaseChange::apply(hid_t hid) const { - if (H5Pset_attr_phase_change(hid, _max_compact, _min_dense) < 0) { - HDF5ErrMapper::ToException( - "Error getting property for attribute phase change"); - } + detail::h5p_set_attr_phase_change(hid, _max_compact, _min_dense); } diff --git a/include/highfive/bits/h5p_wrapper.hpp b/include/highfive/bits/h5p_wrapper.hpp new file mode 100644 index 000000000..821c2fbcd --- /dev/null +++ b/include/highfive/bits/h5p_wrapper.hpp @@ -0,0 +1,375 @@ +#pragma once + +#include +#include + +namespace HighFive { +namespace detail { +inline hid_t h5p_create(hid_t cls_id) { + hid_t plist_id = H5Pcreate(cls_id); + if (plist_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException("Failed to create property list"); + } + + return plist_id; +} + +#if H5_VERSION_GE(1, 10, 1) +inline herr_t h5p_set_file_space_strategy(hid_t plist_id, + H5F_fspace_strategy_t strategy, + hbool_t persist, + hsize_t threshold) { + herr_t err = H5Pset_file_space_strategy(plist_id, strategy, persist, threshold); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to get file space strategy"); + } + + return err; +} + +inline herr_t h5p_get_file_space_strategy(hid_t plist_id, + H5F_fspace_strategy_t* strategy, + hbool_t* persist, + hsize_t* threshold) { + herr_t err = 
H5Pget_file_space_strategy(plist_id, strategy, persist, threshold); + if (err) { + HDF5ErrMapper::ToException("Error setting file space strategy."); + } + + return err; +} + +inline herr_t h5p_set_file_space_page_size(hid_t plist_id, hsize_t fsp_size) { + herr_t err = H5Pset_file_space_page_size(plist_id, fsp_size); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting file space page size."); + } + + return err; +} + +inline herr_t h5p_get_file_space_page_size(hid_t plist_id, hsize_t* fsp_size) { + herr_t err = H5Pget_file_space_page_size(plist_id, fsp_size); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to get file space page size"); + } + + return err; +} + +#ifndef H5_HAVE_PARALLEL +inline herr_t h5p_get_page_buffer_size(hid_t plist_id, + size_t* buf_size, + unsigned* min_meta_perc, + unsigned* min_raw_perc) { + herr_t err = H5Pget_page_buffer_size(plist_id, buf_size, min_meta_perc, min_raw_perc); + + if (err < 0) { + HDF5ErrMapper::ToException("Error setting page buffer size."); + } + + return err; +} + +inline herr_t h5p_set_page_buffer_size(hid_t plist_id, + size_t buf_size, + unsigned min_meta_per, + unsigned min_raw_per) { + herr_t err = H5Pset_page_buffer_size(plist_id, buf_size, min_meta_per, min_raw_per); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting page buffer size."); + } + + return err; +} +#endif +#endif + +#ifdef H5_HAVE_PARALLEL +inline herr_t h5p_set_fapl_mpio(hid_t fapl_id, MPI_Comm comm, MPI_Info info) { + herr_t err = H5Pset_fapl_mpio(fapl_id, comm, info); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to set-up MPIO Driver configuration"); + } + + return err; +} + +inline herr_t h5p_set_all_coll_metadata_ops(hid_t plist_id, hbool_t is_collective) { + herr_t err = H5Pset_all_coll_metadata_ops(plist_id, is_collective); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to request collective metadata reads"); + } + + return err; +} + +inline herr_t h5p_get_all_coll_metadata_ops(hid_t plist_id, hbool_t* is_collective) { + herr_t err = H5Pget_all_coll_metadata_ops(plist_id, is_collective); + if (err < 0) { + HDF5ErrMapper::ToException("Error loading MPI metadata read."); + } + + return err; +} + +inline herr_t h5p_set_coll_metadata_write(hid_t plist_id, hbool_t is_collective) { + herr_t err = H5Pset_coll_metadata_write(plist_id, is_collective); + + if (err < 0) { + HDF5ErrMapper::ToException("Unable to request collective metadata writes"); + } + + return err; +} + +inline herr_t h5p_get_coll_metadata_write(hid_t plist_id, hbool_t* is_collective) { + herr_t err = H5Pget_coll_metadata_write(plist_id, is_collective); + + if (err < 0) { + HDF5ErrMapper::ToException("Error loading MPI metadata write."); + } + + return err; +} + +#endif + +inline herr_t h5p_get_libver_bounds(hid_t plist_id, H5F_libver_t* low, H5F_libver_t* high) { + herr_t err = H5Pget_libver_bounds(plist_id, low, high); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to access file version bounds"); + } + + return err; +} + +inline herr_t h5p_set_libver_bounds(hid_t plist_id, H5F_libver_t low, H5F_libver_t high) { + herr_t err = H5Pset_libver_bounds(plist_id, low, high); + + if (err < 0) { + HDF5ErrMapper::ToException("Error setting file version bounds"); + } + + return err; +} + +inline herr_t h5p_get_meta_block_size(hid_t fapl_id, hsize_t* size) { + herr_t err = H5Pget_meta_block_size(fapl_id, size); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to access file metadata block size"); + } + + return err; +} + +inline herr_t h5p_set_meta_block_size(hid_t 
fapl_id, hsize_t size) { + herr_t err = H5Pset_meta_block_size(fapl_id, size); + + if (err < 0) { + HDF5ErrMapper::ToException("Error setting metadata block size"); + } + + return err; +} + +inline herr_t h5p_set_est_link_info(hid_t plist_id, + unsigned est_num_entries, + unsigned est_name_len) { + herr_t err = H5Pset_est_link_info(plist_id, est_num_entries, est_name_len); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting estimated link info"); + } + + return err; +} + +inline herr_t h5p_get_est_link_info(hid_t plist_id, + unsigned* est_num_entries, + unsigned* est_name_len) { + herr_t err = H5Pget_est_link_info(plist_id, est_num_entries, est_name_len); + + if (err < 0) { + HDF5ErrMapper::ToException("Unable to access group link size property"); + } + + return err; +} + +inline herr_t h5p_set_chunk(hid_t plist_id, int ndims, const hsize_t dim[]) { + herr_t err = H5Pset_chunk(plist_id, ndims, dim); + + if (err < 0) { + HDF5ErrMapper::ToException("Error setting chunk property"); + } + + return err; +} + +inline int h5p_get_chunk(hid_t plist_id, int max_ndims, hsize_t dim[]) { + int chunk_dims = H5Pget_chunk(plist_id, max_ndims, dim); + if (chunk_dims < 0) { + HDF5ErrMapper::ToException("Error getting chunk size"); + } + return chunk_dims; +} + +inline htri_t h5z_filter_avail(H5Z_filter_t id) { + htri_t tri = H5Zfilter_avail(id); + if (tri < 0) { + HDF5ErrMapper::ToException("Error checking filter availability"); + } + return tri; +} + +inline herr_t h5p_set_deflate(hid_t plist_id, unsigned level) { + herr_t err = H5Pset_deflate(plist_id, level); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting deflate property"); + } + return err; +} + +inline herr_t h5p_set_szip(hid_t plist_id, unsigned options_mask, unsigned pixels_per_block) { + herr_t err = H5Pset_szip(plist_id, options_mask, pixels_per_block); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting szip property"); + } + return err; +} + +inline herr_t h5p_set_shuffle(hid_t plist_id) { + herr_t err = H5Pset_shuffle(plist_id); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting shuffle property"); + } + return err; +} + +inline herr_t h5p_get_alloc_time(hid_t plist_id, H5D_alloc_time_t* alloc_time) { + herr_t err = H5Pget_alloc_time(plist_id, alloc_time); + if (err < 0) { + HDF5ErrMapper::ToException("Error getting allocation time"); + } + return err; +} + +inline herr_t h5p_set_alloc_time(hid_t plist_id, H5D_alloc_time_t alloc_time) { + herr_t err = H5Pset_alloc_time(plist_id, alloc_time); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting allocation time"); + } + return err; +} + +inline herr_t h5p_get_chunk_cache(hid_t dapl_id, + size_t* rdcc_nslots, + size_t* rdcc_nbytes, + double* rdcc_w0) { + herr_t err = H5Pget_chunk_cache(dapl_id, rdcc_nslots, rdcc_nbytes, rdcc_w0); + if (err < 0) { + HDF5ErrMapper::ToException("Error getting dataset cache parameters"); + } + return err; +} + +inline herr_t h5p_set_chunk_cache(hid_t dapl_id, + size_t rdcc_nslots, + size_t rdcc_nbytes, + double rdcc_w0) { + herr_t err = H5Pset_chunk_cache(dapl_id, rdcc_nslots, rdcc_nbytes, rdcc_w0); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting dataset cache parameters"); + } + return err; +} + +inline herr_t h5p_set_create_intermediate_group(hid_t plist_id, unsigned crt_intmd) { + herr_t err = H5Pset_create_intermediate_group(plist_id, crt_intmd); + if (err < 0) { + HDF5ErrMapper::ToException( + "Error setting property for create intermediate groups"); + } + return err; +} + +inline herr_t 
h5p_get_create_intermediate_group(hid_t plist_id, unsigned* crt_intmd) { + herr_t err = H5Pget_create_intermediate_group(plist_id, crt_intmd); + if (err < 0) { + HDF5ErrMapper::ToException( + "Error getting property for create intermediate groups"); + } + return err; +} + +#ifdef H5_HAVE_PARALLEL +inline herr_t h5p_set_dxpl_mpio(hid_t dxpl_id, H5FD_mpio_xfer_t xfer_mode) { + herr_t err = H5Pset_dxpl_mpio(dxpl_id, xfer_mode); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting H5Pset_dxpl_mpio."); + } + return err; +} + +inline herr_t h5p_get_dxpl_mpio(hid_t dxpl_id, H5FD_mpio_xfer_t* xfer_mode) { + herr_t err = H5Pget_dxpl_mpio(dxpl_id, xfer_mode); + if (err < 0) { + HDF5ErrMapper::ToException("Error getting H5Pset_dxpl_mpio."); + } + return err; +} + +inline herr_t h5p_get_mpio_no_collective_cause(hid_t plist_id, + uint32_t* local_no_collective_cause, + uint32_t* global_no_collective_cause) { + herr_t err = H5Pget_mpio_no_collective_cause(plist_id, + local_no_collective_cause, + global_no_collective_cause); + if (err < 0) { + HDF5ErrMapper::ToException("Failed to check mpio_no_collective_cause."); + } + return err; +} + +#endif + +inline herr_t h5p_set_link_creation_order(hid_t plist_id, unsigned crt_order_flags) { + herr_t err = H5Pset_link_creation_order(plist_id, crt_order_flags); + if (err < 0) { + HDF5ErrMapper::ToException("Error setting LinkCreationOrder."); + } + return err; +} + +inline herr_t h5p_get_link_creation_order(hid_t plist_id, unsigned* crt_order_flags) { + herr_t err = H5Pget_link_creation_order(plist_id, crt_order_flags); + if (err < 0) { + HDF5ErrMapper::ToException( + "Error getting property for link creation order"); + } + return err; +} + +inline herr_t h5p_get_attr_phase_change(hid_t plist_id, + unsigned* max_compact, + unsigned* min_dense) { + herr_t err = H5Pget_attr_phase_change(plist_id, max_compact, min_dense); + if (err < 0) { + HDF5ErrMapper::ToException( + "Error getting property for attribute phase change"); + } + return err; +} + +inline herr_t h5p_set_attr_phase_change(hid_t plist_id, unsigned max_compact, unsigned min_dense) { + herr_t err = H5Pset_attr_phase_change(plist_id, max_compact, min_dense); + if (err < 0) { + HDF5ErrMapper::ToException( + "Error getting property for attribute phase change"); + } + return err; +} + + +} // namespace detail +} // namespace HighFive From c83f9505217a8083092ff6e019268cff61371b68 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 8 Dec 2023 10:08:01 +0100 Subject: [PATCH 14/97] Wrap all used H5{L,G} functions. 
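For context, a minimal sketch (not part of the patch) of why H5Lexists gets both a throwing wrapper and a detail::nothrow:: variant: existence probes may legitimately hit missing or broken links and want to decide for themselves whether that is an error, as NodeTraits::_exist() does in the diff below.

#include <highfive/H5File.hpp>     // assumed to make the new bits/h5l_wrapper.hpp reachable
#include <highfive/H5Utility.hpp>  // SilenceHDF5

// Probe a link without turning an HDF5 failure into an exception.
inline bool link_exists_quietly(hid_t loc, const char* name) {
    HighFive::SilenceHDF5 silencer;  // keep HDF5 from printing to stderr while probing
    htri_t tri = HighFive::detail::nothrow::h5l_exists(loc, name, H5P_DEFAULT);
    return tri > 0;  // a negative value means the probe itself failed; treat as absent
}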
(#888) --- include/highfive/bits/H5Node_traits_misc.hpp | 170 ++++++++----------- include/highfive/bits/h5g_wrapper.hpp | 46 +++++ include/highfive/bits/h5l_wrapper.hpp | 132 ++++++++++++++ 3 files changed, 247 insertions(+), 101 deletions(-) create mode 100644 include/highfive/bits/h5g_wrapper.hpp create mode 100644 include/highfive/bits/h5l_wrapper.hpp diff --git a/include/highfive/bits/H5Node_traits_misc.hpp b/include/highfive/bits/H5Node_traits_misc.hpp index 2cbda126a..fb7f0f06f 100644 --- a/include/highfive/bits/H5Node_traits_misc.hpp +++ b/include/highfive/bits/H5Node_traits_misc.hpp @@ -13,7 +13,6 @@ #include #include -#include #include #include @@ -26,6 +25,10 @@ #include "H5Selection_misc.hpp" #include "H5Slice_traits_misc.hpp" +#include "h5l_wrapper.hpp" +#include "h5g_wrapper.hpp" + + namespace HighFive { @@ -122,16 +125,11 @@ template inline Group NodeTraits::createGroup(const std::string& group_name, bool parents) { LinkCreateProps lcpl; lcpl.add(CreateIntermediateGroup(parents)); - const auto hid = H5Gcreate2(static_cast(this)->getId(), - group_name.c_str(), - lcpl.getId(), - H5P_DEFAULT, - H5P_DEFAULT); - if (hid < 0) { - HDF5ErrMapper::ToException(std::string("Unable to create the group \"") + - group_name + "\":"); - } - return detail::make_group(hid); + return detail::make_group(detail::h5g_create2(static_cast(this)->getId(), + group_name.c_str(), + lcpl.getId(), + H5P_DEFAULT, + H5P_DEFAULT)); } template @@ -140,27 +138,18 @@ inline Group NodeTraits::createGroup(const std::string& group_name, bool parents) { LinkCreateProps lcpl; lcpl.add(CreateIntermediateGroup(parents)); - const auto hid = H5Gcreate2(static_cast(this)->getId(), - group_name.c_str(), - lcpl.getId(), - createProps.getId(), - H5P_DEFAULT); - if (hid < 0) { - HDF5ErrMapper::ToException(std::string("Unable to create the group \"") + - group_name + "\":"); - } - return detail::make_group(hid); + return detail::make_group(detail::h5g_create2(static_cast(this)->getId(), + group_name.c_str(), + lcpl.getId(), + createProps.getId(), + H5P_DEFAULT)); } template inline Group NodeTraits::getGroup(const std::string& group_name) const { - const auto hid = - H5Gopen2(static_cast(this)->getId(), group_name.c_str(), H5P_DEFAULT); - if (hid < 0) { - HDF5ErrMapper::ToException(std::string("Unable to open the group \"") + - group_name + "\":"); - } - return detail::make_group(hid); + return detail::make_group(detail::h5g_open2(static_cast(this)->getId(), + group_name.c_str(), + H5P_DEFAULT)); } template @@ -174,24 +163,21 @@ inline DataType NodeTraits::getDataType(const std::string& type_name, template inline size_t NodeTraits::getNumberObjects() const { hsize_t res; - if (H5Gget_num_objs(static_cast(this)->getId(), &res) < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to count objects in existing group or file")); - } + detail::h5g_get_num_objs(static_cast(this)->getId(), &res); return static_cast(res); } template inline std::string NodeTraits::getObjectName(size_t index) const { return details::get_name([&](char* buffer, size_t length) { - return H5Lget_name_by_idx(static_cast(this)->getId(), - ".", - H5_INDEX_NAME, - H5_ITER_INC, - index, - buffer, - length, - H5P_DEFAULT); + return detail::h5l_get_name_by_idx(static_cast(this)->getId(), + ".", + H5_INDEX_NAME, + H5_ITER_INC, + index, + buffer, + length, + H5P_DEFAULT); }); } @@ -201,18 +187,14 @@ inline bool NodeTraits::rename(const std::string& src_path, bool parents) const { LinkCreateProps lcpl; lcpl.add(CreateIntermediateGroup(parents)); - herr_t status 
= H5Lmove(static_cast(this)->getId(), - src_path.c_str(), - static_cast(this)->getId(), - dst_path.c_str(), - lcpl.getId(), - H5P_DEFAULT); - if (status < 0) { - HDF5ErrMapper::ToException(std::string("Unable to move link to \"") + - dst_path + "\":"); - return false; - } - return true; + herr_t err = detail::h5l_move(static_cast(this)->getId(), + src_path.c_str(), + static_cast(this)->getId(), + dst_path.c_str(), + lcpl.getId(), + H5P_DEFAULT); + + return err >= 0; } template @@ -223,23 +205,21 @@ inline std::vector NodeTraits::listObjectNames(IndexType size_t num_objs = getNumberObjects(); names.reserve(num_objs); - if (H5Literate(static_cast(this)->getId(), - static_cast(idx_type), - H5_ITER_INC, - NULL, - &details::internal_high_five_iterate, - static_cast(&iterateData)) < 0) { - HDF5ErrMapper::ToException(std::string("Unable to list objects in group")); - } - + detail::h5l_iterate(static_cast(this)->getId(), + static_cast(idx_type), + H5_ITER_INC, + NULL, + &details::internal_high_five_iterate, + static_cast(&iterateData)); return names; } template inline bool NodeTraits::_exist(const std::string& node_name, bool raise_errors) const { SilenceHDF5 silencer{}; - const auto val = - H5Lexists(static_cast(this)->getId(), node_name.c_str(), H5P_DEFAULT); + const auto val = detail::nothrow::h5l_exists(static_cast(this)->getId(), + node_name.c_str(), + H5P_DEFAULT); if (val < 0) { if (raise_errors) { HDF5ErrMapper::ToException("Invalid link for exist()"); @@ -269,11 +249,7 @@ inline bool NodeTraits::exist(const std::string& group_path) const { template inline void NodeTraits::unlink(const std::string& node_name) const { - const herr_t val = - H5Ldelete(static_cast(this)->getId(), node_name.c_str(), H5P_DEFAULT); - if (val < 0) { - HDF5ErrMapper::ToException(std::string("Invalid name for unlink() ")); - } + detail::h5l_delete(static_cast(this)->getId(), node_name.c_str(), H5P_DEFAULT); } @@ -297,13 +273,14 @@ static inline LinkType _convert_link_type(const H5L_type_t& ltype) noexcept { template inline LinkType NodeTraits::getLinkType(const std::string& node_name) const { H5L_info_t linkinfo; - if (H5Lget_info(static_cast(this)->getId(), - node_name.c_str(), - &linkinfo, - H5P_DEFAULT) < 0 || - linkinfo.type == H5L_TYPE_ERROR) { - HDF5ErrMapper::ToException(std::string("Unable to obtain info for link ") + - node_name); + detail::h5l_get_info(static_cast(this)->getId(), + node_name.c_str(), + &linkinfo, + H5P_DEFAULT); + + if (linkinfo.type == H5L_TYPE_ERROR) { + HDF5ErrMapper::ToException(std::string("Link type of \"") + node_name + + "\" is H5L_TYPE_ERROR"); } return _convert_link_type(linkinfo.type); } @@ -323,14 +300,11 @@ inline void NodeTraits::createSoftLink(const std::string& link_name, if (parents) { linkCreateProps.add(CreateIntermediateGroup{}); } - auto status = H5Lcreate_soft(obj_path.c_str(), - static_cast(this)->getId(), - link_name.c_str(), - linkCreateProps.getId(), - linkAccessProps.getId()); - if (status < 0) { - HDF5ErrMapper::ToException(std::string("Unable to create soft link: ")); - } + detail::h5l_create_soft(obj_path.c_str(), + static_cast(this)->getId(), + link_name.c_str(), + linkCreateProps.getId(), + linkAccessProps.getId()); } @@ -344,15 +318,12 @@ inline void NodeTraits::createExternalLink(const std::string& link_nam if (parents) { linkCreateProps.add(CreateIntermediateGroup{}); } - auto status = H5Lcreate_external(h5_file.c_str(), - obj_path.c_str(), - static_cast(this)->getId(), - link_name.c_str(), - linkCreateProps.getId(), - linkAccessProps.getId()); - if 
(status < 0) { - HDF5ErrMapper::ToException(std::string("Unable to create external link: ")); - } + detail::h5l_create_external(h5_file.c_str(), + obj_path.c_str(), + static_cast(this)->getId(), + link_name.c_str(), + linkCreateProps.getId(), + linkAccessProps.getId()); } template @@ -367,15 +338,12 @@ inline void NodeTraits::createHardLink(const std::string& link_name, if (parents) { linkCreateProps.add(CreateIntermediateGroup{}); } - auto status = H5Lcreate_hard(target_obj.getId(), - ".", - static_cast(this)->getId(), - link_name.c_str(), - linkCreateProps.getId(), - linkAccessProps.getId()); - if (status < 0) { - HDF5ErrMapper::ToException(std::string("Unable to create hard link: ")); - } + detail::h5l_create_hard(target_obj.getId(), + ".", + static_cast(this)->getId(), + link_name.c_str(), + linkCreateProps.getId(), + linkAccessProps.getId()); } diff --git a/include/highfive/bits/h5g_wrapper.hpp b/include/highfive/bits/h5g_wrapper.hpp new file mode 100644 index 000000000..eb77f9983 --- /dev/null +++ b/include/highfive/bits/h5g_wrapper.hpp @@ -0,0 +1,46 @@ +#pragma once + +#include +#include + +#include + +namespace HighFive { +namespace detail { + +inline hid_t h5g_create2(hid_t loc_id, + const char* name, + hid_t lcpl_id, + hid_t gcpl_id, + hid_t gapl_id) { + hid_t group_id = H5Gcreate2(loc_id, name, lcpl_id, gcpl_id, gapl_id); + if (group_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException(std::string("Unable to create the group \"") + + name + "\":"); + } + + return group_id; +} + +inline hid_t h5g_open2(hid_t loc_id, const char* name, hid_t gapl_id) { + hid_t group_id = H5Gopen2(loc_id, name, gapl_id); + if (group_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException(std::string("Unable to open the group \"") + + name + "\":"); + } + return group_id; +} + +inline herr_t h5g_get_num_objs(hid_t loc_id, hsize_t* num_objs) { + herr_t err = H5Gget_num_objs(loc_id, num_objs); + if (err < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to count objects in existing group or file")); + } + + return err; +} + + +} // namespace detail +} // namespace HighFive diff --git a/include/highfive/bits/h5l_wrapper.hpp b/include/highfive/bits/h5l_wrapper.hpp new file mode 100644 index 000000000..0009fdcfd --- /dev/null +++ b/include/highfive/bits/h5l_wrapper.hpp @@ -0,0 +1,132 @@ +#pragma once + +#include + +namespace HighFive { +namespace detail { + +inline herr_t h5l_create_external(const char* file_name, + const char* obj_name, + hid_t link_loc_id, + const char* link_name, + hid_t lcpl_id, + hid_t lapl_id) { + herr_t err = H5Lcreate_external(file_name, obj_name, link_loc_id, link_name, lcpl_id, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to create external link: ")); + } + + return err; +} + +inline herr_t h5l_create_soft(const char* link_target, + hid_t link_loc_id, + const char* link_name, + hid_t lcpl_id, + hid_t lapl_id) { + herr_t err = H5Lcreate_soft(link_target, link_loc_id, link_name, lcpl_id, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to create soft link: ")); + } + + return err; +} + +inline herr_t h5l_create_hard(hid_t cur_loc, + const char* cur_name, + hid_t dst_loc, + const char* dst_name, + hid_t lcpl_id, + hid_t lapl_id) { + herr_t err = H5Lcreate_hard(cur_loc, cur_name, dst_loc, dst_name, lcpl_id, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to create hard link: ")); + } + + return err; +} + +inline herr_t h5l_get_info(hid_t loc_id, const char* name, H5L_info_t* linfo, hid_t 
lapl_id) { + herr_t err = H5Lget_info(loc_id, name, linfo, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to obtain info for link ")); + } + + return err; +} + +inline herr_t h5l_delete(hid_t loc_id, const char* name, hid_t lapl_id) { + herr_t err = H5Ldelete(loc_id, name, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Invalid name for unlink() ")); + } + + return err; +} + +inline htri_t h5l_exists(hid_t loc_id, const char* name, hid_t lapl_id) { + htri_t tri = H5Lexists(loc_id, name, lapl_id); + if (tri < 0) { + HDF5ErrMapper::ToException("Invalid link for exist()"); + } + + return tri; +} + +namespace nothrow { + +inline htri_t h5l_exists(hid_t loc_id, const char* name, hid_t lapl_id) { + return H5Lexists(loc_id, name, lapl_id); +} + +} // namespace nothrow + +inline herr_t h5l_iterate(hid_t grp_id, + H5_index_t idx_type, + H5_iter_order_t order, + hsize_t* idx, + H5L_iterate_t op, + void* op_data) { + herr_t err = H5Literate(grp_id, idx_type, order, idx, op, op_data); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to list objects in group")); + } + return err; +} + +inline herr_t h5l_move(hid_t src_loc, + const char* src_name, + hid_t dst_loc, + const char* dst_name, + hid_t lcpl_id, + hid_t lapl_id) { + herr_t err = H5Lmove(src_loc, src_name, dst_loc, dst_name, lcpl_id, lapl_id); + + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to move link to \"") + + dst_name + "\":"); + } + return err; +} + +inline ssize_t h5l_get_name_by_idx(hid_t loc_id, + const char* group_name, + H5_index_t idx_type, + H5_iter_order_t order, + hsize_t n, + char* name, + size_t size, + hid_t lapl_id) { + ssize_t n_chars = + H5Lget_name_by_idx(loc_id, group_name, idx_type, order, n, name, size, lapl_id); + + if (n_chars < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to obtain link name from index.")); + } + + return n_chars; +} + +} // namespace detail +} // namespace HighFive From fe4060ffb06d39c3a9ce67fd6726db376205e209 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 11 Dec 2023 08:34:28 +0100 Subject: [PATCH 15/97] Wrap all used H5{F,O,R} functions. 
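For context, a minimal sketch (not part of the patch) of why the open call lands in detail::nothrow:: while the create call throws: the open-or-create path in File's constructor, visible in the diff below, expects the open to fail quietly before falling back to creating the file. The access flags here are simplified.

#include <highfive/H5File.hpp>     // assumed to make the new bits/h5f_wrapper.hpp reachable
#include <highfive/H5Utility.hpp>  // SilenceHDF5

inline hid_t open_or_create(const char* filename, hid_t fcpl, hid_t fapl) {
    HighFive::SilenceHDF5 silencer;  // an expected open failure should not be reported
    hid_t hid = HighFive::detail::nothrow::h5f_open(filename, H5F_ACC_RDWR, fapl);
    if (hid >= 0) {
        return hid;  // existing file opened
    }
    // The create wrapper converts failure into an exception.
    return HighFive::detail::h5f_create(filename, H5F_ACC_EXCL, fcpl, fapl);
}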
(#889) --- include/highfive/bits/H5File_misc.hpp | 28 +++------- include/highfive/bits/H5Node_traits_misc.hpp | 10 ++-- include/highfive/bits/H5Reference_misc.hpp | 16 ++---- include/highfive/bits/h5f_wrapper.hpp | 58 ++++++++++++++++++++ include/highfive/bits/h5o_wrapper.hpp | 19 +++++++ include/highfive/bits/h5r_wrapper.hpp | 42 ++++++++++++++ 6 files changed, 137 insertions(+), 36 deletions(-) create mode 100644 include/highfive/bits/h5f_wrapper.hpp create mode 100644 include/highfive/bits/h5o_wrapper.hpp create mode 100644 include/highfive/bits/h5r_wrapper.hpp diff --git a/include/highfive/bits/H5File_misc.hpp b/include/highfive/bits/H5File_misc.hpp index b90792a71..52ae59516 100644 --- a/include/highfive/bits/H5File_misc.hpp +++ b/include/highfive/bits/H5File_misc.hpp @@ -14,6 +14,7 @@ #include "../H5Utility.hpp" #include "H5Utils.hpp" +#include "h5f_wrapper.hpp" namespace HighFive { @@ -62,7 +63,7 @@ inline File::File(const std::string& filename, if (openOrCreate) silencer.reset(new SilenceHDF5()); - _hid = H5Fopen(filename.c_str(), openMode, fileAccessProps.getId()); + _hid = detail::nothrow::h5f_open(filename.c_str(), openMode, fileAccessProps.getId()); if (isValid()) return; // Done @@ -78,15 +79,14 @@ inline File::File(const std::string& filename, auto fcpl = fileCreateProps.getId(); auto fapl = fileAccessProps.getId(); - if ((_hid = H5Fcreate(filename.c_str(), createMode, fcpl, fapl)) < 0) { - HDF5ErrMapper::ToException(std::string("Unable to create file " + filename)); - } + _hid = detail::h5f_create(filename.c_str(), createMode, fcpl, fapl); } inline const std::string& File::getName() const noexcept { if (_filename.empty()) { - _filename = details::get_name( - [this](char* buffer, size_t length) { return H5Fget_name(getId(), buffer, length); }); + _filename = details::get_name([this](char* buffer, size_t length) { + return detail::h5f_get_name(getId(), buffer, length); + }); } return _filename; } @@ -122,27 +122,17 @@ inline hsize_t File::getFileSpacePageSize() const { #endif inline void File::flush() { - if (H5Fflush(_hid, H5F_SCOPE_GLOBAL) < 0) { - HDF5ErrMapper::ToException(std::string("Unable to flush file " + getName())); - } + detail::h5f_flush(_hid, H5F_SCOPE_GLOBAL); } inline size_t File::getFileSize() const { hsize_t sizeValue = 0; - if (H5Fget_filesize(_hid, &sizeValue) < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to retrieve size of file " + getName())); - } + detail::h5f_get_filesize(_hid, &sizeValue); return static_cast(sizeValue); } inline size_t File::getFreeSpace() const { - hssize_t unusedSize = H5Fget_freespace(_hid); - if (unusedSize < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to retrieve unused space of file " + getName())); - } - return static_cast(unusedSize); + return static_cast(detail::h5f_get_freespace(_hid)); } } // namespace HighFive diff --git a/include/highfive/bits/H5Node_traits_misc.hpp b/include/highfive/bits/H5Node_traits_misc.hpp index fb7f0f06f..b09bc3190 100644 --- a/include/highfive/bits/H5Node_traits_misc.hpp +++ b/include/highfive/bits/H5Node_traits_misc.hpp @@ -27,6 +27,7 @@ #include "h5l_wrapper.hpp" #include "h5g_wrapper.hpp" +#include "h5o_wrapper.hpp" namespace HighFive { @@ -349,12 +350,9 @@ inline void NodeTraits::createHardLink(const std::string& link_name, template inline Object NodeTraits::_open(const std::string& node_name) const { - const auto id = - H5Oopen(static_cast(this)->getId(), node_name.c_str(), H5P_DEFAULT); - if (id < 0) { - HDF5ErrMapper::ToException(std::string("Unable to open \"") + 
node_name + - "\":"); - } + const auto id = detail::h5o_open(static_cast(this)->getId(), + node_name.c_str(), + H5P_DEFAULT); return detail::make_object(id); } diff --git a/include/highfive/bits/H5Reference_misc.hpp b/include/highfive/bits/H5Reference_misc.hpp index 7c8db36fb..98e1fb88e 100644 --- a/include/highfive/bits/H5Reference_misc.hpp +++ b/include/highfive/bits/H5Reference_misc.hpp @@ -16,6 +16,8 @@ #include "../H5Object.hpp" +#include "h5r_wrapper.hpp" + namespace HighFive { inline Reference::Reference(const Object& location, const Object& object) @@ -25,10 +27,7 @@ inline Reference::Reference(const Object& location, const Object& object) } inline void Reference::create_ref(hobj_ref_t* refptr) const { - if (H5Rcreate(refptr, parent_id, obj_name.c_str(), H5R_OBJECT, -1) < 0) { - HDF5ErrMapper::ToException( - std::string("Unable to create the reference for \"") + obj_name + "\":"); - } + detail::h5r_create(refptr, parent_id, obj_name.c_str(), H5R_OBJECT, -1); } inline ObjectType Reference::getType(const Object& location) const { @@ -51,15 +50,10 @@ inline T Reference::dereference(const Object& location) const { } inline Object Reference::get_ref(const Object& location) const { - hid_t res; #if (H5Rdereference_vers == 2) - if ((res = H5Rdereference(location.getId(), H5P_DEFAULT, H5R_OBJECT, &href)) < 0) { - HDF5ErrMapper::ToException("Unable to dereference."); - } + hid_t res = detail::h5r_dereference(location.getId(), H5P_DEFAULT, H5R_OBJECT, &href); #else - if ((res = H5Rdereference(location.getId(), H5R_OBJECT, &href)) < 0) { - HDF5ErrMapper::ToException("Unable to dereference."); - } + hid_t res = detail::h5r_dereference(location.getId(), H5R_OBJECT, &href); #endif return Object(res); } diff --git a/include/highfive/bits/h5f_wrapper.hpp b/include/highfive/bits/h5f_wrapper.hpp new file mode 100644 index 000000000..600534b33 --- /dev/null +++ b/include/highfive/bits/h5f_wrapper.hpp @@ -0,0 +1,58 @@ +#pragma once + +#include +namespace HighFive { +namespace detail { +namespace nothrow { +inline hid_t h5f_open(const char* filename, unsigned flags, hid_t fapl_id) { + return H5Fopen(filename, flags, fapl_id); +} +} // namespace nothrow + +inline hid_t h5f_create(const char* filename, unsigned flags, hid_t fcpl_id, hid_t fapl_id) { + hid_t file_id = H5Fcreate(filename, flags, fcpl_id, fapl_id); + + if (file_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException(std::string("Failed to create file ") + filename); + } + return file_id; +} + +inline ssize_t h5f_get_name(hid_t obj_id, char* name, size_t size) { + ssize_t nread = H5Fget_name(obj_id, name, size); + if (nread < 0) { + HDF5ErrMapper::ToException(std::string("Failed to get file from id")); + } + + return nread; +} + +inline herr_t h5f_flush(hid_t object_id, H5F_scope_t scope) { + herr_t err = H5Fflush(object_id, scope); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Failed to flush file")); + } + + return err; +} + +inline herr_t h5f_get_filesize(hid_t file_id, hsize_t* size) { + herr_t err = H5Fget_filesize(file_id, size); + if (err < 0) { + HDF5ErrMapper::ToException(std::string("Unable to retrieve size of file")); + } + + return err; +} + +inline hssize_t h5f_get_freespace(hid_t file_id) { + hssize_t free_space = H5Fget_freespace(file_id); + if (free_space < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to retrieve unused space of file ")); + } + return free_space; +} + +} // namespace detail +} // namespace HighFive diff --git a/include/highfive/bits/h5o_wrapper.hpp 
b/include/highfive/bits/h5o_wrapper.hpp new file mode 100644 index 000000000..75b91bb6a --- /dev/null +++ b/include/highfive/bits/h5o_wrapper.hpp @@ -0,0 +1,19 @@ +#pragma once + +#include +#include + +namespace HighFive { +namespace detail { + +inline hid_t h5o_open(hid_t loc_id, const char* name, hid_t lapl_id) { + hid_t hid = H5Oopen(loc_id, name, lapl_id); + if (hid < 0) { + HDF5ErrMapper::ToException(std::string("Unable to open \"") + name + "\":"); + } + + return hid; +} + +} // namespace detail +} // namespace HighFive diff --git a/include/highfive/bits/h5r_wrapper.hpp b/include/highfive/bits/h5r_wrapper.hpp new file mode 100644 index 000000000..86552d395 --- /dev/null +++ b/include/highfive/bits/h5r_wrapper.hpp @@ -0,0 +1,42 @@ +#pragma once + +#include + +namespace HighFive { +namespace detail { +inline herr_t h5r_create(void* ref, + hid_t loc_id, + const char* name, + H5R_type_t ref_type, + hid_t space_id) { + herr_t err = H5Rcreate(ref, loc_id, name, ref_type, space_id); + if (err < 0) { + HDF5ErrMapper::ToException( + std::string("Unable to create the reference for \"") + name + "\":"); + } + + return err; +} + +#if (H5Rdereference_vers == 2) +inline hid_t h5r_dereference(hid_t obj_id, hid_t oapl_id, H5R_type_t ref_type, const void* ref) { + hid_t hid = H5Rdereference(obj_id, oapl_id, ref_type, ref); + if (hid < 0) { + HDF5ErrMapper::ToException("Unable to dereference."); + } + + return hid; +} +#else +inline hid_t h5r_dereference(hid_t dataset, H5R_type_t ref_type, const void* ref) { + hid_t hid = H5Rdereference(dataset, ref_type, ref); + if (hid < 0) { + HDF5ErrMapper::ToException("Unable to dereference."); + } + + return hid; +} +#endif + +} // namespace detail +} // namespace HighFive From 2af7cb5f6d0fbc0809b5e659f94857c6a6f02183 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 11 Dec 2023 17:43:57 +0100 Subject: [PATCH 16/97] Wrap all used H5E functions. 
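For context, a minimal sketch (not part of the patch) of the save/disable/restore sequence SilenceHDF5 performs with these wrappers; everything stays in detail::nothrow:: because it runs inside error handling, including a destructor, where throwing is off limits.

#include <highfive/H5File.hpp>  // assumed to make the new bits/h5e_wrapper.hpp reachable

inline void run_quietly() {
    H5E_auto2_t old_func = nullptr;
    void* old_data = nullptr;
    // Save the current error handler, then disable automatic error printing.
    HighFive::detail::nothrow::h5e_get_auto2(H5E_DEFAULT, &old_func, &old_data);
    HighFive::detail::nothrow::h5e_set_auto2(H5E_DEFAULT, nullptr, nullptr);

    // ... HDF5 calls that are expected to fail go here ...

    // Restore the previous handler (SilenceHDF5 does this in its destructor).
    HighFive::detail::nothrow::h5e_set_auto2(H5E_DEFAULT, old_func, old_data);
}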
(#890) --- include/highfive/H5Utility.hpp | 12 ++++--- include/highfive/bits/H5Exception_misc.hpp | 18 ++++++---- include/highfive/bits/h5_wrapper.hpp | 7 ++++ include/highfive/bits/h5e_wrapper.hpp | 39 ++++++++++++++++++++++ 4 files changed, 64 insertions(+), 12 deletions(-) create mode 100644 include/highfive/bits/h5e_wrapper.hpp diff --git a/include/highfive/H5Utility.hpp b/include/highfive/H5Utility.hpp index e33fb7993..7a17e14b0 100644 --- a/include/highfive/H5Utility.hpp +++ b/include/highfive/H5Utility.hpp @@ -9,11 +9,11 @@ #pragma once -#include #include #include #include +#include "bits/h5e_wrapper.hpp" #include "bits/H5Friends.hpp" namespace HighFive { @@ -25,13 +25,15 @@ class SilenceHDF5 { public: inline SilenceHDF5(bool enable = true) : _client_data(nullptr) { - H5Eget_auto2(H5E_DEFAULT, &_func, &_client_data); - if (enable) - H5Eset_auto2(H5E_DEFAULT, NULL, NULL); + detail::nothrow::h5e_get_auto2(H5E_DEFAULT, &_func, &_client_data); + + if (enable) { + detail::nothrow::h5e_set_auto2(H5E_DEFAULT, nullptr, nullptr); + } } inline ~SilenceHDF5() { - H5Eset_auto2(H5E_DEFAULT, _func, _client_data); + detail::nothrow::h5e_set_auto2(H5E_DEFAULT, _func, _client_data); } private: diff --git a/include/highfive/bits/H5Exception_misc.hpp b/include/highfive/bits/H5Exception_misc.hpp index f7382f2c2..16ec107e6 100644 --- a/include/highfive/bits/H5Exception_misc.hpp +++ b/include/highfive/bits/H5Exception_misc.hpp @@ -11,7 +11,8 @@ #include #include -#include +#include "h5_wrapper.hpp" +#include "h5e_wrapper.hpp" namespace HighFive { @@ -21,14 +22,14 @@ struct HDF5ErrMapper { auto** e_iter = static_cast(client_data); (void) n; - const char* major_err = H5Eget_major(err_desc->maj_num); - const char* minor_err = H5Eget_minor(err_desc->min_num); + const char* major_err = detail::nothrow::h5e_get_major(err_desc->maj_num); + const char* minor_err = detail::nothrow::h5e_get_minor(err_desc->min_num); std::ostringstream oss; oss << '(' << major_err << ") " << minor_err; - H5free_memory((void*) major_err); - H5free_memory((void*) minor_err); + detail::nothrow::h5_free_memory((void*) major_err); + detail::nothrow::h5_free_memory((void*) minor_err); auto* e = new ExceptionType(oss.str()); e->_err_major = err_desc->maj_num; @@ -45,8 +46,11 @@ struct HDF5ErrMapper { ExceptionType e(""); ExceptionType* e_iter = &e; - H5Ewalk2(err_stack, H5E_WALK_UPWARD, &HDF5ErrMapper::stackWalk, &e_iter); - H5Eclear2(err_stack); + detail::nothrow::h5e_walk2(err_stack, + H5E_WALK_UPWARD, + &HDF5ErrMapper::stackWalk, + &e_iter); + detail::nothrow::h5e_clear2(err_stack); const char* next_err_msg = (e.nextException() != NULL) ? 
(e.nextException()->what()) : (""); diff --git a/include/highfive/bits/h5_wrapper.hpp b/include/highfive/bits/h5_wrapper.hpp index cfde5b8e6..6f418e3f4 100644 --- a/include/highfive/bits/h5_wrapper.hpp +++ b/include/highfive/bits/h5_wrapper.hpp @@ -8,5 +8,12 @@ inline void h5_free_memory(void* mem) { throw DataTypeException("Could not free memory allocated by HDF5"); } } + +namespace nothrow { +inline herr_t h5_free_memory(void* mem) { + return H5free_memory(mem); +} +} // namespace nothrow + } // namespace detail } // namespace HighFive diff --git a/include/highfive/bits/h5e_wrapper.hpp b/include/highfive/bits/h5e_wrapper.hpp new file mode 100644 index 000000000..168b14b7c --- /dev/null +++ b/include/highfive/bits/h5e_wrapper.hpp @@ -0,0 +1,39 @@ +#pragma once + +#include +namespace HighFive { +namespace detail { +namespace nothrow { + + +inline void h5e_get_auto2(hid_t estack_id, H5E_auto2_t* func, void** client_data) { + H5Eget_auto2(estack_id, func, client_data); +} + +inline void h5e_set_auto2(hid_t estack_id, H5E_auto2_t func, void* client_data) { + H5Eset_auto2(estack_id, func, client_data); +} + +inline char* h5e_get_major(H5E_major_t maj) { + return H5Eget_major(maj); +} + +inline char* h5e_get_minor(H5E_minor_t min) { + return H5Eget_minor(min); +} + +inline herr_t h5e_walk2(hid_t err_stack, + H5E_direction_t direction, + H5E_walk2_t func, + void* client_data) { + return H5Ewalk2(err_stack, direction, func, client_data); +} + +inline herr_t h5e_clear2(hid_t err_stack) { + return H5Eclear2(err_stack); +} + + +} // namespace nothrow +} // namespace detail +} // namespace HighFive From 29f9a978e87a50f5f7a80610871cd62c9f306419 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 12 Dec 2023 10:28:19 +0100 Subject: [PATCH 17/97] Wrap all used H5I functions. 
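For context, a minimal sketch (not part of the patch) of the reference-counting pattern Object applies with these wrappers: copying increments the HDF5 reference count and may throw, while the destructor uses the nothrow variant and only logs, because a destructor must not throw. IdHolder is a made-up name for illustration.

#include <highfive/H5File.hpp>  // assumed to make the new bits/h5i_wrapper.hpp reachable

struct IdHolder {
    hid_t hid;

    explicit IdHolder(hid_t id): hid(id) {}
    IdHolder& operator=(const IdHolder&) = delete;  // assignment omitted for brevity

    IdHolder(const IdHolder& other): hid(other.hid) {
        if (other.hid > 0) {
            HighFive::detail::h5i_inc_ref(hid);  // throws ObjectException on failure
        }
    }

    ~IdHolder() {
        if (hid > 0 && HighFive::detail::nothrow::h5i_dec_ref(hid) < 0) {
            // Log and swallow: throwing from a destructor would terminate the program.
        }
    }
};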
(#863) --- include/highfive/bits/H5DataType_misc.hpp | 5 +- include/highfive/bits/H5Object_misc.hpp | 28 +++---- include/highfive/bits/H5Path_traits_misc.hpp | 12 +-- include/highfive/bits/H5Reference_misc.hpp | 5 +- include/highfive/bits/h5i_wrapper.hpp | 79 ++++++++++++++++++++ 5 files changed, 103 insertions(+), 26 deletions(-) create mode 100644 include/highfive/bits/h5i_wrapper.hpp diff --git a/include/highfive/bits/H5DataType_misc.hpp b/include/highfive/bits/H5DataType_misc.hpp index 698d8fa28..0da4411e9 100644 --- a/include/highfive/bits/H5DataType_misc.hpp +++ b/include/highfive/bits/H5DataType_misc.hpp @@ -23,6 +23,7 @@ #include "H5Inspector_misc.hpp" #include "h5t_wrapper.hpp" +#include "h5i_wrapper.hpp" namespace HighFive { @@ -69,8 +70,8 @@ inline StringType DataType::asStringType() const { throw DataTypeException("Invalid conversion to StringType."); } - if (isValid() && H5Iinc_ref(_hid) < 0) { - throw ObjectException("Reference counter increase failure"); + if (isValid()) { + detail::h5i_inc_ref(_hid); } return StringType(_hid); diff --git a/include/highfive/bits/H5Object_misc.hpp b/include/highfive/bits/H5Object_misc.hpp index f477d7fdf..c5a1f3999 100644 --- a/include/highfive/bits/H5Object_misc.hpp +++ b/include/highfive/bits/H5Object_misc.hpp @@ -12,6 +12,7 @@ #include "../H5Exception.hpp" #include "../H5Utility.hpp" +#include "h5i_wrapper.hpp" namespace HighFive { namespace detail { @@ -29,8 +30,8 @@ inline Object::Object(hid_t hid) inline Object::Object(const Object& other) : _hid(other._hid) { - if (other.isValid() && H5Iinc_ref(_hid) < 0) { - throw ObjectException("Reference counter increase failure"); + if (other.isValid()) { + detail::h5i_inc_ref(_hid); } } @@ -41,25 +42,28 @@ inline Object::Object(Object&& other) noexcept inline Object& Object::operator=(const Object& other) { if (this != &other) { - if (isValid()) - H5Idec_ref(_hid); + if ((*this).isValid()) { + detail::h5i_dec_ref(_hid); + } _hid = other._hid; - if (other.isValid() && H5Iinc_ref(_hid) < 0) { - throw ObjectException("Reference counter increase failure"); + if (other.isValid()) { + detail::h5i_inc_ref(_hid); } } return *this; } inline Object::~Object() { - if (isValid() && H5Idec_ref(_hid) < 0) { - HIGHFIVE_LOG_ERROR("HighFive::~Object: reference counter decrease failure"); + if (isValid()) { + if (detail::nothrow::h5i_dec_ref(_hid) < 0) { + HIGHFIVE_LOG_ERROR("Failed to decrease reference count of HID"); + } } } inline bool Object::isValid() const noexcept { - return (_hid != H5I_INVALID_HID) && (H5Iis_valid(_hid) != false); + return (_hid > 0) && (detail::nothrow::h5i_is_valid(_hid) > 0); } inline hid_t Object::getId() const noexcept { @@ -87,11 +91,7 @@ static inline ObjectType _convert_object_type(const H5I_type_t& h5type) { inline ObjectType Object::getType() const { // H5Iget_type is a very lightweight func which extracts the type from the id - H5I_type_t h5type; - if ((h5type = H5Iget_type(_hid)) == H5I_BADID) { - HDF5ErrMapper::ToException("Invalid hid or object type"); - } - return _convert_object_type(h5type); + return _convert_object_type(detail::h5i_get_type(_hid)); } inline ObjectInfo Object::getInfo() const { diff --git a/include/highfive/bits/H5Path_traits_misc.hpp b/include/highfive/bits/H5Path_traits_misc.hpp index 444e9294b..acde06d1e 100644 --- a/include/highfive/bits/H5Path_traits_misc.hpp +++ b/include/highfive/bits/H5Path_traits_misc.hpp @@ -21,20 +21,16 @@ inline PathTraits::PathTraits() { std::is_same::value, "PathTraits can only be applied to Group, DataSet and Attribute"); 
const auto& obj = static_cast(*this); - if (!obj.isValid()) { - return; + if (obj.isValid()) { + const hid_t file_id = detail::h5i_get_file_id(obj.getId()); + _file_obj.reset(new File(file_id)); } - const hid_t file_id = H5Iget_file_id(obj.getId()); - if (file_id < 0) { - HDF5ErrMapper::ToException("getFile(): Could not obtain file of object"); - } - _file_obj.reset(new File(file_id)); } template inline std::string PathTraits::getPath() const { return details::get_name([this](char* buffer, size_t length) { - return H5Iget_name(static_cast(*this).getId(), buffer, length); + return detail::h5i_get_name(static_cast(*this).getId(), buffer, length); }); } diff --git a/include/highfive/bits/H5Reference_misc.hpp b/include/highfive/bits/H5Reference_misc.hpp index 98e1fb88e..c73deee2a 100644 --- a/include/highfive/bits/H5Reference_misc.hpp +++ b/include/highfive/bits/H5Reference_misc.hpp @@ -22,8 +22,9 @@ namespace HighFive { inline Reference::Reference(const Object& location, const Object& object) : parent_id(location.getId()) { - obj_name = details::get_name( - [&](char* buffer, size_t length) { return H5Iget_name(object.getId(), buffer, length); }); + obj_name = details::get_name([&](char* buffer, size_t length) { + return detail::h5i_get_name(object.getId(), buffer, length); + }); } inline void Reference::create_ref(hobj_ref_t* refptr) const { diff --git a/include/highfive/bits/h5i_wrapper.hpp b/include/highfive/bits/h5i_wrapper.hpp new file mode 100644 index 000000000..c81290b74 --- /dev/null +++ b/include/highfive/bits/h5i_wrapper.hpp @@ -0,0 +1,79 @@ +#pragma once + +#include + +namespace HighFive { +namespace detail { +inline int h5i_inc_ref(hid_t id) { + auto count = H5Iinc_ref(id); + + if (count < 0) { + throw ObjectException("Failed to increase reference count of HID"); + } + + return count; +} + +namespace nothrow { + +inline int h5i_dec_ref(hid_t id) { + return H5Idec_ref(id); +} + +} // namespace nothrow + +inline int h5i_dec_ref(hid_t id) { + int count = H5Idec_ref(id); + if (count < 0) { + throw ObjectException("Failed to decrease reference count of HID"); + } + + return count; +} + +namespace nothrow { +inline htri_t h5i_is_valid(hid_t id) { + return H5Iis_valid(id); +} + +} // namespace nothrow + +inline htri_t h5i_is_valid(hid_t id) { + htri_t tri = H5Iis_valid(id); + if (tri < 0) { + throw ObjectException("Failed to check if HID is valid"); + } + + return tri; +} + +inline H5I_type_t h5i_get_type(hid_t id) { + H5I_type_t type = H5Iget_type(id); + if (type == H5I_BADID) { + HDF5ErrMapper::ToException("Failed to get type of HID"); + } + + return type; +} + +template +inline hid_t h5i_get_file_id(hid_t id) { + hid_t file_id = H5Iget_file_id(id); + if (file_id < 0) { + HDF5ErrMapper::ToException("Failed not obtain file HID of object"); + } + + return file_id; +} + +inline ssize_t h5i_get_name(hid_t id, char* name, size_t size) { + ssize_t n_chars = H5Iget_name(id, name, size); + if (n_chars < 0) { + HDF5ErrMapper::ToException("Failed to get name of HID."); + } + + return n_chars; +} + +} // namespace detail +} // namespace HighFive From c660174d43302b43616b87233631324eb96c8daf Mon Sep 17 00:00:00 2001 From: Nicolas Cornu Date: Wed, 13 Dec 2023 10:26:42 +0100 Subject: [PATCH 18/97] Bump CI to 1.12.3 (#891) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 04c4ea154..8730ed410 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -91,7 +91,7 @@ jobs: runs-on: 
ubuntu-latest strategy: matrix: - hdf5_version : [ hdf5-1_8_23, hdf5-1_10_11, hdf5-1_12_2, hdf5-1_14_3 ] + hdf5_version : [ hdf5-1_8_23, hdf5-1_10_11, hdf5-1_12_3, hdf5-1_14_3 ] steps: - uses: actions/checkout@v3 From 9e5cb5b672a01e9c189cc102ab6ee89a371571f4 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 18 Dec 2023 10:14:19 +0100 Subject: [PATCH 19/97] Restructure dependency handling. (#898) The file containing the specializations of traits for specific optional dependencies have been moved to their own files. This allows us to move to macro free dependency handling, simply by removing certain guarded includes. --- include/highfive/bits/H5DataType_misc.hpp | 23 +- include/highfive/bits/H5Inspector_decl.hpp | 27 +++ include/highfive/bits/H5Inspector_misc.hpp | 249 +-------------------- include/highfive/boost.hpp | 164 ++++++++++++++ include/highfive/eigen.hpp | 93 ++++++++ include/highfive/half_float.hpp | 21 ++ src/examples/create_dataset_half_float.cpp | 4 - tests/unit/tests_high_five.hpp | 2 - 8 files changed, 316 insertions(+), 267 deletions(-) create mode 100644 include/highfive/bits/H5Inspector_decl.hpp create mode 100644 include/highfive/boost.hpp create mode 100644 include/highfive/eigen.hpp create mode 100644 include/highfive/half_float.hpp diff --git a/include/highfive/bits/H5DataType_misc.hpp b/include/highfive/bits/H5DataType_misc.hpp index 0da4411e9..e29c99b0e 100644 --- a/include/highfive/bits/H5DataType_misc.hpp +++ b/include/highfive/bits/H5DataType_misc.hpp @@ -17,10 +17,6 @@ #include -#ifdef H5_USE_HALF_FLOAT -#include -#endif - #include "H5Inspector_misc.hpp" #include "h5t_wrapper.hpp" #include "h5i_wrapper.hpp" @@ -172,21 +168,6 @@ inline AtomicType::AtomicType() { } // half-float, float, double and long double mapping -#ifdef H5_USE_HALF_FLOAT -using float16_t = half_float::half; - -template <> -inline AtomicType::AtomicType() { - _hid = detail::h5t_copy(H5T_NATIVE_FLOAT); - // Sign position, exponent position, exponent size, mantissa position, mantissa size - detail::h5t_set_fields(_hid, 15, 10, 5, 0, 10); - // Total datatype size (in bytes) - detail::h5t_set_size(_hid, 2); - // Floating point exponent bias - detail::h5t_set_ebias(_hid, 15); -} -#endif - template <> inline AtomicType::AtomicType() { _hid = detail::h5t_copy(H5T_NATIVE_FLOAT); @@ -539,3 +520,7 @@ inline DataType create_datatype() { } } // namespace HighFive + +#ifdef H5_USE_HALF_FLOAT +#include +#endif diff --git a/include/highfive/bits/H5Inspector_decl.hpp b/include/highfive/bits/H5Inspector_decl.hpp new file mode 100644 index 000000000..434545a60 --- /dev/null +++ b/include/highfive/bits/H5Inspector_decl.hpp @@ -0,0 +1,27 @@ +#pragma once + +#include +#include +#include +#include + +namespace HighFive { + +inline size_t compute_total_size(const std::vector& dims) { + return std::accumulate(dims.begin(), dims.end(), size_t{1u}, std::multiplies()); +} + +template +using unqualified_t = typename std::remove_const::type>::type; + + +namespace details { + +template +struct type_helper; + +template +struct inspector; + +} // namespace details +} // namespace HighFive diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index 98da8affd..54bac1b0d 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -21,19 +21,10 @@ #include "string_padding.hpp" -#ifdef H5_USE_BOOST -#include -// starting Boost 1.64, serialization header must come before ublas -#include -#include -#endif -#ifdef H5_USE_EIGEN 
-#include -#endif +#include "H5Inspector_decl.hpp" namespace HighFive { - namespace details { inline bool checkDimensions(const std::vector& dims, size_t n_dim_requested) { @@ -125,13 +116,6 @@ inline std::vector squeezeDimensions(const std::vector& dims, } // namespace details -inline size_t compute_total_size(const std::vector& dims) { - return std::accumulate(dims.begin(), dims.end(), size_t{1u}, std::multiplies()); -} - -template -using unqualified_t = typename std::remove_const::type>::type; - /***** inspector { using type = T @@ -632,232 +616,13 @@ struct inspector { } }; -#ifdef H5_USE_EIGEN -template -struct inspector> { - using type = Eigen::Matrix; - using value_type = T; - using base_type = typename inspector::base_type; - using hdf5_type = base_type; - - static constexpr size_t ndim = 2; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; - static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && - inspector::is_trivially_copyable; - - - static void assert_not_buggy(Eigen::Index nrows, Eigen::Index ncols) { - if (nrows > 1 && ncols > 1) { - throw std::runtime_error( - "HighFive has been broken for Eigen::Matrix. Please check " - "https://github.com/BlueBrain/HighFive/issues/532."); - } - } - - static std::vector getDimensions(const type& val) { - assert_not_buggy(val.rows(), val.cols()); - - std::vector sizes{static_cast(val.rows()), static_cast(val.cols())}; - auto s = inspector::getDimensions(val.data()[0]); - sizes.insert(sizes.end(), s.begin(), s.end()); - return sizes; - } - - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return compute_total_size(dims); - } - - static void prepare(type& val, const std::vector& dims) { - if (dims[0] != static_cast(val.rows()) || - dims[1] != static_cast(val.cols())) { - val.resize(static_cast(dims[0]), - static_cast(dims[1])); - } - - assert_not_buggy(val.rows(), val.cols()); - } - - static hdf5_type* data(type& val) { - assert_not_buggy(val.rows(), val.cols()); - return inspector::data(*val.data()); - } - - static const hdf5_type* data(const type& val) { - assert_not_buggy(val.rows(), val.cols()); - return inspector::data(*val.data()); - } - - static void serialize(const type& val, hdf5_type* m) { - assert_not_buggy(val.rows(), val.cols()); - std::memcpy(m, val.data(), static_cast(val.size()) * sizeof(hdf5_type)); - } - - static void unserialize(const hdf5_type* vec_align, - const std::vector& dims, - type& val) { - assert_not_buggy(val.rows(), val.cols()); - if (dims.size() < 2) { - std::ostringstream os; - os << "Impossible to pair DataSet with " << dims.size() - << " dimensions into an eigen-matrix."; - throw DataSpaceException(os.str()); - } - std::memcpy(val.data(), vec_align, compute_total_size(dims) * sizeof(hdf5_type)); - } -}; -#endif +} // namespace details +} // namespace HighFive #ifdef H5_USE_BOOST -template -struct inspector> { - using type = boost::multi_array; - using value_type = T; - using base_type = typename inspector::base_type; - using hdf5_type = typename inspector::hdf5_type; - - static constexpr size_t ndim = Dims; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; - static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && - inspector::is_trivially_copyable; - - static std::vector getDimensions(const type& val) { - std::vector sizes; - for (size_t i = 0; i < ndim; ++i) { - sizes.push_back(val.shape()[i]); - } 
- auto s = inspector::getDimensions(val.data()[0]); - sizes.insert(sizes.end(), s.begin(), s.end()); - return sizes; - } - - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return compute_total_size(dims); - } - - static void prepare(type& val, const std::vector& dims) { - if (dims.size() < ndim) { - std::ostringstream os; - os << "Only '" << dims.size() << "' given but boost::multi_array is of size '" << ndim - << "'."; - throw DataSpaceException(os.str()); - } - boost::array ext; - std::copy(dims.begin(), dims.begin() + ndim, ext.begin()); - val.resize(ext); - std::vector next_dims(dims.begin() + Dims, dims.end()); - std::size_t size = std::accumulate(dims.begin(), - dims.begin() + Dims, - std::size_t{1}, - std::multiplies()); - for (size_t i = 0; i < size; ++i) { - inspector::prepare(*(val.origin() + i), next_dims); - } - } - - static hdf5_type* data(type& val) { - return inspector::data(*val.data()); - } - - static const hdf5_type* data(const type& val) { - return inspector::data(*val.data()); - } - - template - static void serialize(const type& val, It m) { - size_t size = val.num_elements(); - size_t subsize = inspector::getSizeVal(*val.origin()); - for (size_t i = 0; i < size; ++i) { - inspector::serialize(*(val.origin() + i), m + i * subsize); - } - } - - template - static void unserialize(It vec_align, const std::vector& dims, type& val) { - std::vector next_dims(dims.begin() + ndim, dims.end()); - size_t subsize = compute_total_size(next_dims); - for (size_t i = 0; i < val.num_elements(); ++i) { - inspector::unserialize(vec_align + i * subsize, - next_dims, - *(val.origin() + i)); - } - } -}; - -template -struct inspector> { - using type = boost::numeric::ublas::matrix; - using value_type = unqualified_t; - using base_type = typename inspector::base_type; - using hdf5_type = typename inspector::hdf5_type; - - static constexpr size_t ndim = 2; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; - static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && - inspector::is_trivially_copyable; - - static std::vector getDimensions(const type& val) { - std::vector sizes{val.size1(), val.size2()}; - auto s = inspector::getDimensions(val(0, 0)); - sizes.insert(sizes.end(), s.begin(), s.end()); - return sizes; - } - - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return compute_total_size(dims); - } - - static void prepare(type& val, const std::vector& dims) { - if (dims.size() < ndim) { - std::ostringstream os; - os << "Impossible to pair DataSet with " << dims.size() << " dimensions into a " << ndim - << " boost::numeric::ublas::matrix"; - throw DataSpaceException(os.str()); - } - val.resize(dims[0], dims[1], false); - } - - static hdf5_type* data(type& val) { - return inspector::data(val(0, 0)); - } - - static const hdf5_type* data(const type& val) { - return inspector::data(val(0, 0)); - } - - static void serialize(const type& val, hdf5_type* m) { - size_t size = val.size1() * val.size2(); - size_t subsize = inspector::getSizeVal(val(0, 0)); - for (size_t i = 0; i < size; ++i) { - inspector::serialize(*(&val(0, 0) + i), m + i * subsize); - } - } - - static void unserialize(const hdf5_type* vec_align, - const std::vector& dims, - type& val) { - std::vector next_dims(dims.begin() + ndim, dims.end()); - size_t subsize = 
compute_total_size(next_dims); - size_t size = val.size1() * val.size2(); - for (size_t i = 0; i < size; ++i) { - inspector::unserialize(vec_align + i * subsize, - next_dims, - *(&val(0, 0) + i)); - } - } -}; +#include #endif -} // namespace details -} // namespace HighFive +#ifdef H5_USE_EIGEN +#include +#endif diff --git a/include/highfive/boost.hpp b/include/highfive/boost.hpp new file mode 100644 index 000000000..8992159a2 --- /dev/null +++ b/include/highfive/boost.hpp @@ -0,0 +1,164 @@ +#pragma once +#ifdef H5_USE_BOOST + +#include "bits/H5Inspector_decl.hpp" +#include "H5Exception.hpp" + +#include +// starting Boost 1.64, serialization header must come before ublas +#include +#include + +namespace HighFive { +namespace details { + +template +struct inspector> { + using type = boost::multi_array; + using value_type = T; + using base_type = typename inspector::base_type; + using hdf5_type = typename inspector::hdf5_type; + + static constexpr size_t ndim = Dims; + static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && + inspector::is_trivially_copyable; + + static std::vector getDimensions(const type& val) { + std::vector sizes; + for (size_t i = 0; i < ndim; ++i) { + sizes.push_back(val.shape()[i]); + } + auto s = inspector::getDimensions(val.data()[0]); + sizes.insert(sizes.end(), s.begin(), s.end()); + return sizes; + } + + static size_t getSizeVal(const type& val) { + return compute_total_size(getDimensions(val)); + } + + static size_t getSize(const std::vector& dims) { + return compute_total_size(dims); + } + + static void prepare(type& val, const std::vector& dims) { + if (dims.size() < ndim) { + std::ostringstream os; + os << "Only '" << dims.size() << "' given but boost::multi_array is of size '" << ndim + << "'."; + throw DataSpaceException(os.str()); + } + boost::array ext; + std::copy(dims.begin(), dims.begin() + ndim, ext.begin()); + val.resize(ext); + std::vector next_dims(dims.begin() + Dims, dims.end()); + std::size_t size = std::accumulate(dims.begin(), + dims.begin() + Dims, + std::size_t{1}, + std::multiplies()); + for (size_t i = 0; i < size; ++i) { + inspector::prepare(*(val.origin() + i), next_dims); + } + } + + static hdf5_type* data(type& val) { + return inspector::data(*val.data()); + } + + static const hdf5_type* data(const type& val) { + return inspector::data(*val.data()); + } + + template + static void serialize(const type& val, It m) { + size_t size = val.num_elements(); + size_t subsize = inspector::getSizeVal(*val.origin()); + for (size_t i = 0; i < size; ++i) { + inspector::serialize(*(val.origin() + i), m + i * subsize); + } + } + + template + static void unserialize(It vec_align, const std::vector& dims, type& val) { + std::vector next_dims(dims.begin() + ndim, dims.end()); + size_t subsize = compute_total_size(next_dims); + for (size_t i = 0; i < val.num_elements(); ++i) { + inspector::unserialize(vec_align + i * subsize, + next_dims, + *(val.origin() + i)); + } + } +}; + +template +struct inspector> { + using type = boost::numeric::ublas::matrix; + using value_type = unqualified_t; + using base_type = typename inspector::base_type; + using hdf5_type = typename inspector::hdf5_type; + + static constexpr size_t ndim = 2; + static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && + inspector::is_trivially_copyable; + + static std::vector 
getDimensions(const type& val) { + std::vector sizes{val.size1(), val.size2()}; + auto s = inspector::getDimensions(val(0, 0)); + sizes.insert(sizes.end(), s.begin(), s.end()); + return sizes; + } + + static size_t getSizeVal(const type& val) { + return compute_total_size(getDimensions(val)); + } + + static size_t getSize(const std::vector& dims) { + return compute_total_size(dims); + } + + static void prepare(type& val, const std::vector& dims) { + if (dims.size() < ndim) { + std::ostringstream os; + os << "Impossible to pair DataSet with " << dims.size() << " dimensions into a " << ndim + << " boost::numeric::ublas::matrix"; + throw DataSpaceException(os.str()); + } + val.resize(dims[0], dims[1], false); + } + + static hdf5_type* data(type& val) { + return inspector::data(val(0, 0)); + } + + static const hdf5_type* data(const type& val) { + return inspector::data(val(0, 0)); + } + + static void serialize(const type& val, hdf5_type* m) { + size_t size = val.size1() * val.size2(); + size_t subsize = inspector::getSizeVal(val(0, 0)); + for (size_t i = 0; i < size; ++i) { + inspector::serialize(*(&val(0, 0) + i), m + i * subsize); + } + } + + static void unserialize(const hdf5_type* vec_align, + const std::vector& dims, + type& val) { + std::vector next_dims(dims.begin() + ndim, dims.end()); + size_t subsize = compute_total_size(next_dims); + size_t size = val.size1() * val.size2(); + for (size_t i = 0; i < size; ++i) { + inspector::unserialize(vec_align + i * subsize, + next_dims, + *(&val(0, 0) + i)); + } + } +}; + +} // namespace details +} // namespace HighFive + +#endif diff --git a/include/highfive/eigen.hpp b/include/highfive/eigen.hpp new file mode 100644 index 000000000..c47095dde --- /dev/null +++ b/include/highfive/eigen.hpp @@ -0,0 +1,93 @@ +#pragma once +#ifdef H5_USE_EIGEN + +#include "bits/H5Inspector_decl.hpp" +#include "H5Exception.hpp" + +#include + + +namespace HighFive { +namespace details { + +template +struct inspector> { + using type = Eigen::Matrix; + using value_type = T; + using base_type = typename inspector::base_type; + using hdf5_type = base_type; + + static constexpr size_t ndim = 2; + static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && + inspector::is_trivially_copyable; + + + static void assert_not_buggy(Eigen::Index nrows, Eigen::Index ncols) { + if (nrows > 1 && ncols > 1) { + throw std::runtime_error( + "HighFive has been broken for Eigen::Matrix. 
Please check " + "https://github.com/BlueBrain/HighFive/issues/532."); + } + } + + static std::vector getDimensions(const type& val) { + assert_not_buggy(val.rows(), val.cols()); + + std::vector sizes{static_cast(val.rows()), static_cast(val.cols())}; + auto s = inspector::getDimensions(val.data()[0]); + sizes.insert(sizes.end(), s.begin(), s.end()); + return sizes; + } + + static size_t getSizeVal(const type& val) { + return compute_total_size(getDimensions(val)); + } + + static size_t getSize(const std::vector& dims) { + return compute_total_size(dims); + } + + static void prepare(type& val, const std::vector& dims) { + if (dims[0] != static_cast(val.rows()) || + dims[1] != static_cast(val.cols())) { + val.resize(static_cast(dims[0]), + static_cast(dims[1])); + } + + assert_not_buggy(val.rows(), val.cols()); + } + + static hdf5_type* data(type& val) { + assert_not_buggy(val.rows(), val.cols()); + return inspector::data(*val.data()); + } + + static const hdf5_type* data(const type& val) { + assert_not_buggy(val.rows(), val.cols()); + return inspector::data(*val.data()); + } + + static void serialize(const type& val, hdf5_type* m) { + assert_not_buggy(val.rows(), val.cols()); + std::memcpy(m, val.data(), static_cast(val.size()) * sizeof(hdf5_type)); + } + + static void unserialize(const hdf5_type* vec_align, + const std::vector& dims, + type& val) { + assert_not_buggy(val.rows(), val.cols()); + if (dims.size() < 2) { + std::ostringstream os; + os << "Impossible to pair DataSet with " << dims.size() + << " dimensions into an eigen-matrix."; + throw DataSpaceException(os.str()); + } + std::memcpy(val.data(), vec_align, compute_total_size(dims) * sizeof(hdf5_type)); + } +}; + +} // namespace details +} // namespace HighFive + +#endif diff --git a/include/highfive/half_float.hpp b/include/highfive/half_float.hpp new file mode 100644 index 000000000..998e693ff --- /dev/null +++ b/include/highfive/half_float.hpp @@ -0,0 +1,21 @@ +#pragma once +#ifdef H5_USE_HALF_FLOAT + +#include + +namespace HighFive { +using float16_t = half_float::half; + +template <> +inline AtomicType::AtomicType() { + _hid = detail::h5t_copy(H5T_NATIVE_FLOAT); + // Sign position, exponent position, exponent size, mantissa position, mantissa size + detail::h5t_set_fields(_hid, 15, 10, 5, 0, 10); + // Total datatype size (in bytes) + detail::h5t_set_size(_hid, 2); + // Floating point exponent bias + detail::h5t_set_ebias(_hid, 15); +} +} // namespace HighFive + +#endif diff --git a/src/examples/create_dataset_half_float.cpp b/src/examples/create_dataset_half_float.cpp index 2b720cd18..837c58704 100644 --- a/src/examples/create_dataset_half_float.cpp +++ b/src/examples/create_dataset_half_float.cpp @@ -7,8 +7,6 @@ * */ -#ifdef H5_USE_HALF_FLOAT - #include #include #include @@ -45,5 +43,3 @@ int main(void) { return 0; } - -#endif diff --git a/tests/unit/tests_high_five.hpp b/tests/unit/tests_high_five.hpp index 0ebd58c44..9d259c8d1 100644 --- a/tests/unit/tests_high_five.hpp +++ b/tests/unit/tests_high_five.hpp @@ -43,8 +43,6 @@ using base_test_types = std::tuple; #ifdef H5_USE_HALF_FLOAT -#include - using float16_t = half_float::half; using numerical_test_types = decltype(std::tuple_cat(std::declval(), std::tuple())); From 5e1a92f4c39c5b1534a1e731b76d4e76c5c57a05 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 18 Dec 2023 10:44:10 +0100 Subject: [PATCH 20/97] Add named ctors for scalar and null dataspaces. 
(#899) --- include/highfive/H5DataSpace.hpp | 18 ++++++++++++++++++ include/highfive/bits/H5Dataspace_misc.hpp | 8 ++++++++ include/highfive/bits/h5s_wrapper.hpp | 9 +++++++++ tests/unit/tests_high_five_base.cpp | 14 ++++++++++++++ 4 files changed, 49 insertions(+) diff --git a/include/highfive/H5DataSpace.hpp b/include/highfive/H5DataSpace.hpp index 95d04dbbb..7c7c5860a 100644 --- a/include/highfive/H5DataSpace.hpp +++ b/include/highfive/H5DataSpace.hpp @@ -145,6 +145,24 @@ class DataSpace: public Object { /// \since 1.3 explicit DataSpace(DataspaceType space_type); + /// \brief Create a scalar DataSpace. + /// + /// \code{.cpp} + /// auto dataspace = DataSpace::Scalar(); + /// \endcode + /// + /// \since 2.9 + static DataSpace Scalar(); + + /// \brief Create a null DataSpace. + /// + /// \code{.cpp} + /// auto dataspace = DataSpace::Null(); + /// \endcode + /// + /// \since 2.9 + static DataSpace Null(); + /// \brief Create a copy of the DataSpace which will have different id. /// /// \code{.cpp} diff --git a/include/highfive/bits/H5Dataspace_misc.hpp b/include/highfive/bits/H5Dataspace_misc.hpp index 03fb4a950..ceae1e531 100644 --- a/include/highfive/bits/H5Dataspace_misc.hpp +++ b/include/highfive/bits/H5Dataspace_misc.hpp @@ -42,6 +42,14 @@ inline DataSpace::DataSpace(const IT begin, const IT end) { _hid = detail::h5s_create_simple(int(real_dims.size()), real_dims.data(), nullptr); } +inline DataSpace DataSpace::Scalar() { + return DataSpace(DataSpace::dataspace_scalar); +} + +inline DataSpace DataSpace::Null() { + return DataSpace(DataSpace::dataspace_null); +} + inline DataSpace::DataSpace(const std::vector& dims, const std::vector& maxdims) { if (dims.size() != maxdims.size()) { throw DataSpaceException("dims and maxdims must be the same length."); diff --git a/include/highfive/bits/h5s_wrapper.hpp b/include/highfive/bits/h5s_wrapper.hpp index 32b872b6e..03edf8005 100644 --- a/include/highfive/bits/h5s_wrapper.hpp +++ b/include/highfive/bits/h5s_wrapper.hpp @@ -101,6 +101,15 @@ inline hssize_t h5s_get_simple_extent_npoints(hid_t space_id) { return nelements; } +inline H5S_class_t h5s_get_simple_extent_type(hid_t space_id) { + H5S_class_t cls = H5Sget_simple_extent_type(space_id); + if (cls == H5S_NO_CLASS) { + HDF5ErrMapper::ToException("Unable to get class of simple dataspace."); + } + + return cls; +} + } // namespace detail } // namespace HighFive diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 6f1953f4d..163535b55 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -775,11 +775,25 @@ TEST_CASE("DataSpace::getElementCount") { SECTION("null") { auto space = DataSpace(DataSpace::dataspace_null); CHECK(space.getElementCount() == 0); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_NULL); + } + + SECTION("null named ctor") { + auto space = DataSpace::Null(); + CHECK(space.getElementCount() == 0); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_NULL); } SECTION("scalar") { auto space = DataSpace(DataSpace::dataspace_scalar); CHECK(space.getElementCount() == 1); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_SCALAR); + } + + SECTION("scalar named ctor") { + auto space = DataSpace::Scalar(); + CHECK(space.getElementCount() == 1); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_SCALAR); } SECTION("simple, empty (1D)") { From 065234dafead4b251d69f54e9f7e9e5cad254af2 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 19 Dec 2023 
14:21:15 +0100 Subject: [PATCH 21/97] Refactor auto-update scripts. (#894) --- .github/create_submodule_update_pr.sh | 70 +++++++++++++++++++ .../check_doxygen_awesome_version.yml | 31 +------- 2 files changed, 71 insertions(+), 30 deletions(-) create mode 100644 .github/create_submodule_update_pr.sh diff --git a/.github/create_submodule_update_pr.sh b/.github/create_submodule_update_pr.sh new file mode 100644 index 000000000..d43dc1d77 --- /dev/null +++ b/.github/create_submodule_update_pr.sh @@ -0,0 +1,70 @@ +#!/usr/bin/env bash + +# Usage: +# $0 PACKAGE_NAME VERSION +# +# Before calling this script, run the commands to update the dependency. If +# there dependency shouldn't update then the script must not modify the repo. +# +# When the repo is in the updated state, run this script. It will commit +# everything and create a PR. +# +# The PR title is `Update ${PACKAGE_NAME} to ${VERSION}` the script checks for +# this string and only creates a new PR if the string isn't the title of an +# existing PR. +# +# PACKAGE_NAME is an identifier of the package, no spaces. Doesn't need to be +# the exact name of the dependency. +# +# VERSION an identifier of the next version of the package, no spaces. This +# variable must be the same if the version if the same and different if +# the version is different. However, it doesn't have to be a `x.y.z` it +# could be a Git SHA, or something else. + +set -eu + +PACKAGE_NAME=$1 +VERSION=$2 +BRANCH=update-${PACKAGE_NAME}-${VERSION} +COMMIT_MESSAGE="Update ${PACKAGE_NAME} to ${VERSION}" + +if [[ -z "${PACKAGE_NAME}" ]] +then + echo "Empty PACKAGE_NAME." + exit -1 +fi + +if [[ -z "${VERSION}" ]] +then + echo "Empty VERSION." + exit -1 +fi + + +# NOTE: In a later runs of CI we will search for PR with this exact +# title. Only if no such PR exists will the script create a +# new PR. +PR_TITLE="Update ${PACKAGE_NAME} to ${VERSION}" + +if [[ -z "$(git status --porcelain)" ]] +then + echo "No differences detected: ${PACKAGE_NAME} is up-to-date." + exit 0 +fi + +if [[ -z "$(gh pr list --state all --search "${PR_TITLE}")" ]] +then + + git checkout -b $BRANCH + git config user.name github-actions + git config user.email github-actions@github.com + git commit -a -m "${COMMIT_MESSAGE}" + + git push -u origin ${BRANCH} + gh pr create \ + --title "${PR_TITLE}" \ + --body "This PR was generated by a Github Actions workflow." + +else + echo "Old PR detected: didn't create a new one." +fi diff --git a/.github/workflows/check_doxygen_awesome_version.yml b/.github/workflows/check_doxygen_awesome_version.yml index 233577ef8..2e4df2833 100644 --- a/.github/workflows/check_doxygen_awesome_version.yml +++ b/.github/workflows/check_doxygen_awesome_version.yml @@ -17,33 +17,4 @@ jobs: run: | VERSION=$(doc/doxygen-awesome-css/update_doxygen_awesome.sh "$(mktemp -d)") - BRANCH=update-doxygen-awesome-${VERSION} - COMMIT_MESSAGE="Update doxygen-awesome to ${VERSION}" - - # NOTE: In a later runs of CI we will search for PR with this exact - # title. Only if no such PR exists will the script create a - # new PR. - PR_TITLE="[docs] Update doxygen-awesome to ${VERSION}" - - if [[ -z "$(git status --porcelain)" ]] - then - echo "No differences detected: doxygen-awesome is up-to-date." 
- exit 0 - fi - - if [[ -z "$(gh pr list --state all --search "${PR_TITLE}")" ]] - then - - git checkout -b $BRANCH - git config user.name github-actions - git config user.email github-actions@github.com - git commit -a -m "${COMMIT_MESSAGE}" - - git push -u origin ${BRANCH} - gh pr create \ - --title "${PR_TITLE}" \ - --body "This PR was generated by a Github Actions workflow." - - else - echo "Old PR detected: didn't create a new one." - fi + .github/create_submodule_update_pr.sh doxygen-awesome ${VERSION} From 76382329c47c94924eb97a2100a0116494456bee Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 19 Dec 2023 15:04:39 +0100 Subject: [PATCH 22/97] Improve testing infrastructure. (#871) Adds/reimplements abstractions for the following: * Create multi-dimensional array filled with suitable values. * Traits for accessing values. * Traits for hiding the difference of DataSet and Attribute. * Useful utilities such as `ravel`, `unravel` and `flat_size`. --- doc/developer_guide.md | 131 ++++++ tests/unit/create_traits.hpp | 70 ++++ tests/unit/data_generator.hpp | 463 ++++++++++++++++++++++ tests/unit/supported_types.hpp | 108 +++++ tests/unit/test_all_types.cpp | 246 ++++++++++++ tests/unit/tests_high_five_multi_dims.cpp | 2 +- 6 files changed, 1019 insertions(+), 1 deletion(-) create mode 100644 tests/unit/create_traits.hpp create mode 100644 tests/unit/data_generator.hpp create mode 100644 tests/unit/supported_types.hpp diff --git a/doc/developer_guide.md b/doc/developer_guide.md index 3017289b5..fc388f3b5 100644 --- a/doc/developer_guide.md +++ b/doc/developer_guide.md @@ -91,3 +91,134 @@ release. Once this is done perform a final round of updates: * Update BlueBrain Spack recipe to use the archive and not the Git commit. * Update the upstream Spack recipe. +## Writing Tests +### Generate Multi-Dimensional Test Data +Input array of any dimension and type can be generated using the template class +`DataGenerator`. For example: +``` +auto dims = std::vector{4, 2}; +auto values = testing::DataGenerator>::create(dims); +``` +Generates an `std::vector>` initialized with suitable +values. + +If "suitable" isn't specific enough, one can specify a callback: +``` +auto callback = [](const std::vector& indices) { + return 42.0; +} + +auto values = testing::DataGenerator>::create(dims, callback); +``` + +The `dims` can be generated via `testing::DataGenerator::default_dims` or by +using `testing::DataGenerator::sanitize_dims`. Remember, that certain +containers are fixed size and that we often compute the number of elements by +multiplying the dims. + +### Generate Scalar Test Data +To generate a single "suitable" element use template class `DefaultValues`, e.g. +``` +auto default_values = testing::DefaultValues(); +auto x = testing::DefaultValues(indices); +``` + +### Accessing Elements +To access a particular element from an unknown container use the following trait: +``` +using trait = testing::ContainerTraits>; +// auto x = values[1][0]; +auto x = trait::get(values, {1, 0}); + +// values[1][0] = 42.0; +trait::set(values, {1, 0}, 42.0); +``` + +### Utilities For Multi-Dimensional Arrays +Use `testing::DataGenerator::allocate` to allocate an array (without filling +it) and `testing::copy` to copy an array from one type to another. There's +`testing::ravel`, `testing::unravel` and `testing::flat_size` to compute the +position in a flat array from a multi-dimensional index, the reverse and the +number of element in the multi-dimensional array. 
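+
+For example, a short sketch of how these helpers fit together; the values
+mirror the `ravel` unit test in `test_all_types.cpp`:
+```
+auto dims = std::vector<size_t>{2, 4, 5};
+auto indices = std::vector<size_t>{1, 2, 3};
+
+// Row-major flat index: 3 + 5 * (2 + 4 * 1) == 33.
+size_t flat_index = testing::ravel(indices, dims);
+
+// ... and back again to {1, 2, 3}.
+auto roundtrip = testing::unravel(flat_index, dims);
+
+// Total number of elements: 2 * 4 * 5 == 40.
+size_t n_elements = testing::flat_size(dims);
+```
+Note that `ravel` and `unravel` assume row-major (C) ordering.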
+ +### Deduplicating DataSet and Attribute +Due to how HighFive is written testing `DataSet` and `Attribute` often requires +duplicating the entire test code because somewhere a `createDataSet` must be +replaced with `createAttribute`. Use `testing::AttributeCreateTraits` and +`testing::DataSetCreateTraits`. For example, +``` +template +void check_write(...) { + // Same as one of: + // file.createDataSet(name, values); + // file.createAttribute(name, values); + CreateTraits::create(file, name, values); +} +``` + +### Test Organization +#### Multi-Dimensional Arrays +All tests for reading/writing whole multi-dimensional arrays to datasets or +attributes belong in `tests/unit/tests_high_five_multi_dimensional.cpp`. This +includes write/read cycles; checking all the generic edges cases, e.g. empty +arrays and mismatching sizes; and checking non-reallocation. + +Read/Write cycles are implemented in two distinct checks. One for writing and +another for reading. When checking writing we read with a "trusted" +multi-dimensional array (a nested `std::vector`), and vice-versa when checking +reading. This matters because certain bugs, like writing a column major array +as if it were row-major can't be caught if one reads it back into a +column-major array. + +Remember, `std::vector` is very different from all other `std::vector`s. + +Every container `template C;` should at least be checked with all of +the following `T`s that are supported by the container: `bool`, `double`, +`std::string`, `std::vector`, `std::array`. The reason is `bool` and +`std::string` are special, `double` is just a POD, `std::vector` requires +dynamic memory allocation and `std::array` is statically allocated. + +Similarly, each container should be put inside an `std::vector` and an +`std::array`. + +#### Scalar Data Set +Write-read cycles for scalar values should be implemented in +`tests/unit/tests_high_five_scalar.cpp`. + +#### Data Types +Unit-tests related to checking that `DataType` API, go in +`tests/unit/tests_high_data_type.cpp`. + +#### Selections +Anything selection related goes in `tests/unit/test_high_five_selection.cpp`. +This includes things like `ElementSet` and `HyperSlab`. + +#### Strings +Regular write-read cycles for strings are performed along with the other types, +see above. This should cover compatibility of `std::string` with all +containers. However, additional testing is required, e.g. character set, +padding, fixed vs. variable length. These all go in +`tests/unit/test_high_five_string.cpp`. + +#### Specific Tests For Optional Containers +If containers, e.g. `Eigen::Matrix` require special checks those go in files +called `tests/unit/test_high_five_*.cpp` where `*` is `eigen` for Eigen. + +#### Memory Layout Assumptions +In HighFive we make assumptions about the memory layout of certain types. For +example, we assume that +``` +auto array = std::vector>(n); +doube * ptr = (double*) array.data(); +``` +is a sensible thing to do. We assume similar about `bool` and +`details::Boolean`. These types of tests go into +`tests/unit/tests_high_five_memory_layout.cpp`. + +#### H5Easy +Anything `H5Easy` related goes in files with the appropriate name. + +#### Everything Else +What's left goes in `tests/unit/test_high_five_base.cpp`. This covers opening +files, groups, dataset or attributes; checking certain pathological edge cases; +etc. 
diff --git a/tests/unit/create_traits.hpp b/tests/unit/create_traits.hpp new file mode 100644 index 000000000..959fcdeb1 --- /dev/null +++ b/tests/unit/create_traits.hpp @@ -0,0 +1,70 @@ +#pragma once + +namespace HighFive { +namespace testing { + +/// \brief Trait for `createAttribute`. +/// +/// The point of these is to simplify testing. The typical issue is that we +/// need to write the tests twice, one with `createDataSet` and then again with +/// `createAttribute`. This trait allows us to inject this difference. +struct AttributeCreateTraits { + using type = Attribute; + + template + static Attribute get(Hi5& hi5, const std::string& name) { + return hi5.getAttribute(name); + } + + + template + static Attribute create(Hi5& hi5, const std::string& name, const Container& container) { + return hi5.createAttribute(name, container); + } + + template + static Attribute create(Hi5& hi5, + const std::string& name, + const DataSpace& dataspace, + const DataType& datatype) { + return hi5.createAttribute(name, dataspace, datatype); + } + + template + static Attribute create(Hi5& hi5, const std::string& name, const DataSpace& dataspace) { + auto datatype = create_datatype(); + return hi5.template createAttribute(name, dataspace); + } +}; + +/// \brief Trait for `createDataSet`. +struct DataSetCreateTraits { + using type = DataSet; + + template + static DataSet get(Hi5& hi5, const std::string& name) { + return hi5.getDataSet(name); + } + + template + static DataSet create(Hi5& hi5, const std::string& name, const Container& container) { + return hi5.createDataSet(name, container); + } + + template + static DataSet create(Hi5& hi5, + const std::string& name, + const DataSpace& dataspace, + const DataType& datatype) { + return hi5.createDataSet(name, dataspace, datatype); + } + + template + static DataSet create(Hi5& hi5, const std::string& name, const DataSpace& dataspace) { + auto datatype = create_datatype(); + return hi5.template createDataSet(name, dataspace); + } +}; + +} // namespace testing +} // namespace HighFive diff --git a/tests/unit/data_generator.hpp b/tests/unit/data_generator.hpp new file mode 100644 index 000000000..f0b0d2625 --- /dev/null +++ b/tests/unit/data_generator.hpp @@ -0,0 +1,463 @@ +#pragma once + +#include +#include +#include +#include +#include +#include + +#ifdef H5_USE_BOOST +#include +#endif + +#include + +namespace HighFive { +namespace testing { + +std::vector lstrip(const std::vector& indices, size_t n) { + std::vector subindices(indices.size() - n); + for (size_t i = 0; i < subindices.size(); ++i) { + subindices[i] = indices[i + n]; + } + + return subindices; +} + +size_t ravel(std::vector& indices, const std::vector dims) { + size_t rank = dims.size(); + size_t linear_index = 0; + size_t ld = 1; + for (size_t kk = 0; kk < rank; ++kk) { + auto k = rank - 1 - kk; + linear_index += indices[k] * ld; + ld *= dims[k]; + } + + return linear_index; +} + +std::vector unravel(size_t flat_index, const std::vector dims) { + size_t rank = dims.size(); + size_t ld = 1; + std::vector indices(rank); + for (size_t kk = 0; kk < rank; ++kk) { + auto k = rank - 1 - kk; + indices[k] = (flat_index / ld) % dims[k]; + ld *= dims[k]; + } + + return indices; +} + +static size_t flat_size(const std::vector& dims) { + size_t n = 1; + for (auto d: dims) { + n *= d; + } + + return n; +} + +template +struct ContainerTraits; + +// -- Scalar basecases --------------------------------------------------------- +template +struct ScalarContainerTraits { + using container_type = T; + using base_type 
= T; + + static void set(container_type& array, std::vector /* indices */, base_type value) { + array = value; + } + + static const base_type& get(const container_type& array, std::vector /* indices */) { + return array; + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector& /* dims */) { + return container_type{}; + } + + static void sanitize_dims(std::vector& /* dims */, size_t /* axis */) {} +}; + +template +struct ContainerTraits::value>::type> + : public ScalarContainerTraits {}; + +template +struct ContainerTraits::value>::type> + : public ScalarContainerTraits {}; + +template <> +struct ContainerTraits: public ScalarContainerTraits {}; + +// -- STL ---------------------------------------------------------------------- +template <> +struct ContainerTraits> { + using container_type = std::vector; + using value_type = bool; + using base_type = bool; + + static void set(container_type& array, + const std::vector& indices, + const base_type& value) { + array[indices[0]] = value; + } + + static base_type get(const container_type& array, const std::vector& indices) { + return array[indices[0]]; + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector& dims) { + container_type array(dims[0]); + return array; + } + + static void sanitize_dims(std::vector& dims, size_t axis) { + ContainerTraits::sanitize_dims(dims, axis + 1); + } +}; + +template +struct STLLikeContainerTraits { + using container_type = Container; + using value_type = ValueType; + using base_type = typename ContainerTraits::base_type; + + static void set(container_type& array, + const std::vector& indices, + const base_type& value) { + return ContainerTraits::set(array[indices[0]], lstrip(indices, 1), value); + } + + static base_type get(const container_type& array, const std::vector& indices) { + return ContainerTraits::get(array[indices[0]], lstrip(indices, 1)); + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector& dims) { + container_type array(dims[0]); + for (size_t i = 0; i < dims[0]; ++i) { + auto value = ContainerTraits::allocate(lstrip(dims, 1)); + ContainerTraits::assign(array[i], value); + } + + return array; + } + + static void sanitize_dims(std::vector& dims, size_t axis) { + ContainerTraits::sanitize_dims(dims, axis + 1); + } +}; + +template +struct ContainerTraits>: public STLLikeContainerTraits> {}; + +template +struct ContainerTraits>: public STLLikeContainerTraits> { + private: + using super = STLLikeContainerTraits>; + + public: + using container_type = typename super::container_type; + using base_type = typename super::base_type; + using value_type = typename super::value_type; + + public: + static container_type allocate(const std::vector& dims) { + if (N != dims[0]) { + throw std::runtime_error("broken logic: static and runtime size don't match."); + } + + container_type array; + for (size_t i = 0; i < dims[0]; ++i) { + auto value = ContainerTraits::allocate(lstrip(dims, 1)); + ContainerTraits::assign(array[i], value); + } + + return array; + } + + static void sanitize_dims(std::vector& dims, size_t axis) { + dims[axis] = N; + ContainerTraits::sanitize_dims(dims, axis + 1); + } +}; + +// -- Boost ------------------------------------------------------------------- +#ifdef H5_USE_BOOST +template +struct ContainerTraits> { + using 
container_type = typename boost::multi_array; + using value_type = T; + using base_type = typename ContainerTraits::base_type; + + static void set(container_type& array, + const std::vector& indices, + const base_type& value) { + auto i = std::vector(indices.begin(), indices.begin() + n); + return ContainerTraits::set(array(i), lstrip(indices, n), value); + } + + static base_type get(const container_type& array, const std::vector& indices) { + auto i = std::vector(indices.begin(), indices.begin() + n); + return ContainerTraits::get(array(i), lstrip(indices, n)); + } + + static void assign(container_type& dst, const container_type& src) { + auto const* const shape = src.shape(); + dst.resize(std::vector(shape, shape + n)); + dst = src; + } + + static container_type allocate(const std::vector& dims) { + auto local_dims = std::vector(dims.begin(), dims.begin() + n); + container_type array(local_dims); + + size_t n_elements = flat_size(local_dims); + for (size_t i = 0; i < n_elements; ++i) { + auto element = ContainerTraits::allocate(lstrip(dims, n)); + set(array, unravel(i, local_dims), element); + } + + return array; + } + + static void sanitize_dims(std::vector& dims, size_t axis) { + ContainerTraits::sanitize_dims(dims, axis + n); + } +}; + +template +struct ContainerTraits> { + using container_type = typename boost::numeric::ublas::matrix; + using value_type = T; + using base_type = typename ContainerTraits::base_type; + + static void set(container_type& array, + const std::vector& indices, + const base_type& value) { + auto i = indices[0]; + auto j = indices[1]; + return ContainerTraits::set(array(i, j), lstrip(indices, 2), value); + } + + static base_type get(const container_type& array, const std::vector& indices) { + auto i = indices[0]; + auto j = indices[1]; + return ContainerTraits::get(array(i, j), lstrip(indices, 2)); + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector& dims) { + auto local_dims = std::vector(dims.begin(), dims.begin() + 2); + container_type array(local_dims[0], local_dims[1]); + + size_t n_elements = flat_size(local_dims); + for (size_t i = 0; i < n_elements; ++i) { + auto indices = unravel(i, local_dims); + auto element = ContainerTraits::allocate(lstrip(dims, 2)); + + ContainerTraits::assign(array(indices[0], indices[1]), element); + } + + return array; + } + + static void sanitize_dims(std::vector& dims, size_t axis) { + ContainerTraits::sanitize_dims(dims, axis + 2); + } +}; + +#endif + +template +T copy(const C& src, const std::vector& dims) { + auto dst = ContainerTraits::allocate(dims); + for (size_t i = 0; i < flat_size(dims); ++i) { + auto indices = unravel(i, dims); + ContainerTraits::set(dst, indices, ContainerTraits::get(src, indices)); + } + + return dst; +} + +template +T default_real_value(const std::vector& indices, T shift, T base, T factor) { + auto value = T(0); + + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + auto sign = (std::is_signed::value) && (isum % 2 == 1) ? 
T(-1) : T(1); + + for (size_t k = 0; k < indices.size(); ++k) { + value += T(indices[k]) * T(std::pow(shift, T(k))) * base; + } + + return sign * value * factor; +} + +std::vector ascii_alphabet = {"a", "b", "c", "d", "e", "f"}; + +std::string default_string(size_t offset, size_t length, const std::vector& alphabet) { + std::string s = ""; + for (size_t k = 0; k < length; ++k) { + s += alphabet[(offset + k) % alphabet.size()]; + } + + return s; +} + +std::string default_fixed_length_ascii_string(const std::vector& indices, size_t length) { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return default_string(isum, length, ascii_alphabet); +} + +std::string default_variable_length_ascii_string(const std::vector& indices) { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return default_string(isum, isum, ascii_alphabet); +} + +template +struct DefaultValues; + +template +struct DefaultValues::value>::type> { + T operator()(const std::vector& indices) const { + auto eps = std::numeric_limits::epsilon(); + return default_real_value(indices, T(100.0), T(0.01), T(1.0) + T(8) * eps); + } +}; + +template +struct DefaultValues::value>::type> { + T operator()(const std::vector& indices) const { + return default_real_value(indices, T(100), T(1), T(1)); + } +}; + +template <> +struct DefaultValues { + char operator()(const std::vector& indices) const { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return char(isum % size_t(std::numeric_limits::max)); + } +}; + +template <> +struct DefaultValues { + unsigned char operator()(const std::vector& indices) const { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return (unsigned char) (isum % size_t(std::numeric_limits::max)); + } +}; + +template <> +struct DefaultValues { + std::string operator()(const std::vector& indices) const { + return default_variable_length_ascii_string(indices); + } +}; + +template <> +struct DefaultValues { + bool operator()(const std::vector& indices) const { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return (isum % 2) == 0; + } +}; + +template +struct MultiDimVector { + using type = std::vector::type>; +}; + +template +struct MultiDimVector { + using type = T; +}; + +template +class DataGenerator { + public: + constexpr static size_t rank = details::inspector::recursive_ndim; + using traits = ContainerTraits; + using base_type = typename traits::base_type; + using container_type = Container; + + public: + static container_type allocate(const std::vector& dims) { + return traits::allocate(dims); + } + + template + static container_type create(const std::vector& dims, F f) { + std::cout << "allocate " << std::endl; + auto array = allocate(dims); + std::cout << "initialize " << std::endl; + initialize(array, dims, f); + + return array; + } + + static container_type create(const std::vector& dims) { + return create(dims, DefaultValues()); + } + + static std::vector default_dims() { + using difference_type = std::vector::difference_type; + std::vector oversized{2, 3, 5, 7, 2, 3, 5, 7}; + std::vector dims(oversized.begin(), oversized.begin() + difference_type(rank)); + ContainerTraits::sanitize_dims(dims, /* axis = */ 0); + + return dims; + } + + static void sanitize_dims(std::vector& dims) { + ContainerTraits::sanitize_dims(dims, /* axis = */ 0); + } + + private: + template + static void initialize(C& array, const std::vector& dims, F f) { + std::vector indices(dims.size()); + 
initialize(array, dims, indices, 0, f); + } + + template + static void initialize(C& array, + const std::vector& dims, + std::vector& indices, + size_t axis, + F f) { + if (axis == indices.size()) { + auto value = f(indices); + traits::set(array, indices, value); + } else { + for (size_t i = 0; i < dims[axis]; ++i) { + indices[axis] = i; + initialize(array, dims, indices, axis + 1, f); + } + } + } +}; + +} // namespace testing +} // namespace HighFive diff --git a/tests/unit/supported_types.hpp b/tests/unit/supported_types.hpp new file mode 100644 index 000000000..f708303b1 --- /dev/null +++ b/tests/unit/supported_types.hpp @@ -0,0 +1,108 @@ + +#pragma once + +#include +#include +#include +#include + +#ifdef H5_USE_BOOST +#include +#endif + +namespace HighFive { +namespace testing { + +struct type_identity { + template + using type = T; +}; + +template +struct STDVector { + template + using type = std::vector>; +}; + +template +struct STDArray { + template + using type = std::array, n>; +}; + +#ifdef H5_USE_BOOST +template +struct BoostMultiArray { + template + using type = boost::multi_array, 4>; +}; + +template +struct BoostUblasMatrix { + template + using type = boost::numeric::ublas::matrix>; +}; +#endif + +template +struct ContainerProduct; + +template +struct ContainerProduct> { + using type = std::tuple...>; +}; + +template +struct ConcatenateTuples; + +template +struct ConcatenateTuples, std::tuple, Tuples...> { + using type = typename ConcatenateTuples, Tuples...>::type; +}; + +template +struct ConcatenateTuples> { + using type = std::tuple; +}; + +// clang-format off +using numeric_scalar_types = std::tuple< + int, + unsigned int, + long, + unsigned long, + unsigned char, + char, + float, + double, + long long, + unsigned long long +>; + +using scalar_types = typename ConcatenateTuples>::type; +using scalar_types_boost = typename ConcatenateTuples>::type; + +using supported_array_types = typename ConcatenateTuples< +#ifdef H5_USE_BOOST + typename ContainerProduct, scalar_types_boost>::type, + typename ContainerProduct>, scalar_types_boost>::type, + typename ContainerProduct>, scalar_types_boost>::type, + + typename ContainerProduct, scalar_types_boost>::type, + typename ContainerProduct>, scalar_types_boost>::type, + typename ContainerProduct>, scalar_types_boost>::type, +#endif + typename ContainerProduct, scalar_types>::type, + typename ContainerProduct>, scalar_types>::type, + typename ContainerProduct>>, scalar_types>::type, + typename ContainerProduct>>>, scalar_types>::type, + typename ContainerProduct, scalar_types>::type, + typename ContainerProduct>, scalar_types>::type, + typename ContainerProduct>, scalar_types>::type, + typename ContainerProduct>, scalar_types>::type +>::type; + +// clang-format on + +} // namespace testing +} // namespace HighFive diff --git a/tests/unit/test_all_types.cpp b/tests/unit/test_all_types.cpp index 23c8a27b3..e772fd1d7 100644 --- a/tests/unit/test_all_types.cpp +++ b/tests/unit/test_all_types.cpp @@ -7,11 +7,16 @@ * */ #include +#include #include #include +#include #include "tests_high_five.hpp" +#include "data_generator.hpp" +#include "create_traits.hpp" +#include "supported_types.hpp" using namespace HighFive; @@ -238,3 +243,244 @@ TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector", "[Types]", std::v } } #endif + +template +struct DiffMessageTrait; + +template +struct DiffMessageTrait::value>::type> { + static std::string diff(T a, T b) { + std::stringstream sstream; + sstream << std::scientific << " delta: " << a - b; + return 
sstream.str(); + } +}; + +template +struct DiffMessageTrait::value>::type> { + static std::string diff(T /* a */, T /* b */) { + return ""; + } +}; + +template +std::string diff_message(T a, T b) { + return DiffMessageTrait::diff(a, b); +} + +template +void compare_arrays(const Actual& actual, + const Expected& expected, + const std::vector& dims, + Comp comp) { + using actual_trait = testing::ContainerTraits; + using expected_trait = testing::ContainerTraits; + using base_type = typename actual_trait::base_type; + + auto n = testing::flat_size(dims); + + for (size_t i = 0; i < n; ++i) { + auto indices = testing::unravel(i, dims); + base_type actual_value = actual_trait::get(actual, indices); + base_type expected_value = expected_trait::get(expected, indices); + auto c = comp(actual_value, expected_value); + if (!c) { + std::stringstream sstream; + sstream << std::scientific << "i = " << i << ": " << actual_value + << " != " << expected_value << diff_message(actual_value, expected_value); + INFO(sstream.str()); + } + REQUIRE(c); + } +} + +template +void compare_arrays(const Actual& actual, + const Expected& expected, + const std::vector& dims) { + using base_type = typename testing::ContainerTraits::base_type; + compare_arrays(expected, actual, dims, [](base_type a, base_type b) { return a == b; }); +} + +template +void check_read_auto(const Expected& expected, const std::vector& dims, const Obj& obj) { + compare_arrays(obj.template read(), expected, dims); +} + +template +void check_read_preallocated(const Expected& expected, + const std::vector& dims, + const Obj& obj) { + auto actual = testing::DataGenerator::allocate(dims); + obj.read(actual); + + compare_arrays(actual, expected, dims); +} + +template +void check_read_regular(const std::string& file_name, const std::vector& dims) { + using traits = testing::DataGenerator; + using base_type = typename traits::base_type; + using reference_type = typename testing::MultiDimVector::type; + + auto file = File(file_name, File::Truncate); + auto expected = testing::copy(traits::create(dims), dims); + + auto dataspace = DataSpace(dims); + auto attr = testing::AttributeCreateTraits::create(file, "dset", dataspace); + attr.write(expected); + + auto dset = testing::DataSetCreateTraits::create(file, "attr", dataspace); + dset.write(expected); + + + SECTION("dset.read()") { + check_read_auto(expected, dims, dset); + } + + SECTION("dset.read(values)") { + check_read_preallocated(expected, dims, dset); + } + + SECTION("attr.read()") { + check_read_auto(expected, dims, attr); + } + + SECTION("attr.read(values)") { + check_read_preallocated(expected, dims, attr); + } +} + +template +void check_read_regular() { + const std::string file_name("rw_read_regular" + typeNameHelper() + ".h5"); + auto dims = testing::DataGenerator::default_dims(); + + check_read_regular(file_name, dims); +} + +TEMPLATE_LIST_TEST_CASE("TestReadRegular", "[read]", testing::supported_array_types) { + check_read_regular(); +} + +template +void check_writing(const std::vector& dims, Write write) { + using traits = testing::DataGenerator; + using base_type = typename traits::base_type; + using reference_type = typename testing::MultiDimVector::type; + + auto values = testing::DataGenerator::create(dims); + auto expected = testing::copy(values, dims); + + auto obj = write(values); + + auto actual = testing::DataGenerator::allocate(dims); + obj.read(actual); + + compare_arrays(actual, expected, dims); +} + +template +void check_write_auto(File& file, const std::string& name, const 
std::vector& dims) { + auto write_auto = [&](const Container& values) { + return CreateTraits::create(file, "auto_" + name, values); + }; + + check_writing(dims, write_auto); +} + +template +void check_write_deduce_type(File& file, const std::string& name, const std::vector& dims) { + auto write_two_phase_auto = [&](const Container& values) { + using traits = testing::ContainerTraits; + auto dataspace = DataSpace(dims); + auto h5 = CreateTraits::template create(file, + "two_phase_auto" + name, + dataspace); + h5.write(values); + return h5; + }; + check_writing(dims, write_two_phase_auto); +} + +template +void check_write_manual(File& file, const std::string& name, const std::vector& dims) { + auto write_two_phase = [&](const Container& values) { + using traits = testing::ContainerTraits; + auto datatype = create_datatype(); + auto dataspace = DataSpace(dims); + auto h5 = CreateTraits::create(file, "two_phase_" + name, dataspace, datatype); + h5.write(values); + return h5; + }; + check_writing(dims, write_two_phase); +} + +template +void check_write_regular(const std::string& file_name, const std::vector& dims) { + auto file = File(file_name, File::Truncate); + + SECTION("createDataSet(name, container)") { + check_write_auto(file, "dset", dims); + } + + SECTION("createDataSet(name, container)") { + check_write_deduce_type(file, "dset", dims); + } + + SECTION("createDataSet(name, container)") { + check_write_manual(file, "dset", dims); + } + + SECTION("createAttribute(name, container)") { + check_write_auto(file, "attr", dims); + } + + SECTION("createAttribute(name, container)") { + check_write_deduce_type(file, "attr", dims); + } + + SECTION("createAttribute(name, container)") { + check_write_manual(file, "attr", dims); + } +} + +template +void check_write_regular() { + std::string file_name("rw_write_regular" + typeNameHelper() + ".h5"); + auto dims = testing::DataGenerator::default_dims(); + check_write_regular(file_name, dims); +} + +TEMPLATE_LIST_TEST_CASE("TestWriteRegularSTDVector", "[write]", testing::supported_array_types) { + check_write_regular(); +} + +TEST_CASE("DataGeneratorDefaultDims", "[internal]") { + SECTION("std::array") { + auto dims = testing::DataGenerator>::default_dims(); + REQUIRE(dims.size() == 1); + CHECK(dims[0] == 3); + } + + SECTION("std::vector") { + auto dims = testing::DataGenerator>::default_dims(); + REQUIRE(dims.size() == 1); + CHECK(dims[0] > 0); + } + + SECTION("std::vector") { + auto dims = testing::DataGenerator>>::default_dims(); + REQUIRE(dims.size() == 2); + CHECK(dims[0] * dims[1] > 0); + } +} + +TEST_CASE("ravel", "[internal]") { + std::vector dims = {2, 4, 5}; + std::vector indices = {1, 2, 3}; + size_t flat_index = indices[2] + dims[2] * (indices[1] + dims[1] * indices[0]); + + CHECK(flat_index == testing::ravel(indices, dims)); + CHECK(indices == testing::unravel(flat_index, dims)); +} diff --git a/tests/unit/tests_high_five_multi_dims.cpp b/tests/unit/tests_high_five_multi_dims.cpp index 442f1c9cc..08fbea9ce 100644 --- a/tests/unit/tests_high_five_multi_dims.cpp +++ b/tests/unit/tests_high_five_multi_dims.cpp @@ -80,11 +80,11 @@ void readWriteArrayTest() { typename std::array tooSmall; CHECK_THROWS_AS(dataset.read(tooSmall), DataSpaceException); } + TEMPLATE_LIST_TEST_CASE("readWriteArray", "[template]", numerical_test_types) { readWriteArrayTest(); } - template void readWriteVectorNDTest(std::vector& ndvec, const std::vector& dims) { fillVec(ndvec, dims, ContentGenerate()); From 04ae6505951f13b1e595afe5f4bda33123636f31 Mon Sep 17 00:00:00 
2001 From: Luc Grosheintz Date: Thu, 21 Dec 2023 09:33:26 +0100 Subject: [PATCH 23/97] Guard functionality introduced in 1.10.0. (#905) --- include/highfive/H5PropertyList.hpp | 3 +++ include/highfive/bits/H5PropertyList_misc.hpp | 2 ++ include/highfive/bits/h5p_wrapper.hpp | 3 ++- 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/include/highfive/H5PropertyList.hpp b/include/highfive/H5PropertyList.hpp index 53b3c4a13..0ca4e333b 100644 --- a/include/highfive/H5PropertyList.hpp +++ b/include/highfive/H5PropertyList.hpp @@ -229,6 +229,8 @@ class MPIOFileAccess { MPI_Info _info; }; + +#if H5_VERSION_GE(1, 10, 0) /// /// \brief Use collective MPI-IO for metadata read and write. /// @@ -306,6 +308,7 @@ class MPIOCollectiveMetadataWrite { bool collective_; }; +#endif #endif /// diff --git a/include/highfive/bits/H5PropertyList_misc.hpp b/include/highfive/bits/H5PropertyList_misc.hpp index 55aabe58f..1fa2101f2 100644 --- a/include/highfive/bits/H5PropertyList_misc.hpp +++ b/include/highfive/bits/H5PropertyList_misc.hpp @@ -167,6 +167,7 @@ inline void MPIOFileAccess::apply(const hid_t list) const { detail::h5p_set_fapl_mpio(list, _comm, _info); } +#if H5_VERSION_GE(1, 10, 0) inline void MPIOCollectiveMetadata::apply(const hid_t plist) const { auto read = MPIOCollectiveMetadataRead{collective_read_}; auto write = MPIOCollectiveMetadataWrite{collective_write_}; @@ -224,6 +225,7 @@ inline MPIOCollectiveMetadataWrite::MPIOCollectiveMetadataWrite(bool collective) : collective_(collective) {} #endif +#endif inline FileVersionBounds::FileVersionBounds(H5F_libver_t low, H5F_libver_t high) : _low(low) diff --git a/include/highfive/bits/h5p_wrapper.hpp b/include/highfive/bits/h5p_wrapper.hpp index 821c2fbcd..fcfcbd456 100644 --- a/include/highfive/bits/h5p_wrapper.hpp +++ b/include/highfive/bits/h5p_wrapper.hpp @@ -95,6 +95,7 @@ inline herr_t h5p_set_fapl_mpio(hid_t fapl_id, MPI_Comm comm, MPI_Info info) { return err; } +#if H5_VERSION_GE(1, 10, 0) inline herr_t h5p_set_all_coll_metadata_ops(hid_t plist_id, hbool_t is_collective) { herr_t err = H5Pset_all_coll_metadata_ops(plist_id, is_collective); if (err < 0) { @@ -132,7 +133,7 @@ inline herr_t h5p_get_coll_metadata_write(hid_t plist_id, hbool_t* is_collective return err; } - +#endif #endif inline herr_t h5p_get_libver_bounds(hid_t plist_id, H5F_libver_t* low, H5F_libver_t* high) { From 9d50f3b619b2a9d7a6f06cba28d6fcf37b22be26 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 11 Jan 2024 15:03:47 +0100 Subject: [PATCH 24/97] Name ctor for empty property lists. (#904) Empty property lists have a valid HID, unlike default constructed properties which use H5P_DEFAULT. This only matters when interfacing with HDF5 directly. --- include/highfive/H5PropertyList.hpp | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/include/highfive/H5PropertyList.hpp b/include/highfive/H5PropertyList.hpp index 0ca4e333b..2368f5ca9 100644 --- a/include/highfive/H5PropertyList.hpp +++ b/include/highfive/H5PropertyList.hpp @@ -179,6 +179,18 @@ class PropertyList: public PropertyListBase { return static_cast&>(PropertyListBase::Default()); } + /// Return a property list created via a call to `H5Pcreate`. + /// + /// An empty property is needed when one wants `getId()` to immediately + /// point at a valid HID. This is important when interfacing directly with + /// HDF5 to set properties that haven't been wrapped by HighFive. 
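+    ///
+    /// A possible sketch of that use case; `H5Pset_alignment` is just one
+    /// example of a raw HDF5 call that HighFive does not wrap:
+    /// \code{.cpp}
+    /// auto fapl = FileAccessProps::Empty();
+    /// H5Pset_alignment(fapl.getId(), /* threshold */ 1024, /* alignment */ 4096);
+    /// \endcode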
+ static PropertyList Empty() { + auto plist = PropertyList(); + plist._initializeIfNeeded(); + + return plist; + } + protected: void _initializeIfNeeded(); }; From 838321a7cdae7a45e59db35432140acf5d1a7dbc Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 12 Jan 2024 14:58:46 +0100 Subject: [PATCH 25/97] Fix permissions 'create_submodule_update_pr.sh'. (#909) --- .github/create_submodule_update_pr.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 .github/create_submodule_update_pr.sh diff --git a/.github/create_submodule_update_pr.sh b/.github/create_submodule_update_pr.sh old mode 100644 new mode 100755 From 0d604c85dd16927c86e4871a94e539be85ae1441 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 12 Jan 2024 15:03:39 +0100 Subject: [PATCH 26/97] 'inspector` guard for empty containers. (#913) --- include/highfive/bits/H5Inspector_misc.hpp | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index 54bac1b0d..1613f87c3 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -385,19 +385,21 @@ struct inspector> { } static hdf5_type* data(type& val) { - return inspector::data(val[0]); + return val.empty() ? nullptr : inspector::data(val[0]); } static const hdf5_type* data(const type& val) { - return inspector::data(val[0]); + return val.empty() ? nullptr : inspector::data(val[0]); } template static void serialize(const type& val, It m) { - size_t subsize = inspector::getSizeVal(val[0]); - for (auto&& e: val) { - inspector::serialize(e, m); - m += subsize; + if (!val.empty()) { + size_t subsize = inspector::getSizeVal(val[0]); + for (auto&& e: val) { + inspector::serialize(e, m); + m += subsize; + } } } From fd663cc8b2aedc12b2eb894241875fe1c85c8104 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 12 Jan 2024 16:23:02 +0100 Subject: [PATCH 27/97] Avoid non-collective behaviour. 
(#912) --- include/highfive/bits/H5Slice_traits_misc.hpp | 12 --- tests/unit/tests_high_five_parallel.cpp | 77 +++++++++++++++++++ 2 files changed, 77 insertions(+), 12 deletions(-) diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index 4dfb1ea5f..dd7e45b1d 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -185,14 +185,6 @@ inline void SliceTraits::read(T& array, const DataTransferProps& xfer_ } auto dims = mem_space.getDimensions(); - if (mem_space.getElementCount() == 0) { - auto effective_dims = details::squeezeDimensions(dims, - details::inspector::recursive_ndim); - - details::inspector::prepare(array, effective_dims); - return; - } - auto r = details::data_converter::get_reader(dims, array, file_datatype); read(r.getPointer(), buffer_info.data_type, xfer_props); // re-arrange results @@ -250,10 +242,6 @@ inline void SliceTraits::write(const T& buffer, const DataTransferProp const auto& slice = static_cast(*this); const DataSpace& mem_space = slice.getMemSpace(); - if (mem_space.getElementCount() == 0) { - return; - } - auto file_datatype = slice.getDataType(); const details::BufferInfo buffer_info( diff --git a/tests/unit/tests_high_five_parallel.cpp b/tests/unit/tests_high_five_parallel.cpp index 8b096205e..e2d7f948e 100644 --- a/tests/unit/tests_high_five_parallel.cpp +++ b/tests/unit/tests_high_five_parallel.cpp @@ -19,6 +19,7 @@ #include #include "tests_high_five.hpp" +#include "data_generator.hpp" using namespace HighFive; @@ -152,6 +153,82 @@ TEMPLATE_LIST_TEST_CASE("mpiSelectionArraySimpleCollectiveMD", "[template]", num } +TEST_CASE("ReadWriteHalfEmptyDatasets") { + int mpi_rank = -1; + MPI_Comm mpi_comm = MPI_COMM_WORLD; + MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank); + + std::string filename = "rw_collective_some_empty.h5"; + std::string dset_name = "dset"; + + using container_t = std::vector>; + using traits = testing::ContainerTraits; + + auto dims = std::vector{5ul, 7ul}; + auto values = testing::DataGenerator::create(dims); + + if (mpi_rank == 0) { + auto file = HighFive::File(filename, HighFive::File::Truncate); + file.createDataSet(dset_name, values); + } + + MPI_Barrier(mpi_comm); + + bool collective_metadata = true; + bool collective_transfer = true; + + HighFive::FileAccessProps fapl; + fapl.add(HighFive::MPIOFileAccess{MPI_COMM_WORLD, MPI_INFO_NULL}); + fapl.add(HighFive::MPIOCollectiveMetadata{collective_metadata}); + + auto file = HighFive::File(filename, HighFive::File::Truncate, fapl); + file.createDataSet(dset_name, values); + auto dset = file.getDataSet(dset_name); + + HighFive::DataTransferProps dxpl; + dxpl.add(HighFive::UseCollectiveIO{collective_transfer}); + + auto hyperslab = HighFive::HyperSlab(); + auto subdims = std::vector(2, 0ul); + + if (mpi_rank == 0) { + subdims = std::vector{2ul, 4ul}; + hyperslab |= HighFive::RegularHyperSlab({0ul, 0ul}, subdims); + } + + SECTION("read") { + auto subvalues = dset.select(hyperslab, DataSpace(subdims)).template read(); + + for (size_t i = 0; i < subdims[0]; ++i) { + for (size_t j = 0; j < subdims[1]; ++j) { + REQUIRE(traits::get(subvalues, {i, j}) == traits::get(values, {i, j})); + } + } + } + + SECTION("write") { + auto subvalues = + testing::DataGenerator::create(subdims, [](const std::vector& d) { + auto default_values = testing::DefaultValues(); + return -1000.0 + default_values(d); + }); + dset.select(hyperslab, DataSpace(subdims)).write(subvalues, dxpl); + + MPI_Barrier(mpi_comm); + + if 
(mpi_rank == 0) { + auto modified_values = dset.read(); + + for (size_t i = 0; i < subdims[0]; ++i) { + for (size_t j = 0; j < subdims[1]; ++j) { + REQUIRE(traits::get(subvalues, {i, j}) == traits::get(modified_values, {i, j})); + } + } + } + } +} + + int main(int argc, char* argv[]) { MpiFixture mpi(argc, argv); From 1dd8664b3e6d3ab686946787a08430414fc1bd91 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 15 Jan 2024 10:28:23 +0100 Subject: [PATCH 28/97] Remove debug output. (#915) --- tests/unit/data_generator.hpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/unit/data_generator.hpp b/tests/unit/data_generator.hpp index f0b0d2625..9a6712d53 100644 --- a/tests/unit/data_generator.hpp +++ b/tests/unit/data_generator.hpp @@ -409,9 +409,7 @@ class DataGenerator { template static container_type create(const std::vector& dims, F f) { - std::cout << "allocate " << std::endl; auto array = allocate(dims); - std::cout << "initialize " << std::endl; initialize(array, dims, f); return array; From 1299c556a992bea8aa03c3b75cca50560a5078b6 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 15 Jan 2024 10:28:32 +0100 Subject: [PATCH 29/97] Fix error message. (#914) --- include/highfive/bits/H5Attribute_misc.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 6ec780433..33295d40e 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -68,7 +68,7 @@ inline void Attribute::read(T& array) const { if (!details::checkDimensions(mem_space, buffer_info.n_dimensions)) { std::ostringstream ss; - ss << "Impossible to read DataSet of dimensions " << mem_space.getNumberDimensions() + ss << "Impossible to read Attribute of dimensions " << mem_space.getNumberDimensions() << " into arrays of dimensions " << buffer_info.n_dimensions; throw DataSpaceException(ss.str()); } From ea2e0e3ddc829b62888c05d3890b83b466b92d1a Mon Sep 17 00:00:00 2001 From: Nicolas Cornu Date: Thu, 25 Jan 2024 11:08:09 +0100 Subject: [PATCH 30/97] Change the documentation badge for master (#925) * Change the documentation badge for master * Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3ea068015..e1c8fb1d1 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # HighFive - HDF5 header-only C++ Library -[![Doxygen -> gh-pages](https://github.com/BlueBrain/HighFive/workflows/gh-pages/badge.svg)](https://BlueBrain.github.io/HighFive) +[![Doxygen -> gh-pages](https://github.com/BlueBrain/HighFive/workflows/gh-pages/badge.svg?branch=master)](https://BlueBrain.github.io/HighFive/actions/workflows/gh-pages.yml?query=branch%3Amaster) [![codecov](https://codecov.io/gh/BlueBrain/HighFive/branch/master/graph/badge.svg?token=UBKxHEn7RS)](https://codecov.io/gh/BlueBrain/HighFive) [![HighFive_Integration_tests](https://github.com/BlueBrain/HighFive-testing/actions/workflows/integration.yml/badge.svg)](https://github.com/BlueBrain/HighFive-testing/actions/workflows/integration.yml) From 0d0259e823a0e8aee2f036ba738c703ac4a0721c Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 25 Jan 2024 11:42:57 +0100 Subject: [PATCH 31/97] Prepare `v2.9.0`. 
(#924) --- CHANGELOG.md | 20 ++++++++++++++++++++ CMakeLists.txt | 2 +- include/highfive/H5Version.hpp | 6 +++--- 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a8cd8613..fcd0247e3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,24 @@ # Changes +## Version 2.9.0 - 2024-01-25 +### New Features + - Add named ctors for scalar and null dataspaces. (#899) + - Named ctor for empty property lists. (#904) + +### Improvements + - Enable running tests in parallel. (#849) + - Wrap all used HDF5 function calls and always check status codes. (#863) + - Utilities for writing tests in a container independent manner. (#871) + - Improve test rigour. + +### Bug Fix + - Log messages were slightly misformatted. (#854) + - Fix bug in `enforce_ascii_hack`. (#856) + - Fix `create_datatype()`. (#869) + - Guard functionality introduced in 1.10.0. (#905) + - `inspector` guard for empty containers. (#913) + - Avoid non-collective behaviour. (#912) + + ## Version 2.8.0 - 2023-11-02 ### Important Change - `Eigen::Matrix` is (by default) stored with column-major index ordering. Under diff --git a/CMakeLists.txt b/CMakeLists.txt index d592f2d66..694960090 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -5,7 +5,7 @@ else() cmake_policy(VERSION 3.13) endif() -project(HighFive VERSION 2.8.0) +project(HighFive VERSION 2.9.0) configure_file(${CMAKE_CURRENT_SOURCE_DIR}/include/highfive/H5Version.hpp.in ${CMAKE_CURRENT_SOURCE_DIR}/include/highfive/H5Version.hpp) diff --git a/include/highfive/H5Version.hpp b/include/highfive/H5Version.hpp index dc238432c..bca2c3a83 100644 --- a/include/highfive/H5Version.hpp +++ b/include/highfive/H5Version.hpp @@ -9,7 +9,7 @@ #pragma once #define HIGHFIVE_VERSION_MAJOR 2 -#define HIGHFIVE_VERSION_MINOR 8 +#define HIGHFIVE_VERSION_MINOR 9 #define HIGHFIVE_VERSION_PATCH 0 /** \brief Concatenated representation of the HighFive version. @@ -24,10 +24,10 @@ * std::cout << STRINGIFY_VALUE(HIGHFIVE_VERSION) << "\n"; * \endcode */ -#define HIGHFIVE_VERSION 2.8.0 +#define HIGHFIVE_VERSION 2.9.0 /** \brief String representation of the HighFive version. * * \warning This macro only exists from 2.7.1 onwards. */ -#define HIGHFIVE_VERSION_STRING "2.8.0" +#define HIGHFIVE_VERSION_STRING "2.9.0" From 3ea8154d6d4551a8c94d4fcb5875346b1314cdad Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 29 Jan 2024 09:51:17 +0100 Subject: [PATCH 32/97] Remove unused specialization for createDataSet. (#927) This was likely needed to hide the bug that the specialization for `create_datatype` was missing. See, commit 9415a7c82. --- include/highfive/bits/H5Node_traits.hpp | 15 +------------ include/highfive/bits/H5Node_traits_misc.hpp | 23 +------------------- 2 files changed, 2 insertions(+), 36 deletions(-) diff --git a/include/highfive/bits/H5Node_traits.hpp b/include/highfive/bits/H5Node_traits.hpp index 493749bee..05e923a3b 100644 --- a/include/highfive/bits/H5Node_traits.hpp +++ b/include/highfive/bits/H5Node_traits.hpp @@ -53,20 +53,7 @@ class NodeTraits { /// \param accessProps A property list with data set access properties /// \param parents Create intermediate groups if needed. Default: true. 
/// \return DataSet Object - template ::base_type, details::Boolean>::value, - int>::type* = nullptr> - DataSet createDataSet(const std::string& dataset_name, - const DataSpace& space, - const DataSetCreateProps& createProps = DataSetCreateProps::Default(), - const DataSetAccessProps& accessProps = DataSetAccessProps::Default(), - bool parents = true); - - template ::base_type, details::Boolean>::value, - int>::type* = nullptr> + template DataSet createDataSet(const std::string& dataset_name, const DataSpace& space, const DataSetCreateProps& createProps = DataSetCreateProps::Default(), diff --git a/include/highfive/bits/H5Node_traits_misc.hpp b/include/highfive/bits/H5Node_traits_misc.hpp index b09bc3190..2db2b2aab 100644 --- a/include/highfive/bits/H5Node_traits_misc.hpp +++ b/include/highfive/bits/H5Node_traits_misc.hpp @@ -52,28 +52,7 @@ inline DataSet NodeTraits::createDataSet(const std::string& dataset_na } template -template ::base_type, details::Boolean>::value, - int>::type*> -inline DataSet NodeTraits::createDataSet(const std::string& dataset_name, - const DataSpace& space, - const DataSetCreateProps& createProps, - const DataSetAccessProps& accessProps, - bool parents) { - return createDataSet(dataset_name, - space, - create_and_check_datatype::base_type>(), - createProps, - accessProps, - parents); -} - -template -template ::base_type, details::Boolean>::value, - int>::type*> +template inline DataSet NodeTraits::createDataSet(const std::string& dataset_name, const DataSpace& space, const DataSetCreateProps& createProps, From fc9f3c8fba1f6961f1693e4d9980d74dad8cb7ed Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 30 Jan 2024 09:57:53 +0100 Subject: [PATCH 33/97] Update documentation: top-level header. (#931) --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e1c8fb1d1..bc0d2752e 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,8 @@ std::string filename = "/tmp/new_file.h5"; } ``` -**Note:** `H5File.hpp` is the top-level header of HighFive core which should be always included. +**Note:** As of 2.8.0, one can use `highfive/highfive.hpp` to include +everything HighFive. Prior to 2.8.0 one would include `highfive/H5File.hpp`. **Note:** For advanced usecases the dataset can be created without immediately writing to it. This is common in MPI-IO related patterns, or when growing a From 8c48479c4ddc3aad66f204c02dce98f0171b70cc Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 30 Jan 2024 09:58:59 +0100 Subject: [PATCH 34/97] Refactor BufferInfo::Operation as enum class. 
(#922) --- include/highfive/bits/H5Attribute_misc.hpp | 4 ++-- include/highfive/bits/H5ReadWrite_misc.hpp | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 33295d40e..cc235b500 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -64,7 +64,7 @@ inline void Attribute::read(T& array) const { const details::BufferInfo buffer_info( file_datatype, [this]() -> std::string { return this->getName(); }, - details::BufferInfo::read); + details::BufferInfo::Operation::read); if (!details::checkDimensions(mem_space, buffer_info.n_dimensions)) { std::ostringstream ss; @@ -130,7 +130,7 @@ inline void Attribute::write(const T& buffer) { const details::BufferInfo buffer_info( file_datatype, [this]() -> std::string { return this->getName(); }, - details::BufferInfo::write); + details::BufferInfo::Operation::write); if (!details::checkDimensions(mem_space, buffer_info.n_dimensions)) { std::ostringstream ss; diff --git a/include/highfive/bits/H5ReadWrite_misc.hpp b/include/highfive/bits/H5ReadWrite_misc.hpp index 4f6f15788..30c17961c 100644 --- a/include/highfive/bits/H5ReadWrite_misc.hpp +++ b/include/highfive/bits/H5ReadWrite_misc.hpp @@ -51,7 +51,7 @@ struct BufferInfo { using char_array_t = typename details::type_char_array::type; static constexpr bool is_char_array = details::type_char_array::is_char_array; - enum Operation { read, write }; + enum class Operation { read, write }; const Operation op; template @@ -145,12 +145,12 @@ BufferInfo::BufferInfo(const DataType& dtype, F getName, Operation _op) data_type.string() + " -> " + dtype.string()); } else if ((dtype.getClass() & data_type.getClass()) == DataTypeClass::Float) { HIGHFIVE_LOG_WARN_IF( - (op == read) && (dtype.getSize() > data_type.getSize()), + (op == Operation::read) && (dtype.getSize() > data_type.getSize()), getName() + "\": hdf5 dataset has higher floating point precision than data on read: " + dtype.string() + " -> " + data_type.string()); HIGHFIVE_LOG_WARN_IF( - (op == write) && (dtype.getSize() < data_type.getSize()), + (op == Operation::write) && (dtype.getSize() < data_type.getSize()), getName() + "\": data has higher floating point precision than hdf5 dataset on write: " + data_type.string() + " -> " + dtype.string()); From cd302e934795ce1c7a7f396d3fbdefe22f3a15ce Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 30 Jan 2024 10:05:03 +0100 Subject: [PATCH 35/97] Fix `inspector`. (#929) Adds `prepare` and `data` (non-cost) methods to `inspector`. This is needed to serialize a `int a[][] = {{1, 2, 3}, {4, 5, 6}};` without the `read(T*, ...)` overload. 
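For example, a sketch of what this enables (assuming `file` holds an open
`HighFive::File` containing a 2x3 integer dataset named "dset"):

    int a[2][3];
    file.getDataSet("dset").read(a);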
--- include/highfive/bits/H5Inspector_misc.hpp | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index 1613f87c3..8cbb11c37 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -591,6 +591,21 @@ struct inspector { static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_copyable; + static void prepare(type& val, const std::vector& dims) { + if (dims.size() < 1) { + throw DataSpaceException("Invalid 'dims', must be at least 1 dimensional."); + } + + if (dims[0] != N) { + throw DataSpaceException("Dimensions mismatch."); + } + + std::vector next_dims(dims.begin() + 1, dims.end()); + for (size_t i = 0; i < dims[0]; ++i) { + inspector::prepare(val[i], next_dims); + } + } + static size_t getSizeVal(const type& val) { return compute_total_size(getDimensions(val)); } @@ -608,6 +623,10 @@ struct inspector { return inspector::data(val[0]); } + static hdf5_type* data(type& val) { + return inspector::data(val[0]); + } + /* it works because there is only T[][][] currently we will fix it one day */ static void serialize(const type& val, hdf5_type* m) { From 0c6cc44e5ad2a9aec9e57fe9679d2a4f20ef7e1a Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 30 Jan 2024 15:02:40 +0100 Subject: [PATCH 36/97] Deprecate `FixedLenStringArray`. (#932) --- doc/Doxyfile | 1 + doc/migration_guide.md | 16 ++ include/highfive/H5DataType.hpp | 6 + include/highfive/bits/H5DataType_misc.hpp | 6 +- include/highfive/bits/H5Inspector_misc.hpp | 6 +- include/highfive/bits/H5Node_traits.hpp | 3 +- include/highfive/bits/H5Node_traits_misc.hpp | 2 +- include/highfive/bits/H5Utils.hpp | 2 + include/highfive/bits/H5_definitions.hpp | 11 +- src/examples/read_write_fixedlen_string.cpp | 42 ----- tests/unit/CMakeLists.txt | 2 + tests/unit/deprecated/CMakeLists.txt | 10 + .../test_fixed_len_string_array.cpp | 172 ++++++++++++++++++ tests/unit/tests_high_five_base.cpp | 156 ---------------- 14 files changed, 228 insertions(+), 207 deletions(-) create mode 100644 doc/migration_guide.md delete mode 100644 src/examples/read_write_fixedlen_string.cpp create mode 100644 tests/unit/deprecated/CMakeLists.txt create mode 100644 tests/unit/deprecated/test_fixed_len_string_array.cpp diff --git a/doc/Doxyfile b/doc/Doxyfile index 6ebc393ec..d0cf7efb1 100644 --- a/doc/Doxyfile +++ b/doc/Doxyfile @@ -866,6 +866,7 @@ WARN_LOGFILE = INPUT = @CMAKE_CURRENT_SOURCE_DIR@/../include \ @CMAKE_CURRENT_SOURCE_DIR@/installation.md \ + @CMAKE_CURRENT_SOURCE_DIR@/migration_guide.md \ @CMAKE_CURRENT_SOURCE_DIR@/developer_guide.md \ @CMAKE_CURRENT_SOURCE_DIR@/../CHANGELOG.md \ @CMAKE_CURRENT_SOURCE_DIR@/../README.md diff --git a/doc/migration_guide.md b/doc/migration_guide.md new file mode 100644 index 000000000..e85002b15 --- /dev/null +++ b/doc/migration_guide.md @@ -0,0 +1,16 @@ +# Migration Guide +A collection of tips for migrating away from deprecated features. + +## Deprecation of `FixedLenStringArray`. +The issue with `FixedLenStringArray` is that it is unable to avoid copies. +Essentially, this class acts as a means to create a copy of the data in a +format suitable for writing fixed-length strings. Additionally, the class acts +as a tag for HighFive to overload on. The support of `std::string` in HighFive +has improved considerable. Since 2.8.0 we can write/read `std::string` to fixed +or variable length HDF5 strings. 
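+
+For instance, a minimal sketch of the replacement (the dataset name and `file`
+are illustrative):
+```
+std::vector<std::string> strings = {"one", "two", "three"};
+file.createDataSet("strings", strings);
+auto back = file.getDataSet("strings").read<std::vector<std::string>>();
+```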
+ +Therefore, this class serves no purpose anymore. Any occurrence of it can be +replaced with an `std::vector` (for example). + +If desired one can silence warnings by replacing `FixedLenStringArray` with +`deprecated::FixedLenStringArray`. diff --git a/include/highfive/H5DataType.hpp b/include/highfive/H5DataType.hpp index 0d596965f..b15f62165 100644 --- a/include/highfive/H5DataType.hpp +++ b/include/highfive/H5DataType.hpp @@ -342,6 +342,7 @@ template DataType create_and_check_datatype(); +namespace deprecated { /// /// \brief A structure representing a set of fixed-length strings /// @@ -460,6 +461,11 @@ class FixedLenStringArray { private: vector_t datavec; }; +} // namespace deprecated + +template +using FixedLenStringArray H5_DEPRECATED_USING("Use 'std::vector'.") = + deprecated::FixedLenStringArray; } // namespace HighFive diff --git a/include/highfive/bits/H5DataType_misc.hpp b/include/highfive/bits/H5DataType_misc.hpp index e29c99b0e..619e51e71 100644 --- a/include/highfive/bits/H5DataType_misc.hpp +++ b/include/highfive/bits/H5DataType_misc.hpp @@ -207,7 +207,7 @@ class AtomicType: public DataType { }; template -class AtomicType>: public DataType { +class AtomicType>: public DataType { public: inline AtomicType() : DataType(create_string(StrLen)) {} @@ -239,8 +239,7 @@ AtomicType::AtomicType() { } -// class FixedLenStringArray - +namespace deprecated { template inline FixedLenStringArray::FixedLenStringArray(const char array[][N], std::size_t length) { datavec.resize(length); @@ -283,6 +282,7 @@ template inline std::string FixedLenStringArray::getString(std::size_t i) const { return std::string(datavec[i].data()); } +} // namespace deprecated // Internal // Reference mapping diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index 8cbb11c37..7ae90d84f 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -289,10 +289,10 @@ struct inspector: type_helper { }; template -struct inspector> { - using type = FixedLenStringArray; +struct inspector> { + using type = deprecated::FixedLenStringArray; using value_type = char*; - using base_type = FixedLenStringArray; + using base_type = deprecated::FixedLenStringArray; using hdf5_type = char; static constexpr size_t ndim = 1; diff --git a/include/highfive/bits/H5Node_traits.hpp b/include/highfive/bits/H5Node_traits.hpp index 05e923a3b..6f4a93ce6 100644 --- a/include/highfive/bits/H5Node_traits.hpp +++ b/include/highfive/bits/H5Node_traits.hpp @@ -79,8 +79,9 @@ class NodeTraits { template + H5_DEPRECATED("Use 'std::vector'.") DataSet createDataSet(const std::string& dataset_name, - const FixedLenStringArray& data, + const deprecated::FixedLenStringArray& data, const DataSetCreateProps& createProps = DataSetCreateProps::Default(), const DataSetAccessProps& accessProps = DataSetAccessProps::Default(), bool parents = true); diff --git a/include/highfive/bits/H5Node_traits_misc.hpp b/include/highfive/bits/H5Node_traits_misc.hpp index 2db2b2aab..a98600598 100644 --- a/include/highfive/bits/H5Node_traits_misc.hpp +++ b/include/highfive/bits/H5Node_traits_misc.hpp @@ -83,7 +83,7 @@ inline DataSet NodeTraits::createDataSet(const std::string& dataset_na template template inline DataSet NodeTraits::createDataSet(const std::string& dataset_name, - const FixedLenStringArray& data, + const deprecated::FixedLenStringArray& data, const DataSetCreateProps& createProps, const DataSetAccessProps& accessProps, bool parents) { diff --git 
a/include/highfive/bits/H5Utils.hpp b/include/highfive/bits/H5Utils.hpp index 2d9d24f88..b3f039e20 100644 --- a/include/highfive/bits/H5Utils.hpp +++ b/include/highfive/bits/H5Utils.hpp @@ -25,9 +25,11 @@ namespace HighFive { +namespace deprecated { // If ever used, recognize dimensions of FixedLenStringArray template class FixedLenStringArray; +} // namespace deprecated namespace details { // converter function for hsize_t -> size_t when hsize_t != size_t diff --git a/include/highfive/bits/H5_definitions.hpp b/include/highfive/bits/H5_definitions.hpp index 746723c88..ad4b95af2 100644 --- a/include/highfive/bits/H5_definitions.hpp +++ b/include/highfive/bits/H5_definitions.hpp @@ -5,10 +5,17 @@ #elif defined(_MSC_VER) #define H5_DEPRECATED(msg) __declspec(deprecated(#msg)) #else -#pragma message("WARNING: Compiler doesnt support deprecation") +#pragma message("WARNING: Compiler doesn't support deprecation") #define H5_DEPRECATED(msg) #endif +#if defined(__GNUC__) || defined(__clang__) +#define H5_DEPRECATED_USING(msg) H5_DEPRECATED((msg)) +#else +#pragma message("WARNING: Compiler doesn't support deprecating using statements.") +#define H5_DEPRECATED_USING(msg) +#endif + // Forward declarations @@ -38,8 +45,10 @@ class AtomicType; template class AnnotateTraits; +namespace deprecated { template class FixedLenStringArray; +} template class NodeTraits; diff --git a/src/examples/read_write_fixedlen_string.cpp b/src/examples/read_write_fixedlen_string.cpp deleted file mode 100644 index 60589637e..000000000 --- a/src/examples/read_write_fixedlen_string.cpp +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c), 2020, Blue Brain Project - * - * Distributed under the Boost Software License, Version 1.0. - * (See accompanying file LICENSE_1_0.txt or copy at - * http://www.boost.org/LICENSE_1_0.txt) - * - */ -#include -#include - -#include - -using namespace HighFive; - -// This examples shows how compile time constant strings work. -// -// Note, that as of version 2.8.0., writing `std::string` as fixed-length -// strings there's a simpler API. -int main() { - // Create a new file using the default property lists. 
- File file("create_dataset_string_example.h5", File::Truncate); - const char strings_fixed[][16] = {"abcabcabcabcabc", "123123123123123"}; - - // create a dataset ready to contains strings of the size of the vector - file.createDataSet("ds1", DataSpace(2)).write(strings_fixed); - - // Without specific type info this will create an int8 dataset - file.createDataSet("ds2", strings_fixed); - - // Now test the new interface type - FixedLenStringArray<10> arr{"0000000", "1111111"}; - auto ds = file.createDataSet("ds3", arr); - - // Read back truncating to 4 chars - FixedLenStringArray<4> array_back; - ds.read(array_back); - std::cout << "First item is '" << array_back[0] << "'\n" - << "Second item is '" << array_back[1] << "'\n"; - - return 0; -} diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index b8943067f..2f01bdd81 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -52,3 +52,5 @@ if(HIGHFIVE_TEST_SINGLE_INCLUDES) target_link_libraries("tests_include_${CLASS_NAME}" HighFive HighFiveWarnings) endforeach() endif() + +add_subdirectory(deprecated) diff --git a/tests/unit/deprecated/CMakeLists.txt b/tests/unit/deprecated/CMakeLists.txt new file mode 100644 index 000000000..5e515374b --- /dev/null +++ b/tests/unit/deprecated/CMakeLists.txt @@ -0,0 +1,10 @@ +foreach(test_name test_fixed_len_string_array) + add_executable(${test_name} "${test_name}.cpp") + + target_link_libraries(${test_name} HighFive HighFiveWarnings Catch2::Catch2WithMain) + catch_discover_tests(${test_name}) + + if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU") + target_compile_options(${test_name} PRIVATE -Wno-deprecated-declarations) + endif() +endforeach() diff --git a/tests/unit/deprecated/test_fixed_len_string_array.cpp b/tests/unit/deprecated/test_fixed_len_string_array.cpp new file mode 100644 index 000000000..1d0c33aaa --- /dev/null +++ b/tests/unit/deprecated/test_fixed_len_string_array.cpp @@ -0,0 +1,172 @@ +#include + +#include +#include "../tests_high_five.hpp" + +namespace HighFive { + +TEST_CASE("HighFiveFixedLenStringArray") { + const std::string file_name("fixed_len_string_array.h5"); + + // Create a new file using the default property lists. + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + { // Dedicated FixedLenStringArray (now deprecated). 
+ FixedLenStringArray<10> arr{"0000000", "1111111"}; + + // More API: test inserting something + arr.push_back("2222"); + auto ds = file.createDataSet("ds7", arr); // Short syntax ok + + // Recover truncating + FixedLenStringArray<4> array_back; + ds.read(array_back); + CHECK(array_back.size() == 3); + CHECK(array_back[0] == std::string("000")); + CHECK(array_back[1] == std::string("111")); + CHECK(array_back[2] == std::string("222")); + CHECK(array_back.getString(1) == "111"); + CHECK(array_back.front() == std::string("000")); + CHECK(array_back.back() == std::string("222")); + CHECK(array_back.data() == std::string("000")); + array_back.data()[0] = 'x'; + CHECK(array_back.data() == std::string("x00")); + + for (auto& raw_elem: array_back) { + raw_elem[1] = 'y'; + } + CHECK(array_back.getString(1) == "1y1"); + for (auto iter = array_back.cbegin(); iter != array_back.cend(); ++iter) { + CHECK((*iter)[1] == 'y'); + } + } +} + +template +static void check_fixed_len_string_array_contents(const FixedLenStringArray& array, + const std::vector& expected) { + REQUIRE(array.size() == expected.size()); + + for (size_t i = 0; i < array.size(); ++i) { + CHECK(array[i] == expected[i]); + } +} + + +TEST_CASE("HighFiveFixedLenStringArrayStructure") { + using fixed_array_t = FixedLenStringArray<10>; + // increment the characters of a string written in a std::array + auto increment_string = [](const fixed_array_t::value_type arr) { + fixed_array_t::value_type output(arr); + for (auto& c: output) { + if (c == 0) { + break; + } + ++c; + } + return output; + }; + + SECTION("create from std::vector (onpoint)") { + auto expected = std::vector{"000", "111"}; + auto actual = FixedLenStringArray<4>(expected); + check_fixed_len_string_array_contents(actual, expected); + } + + SECTION("create from std::vector (oversized)") { + auto expected = std::vector{"000", "111"}; + auto actual = FixedLenStringArray<8>(expected); + check_fixed_len_string_array_contents(actual, expected); + } + + SECTION("create from pointers (onpoint)") { + auto expected = std::vector{"000", "111"}; + auto actual = FixedLenStringArray<4>(expected.data(), expected.data() + expected.size()); + check_fixed_len_string_array_contents(actual, expected); + } + + SECTION("create from pointers (oversized)") { + auto expected = std::vector{"000", "111"}; + auto actual = FixedLenStringArray<8>(expected.data(), expected.data() + expected.size()); + check_fixed_len_string_array_contents(actual, expected); + } + + + SECTION("create from std::initializer_list (onpoint)") { + auto expected = std::vector{"000", "111"}; + auto actual = FixedLenStringArray<4>{"000", "111"}; + check_fixed_len_string_array_contents(actual, expected); + } + + SECTION("create from std::initializer_list (oversized)") { + auto expected = std::vector{"000", "111"}; + auto actual = FixedLenStringArray<8>{"000", "111"}; + check_fixed_len_string_array_contents(actual, expected); + } + + // manipulate FixedLenStringArray with std::copy + SECTION("compatible with std::copy") { + const fixed_array_t arr1{"0000000", "1111111"}; + fixed_array_t arr2{"0000000", "1111111"}; + std::copy(arr1.begin(), arr1.end(), std::back_inserter(arr2)); + CHECK(arr2.size() == 4); + } + + SECTION("compatible with std::transform") { + fixed_array_t arr; + { + const fixed_array_t arr1{"0000000", "1111111"}; + std::transform(arr1.begin(), arr1.end(), std::back_inserter(arr), increment_string); + } + CHECK(arr.size() == 2); + CHECK(arr[0] == std::string("1111111")); + CHECK(arr[1] == std::string("2222222")); + } + + 
SECTION("compatible with std::transform (reverse iterator)") { + fixed_array_t arr; + { + const fixed_array_t arr1{"0000000", "1111111"}; + std::copy(arr1.rbegin(), arr1.rend(), std::back_inserter(arr)); + } + CHECK(arr.size() == 2); + CHECK(arr[0] == std::string("1111111")); + CHECK(arr[1] == std::string("0000000")); + } + + SECTION("compatible with std::remove_copy_if") { + fixed_array_t arr2; + { + const fixed_array_t arr1{"0000000", "1111111"}; + std::remove_copy_if(arr1.begin(), + arr1.end(), + std::back_inserter(arr2), + [](const fixed_array_t::value_type& s) { + return std::strncmp(s.data(), "1111111", 7) == 0; + }); + } + CHECK(arr2.size() == 1); + CHECK(arr2[0] == std::string("0000000")); + } +} + +TEST_CASE("HighFiveFixedLenStringArrayAttribute") { + const std::string file_name("fixed_array_attr.h5"); + // Create a new file using the default property lists. + { + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + FixedLenStringArray<10> arr{"Hello", "world"}; + file.createAttribute("str", arr); + } + // Re-read it + { + File file(file_name); + FixedLenStringArray<8> arr; // notice the output strings can be smaller + file.getAttribute("str").read(arr); + CHECK(arr.size() == 2); + CHECK(arr[0] == std::string("Hello")); + CHECK(arr[1] == std::string("world")); + } +} + +} // namespace HighFive diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 163535b55..fefdcdd55 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -2405,36 +2405,6 @@ TEST_CASE("HighFiveFixedString") { file.createDataSet("ds6", DataSpace(1)).write(buffer); } - { // Dedicated FixedLenStringArray - FixedLenStringArray<10> arr{"0000000", "1111111"}; - - // More API: test inserting something - arr.push_back("2222"); - auto ds = file.createDataSet("ds7", arr); // Short syntax ok - - // Recover truncating - FixedLenStringArray<4> array_back; - ds.read(array_back); - CHECK(array_back.size() == 3); - CHECK(array_back[0] == std::string("000")); - CHECK(array_back[1] == std::string("111")); - CHECK(array_back[2] == std::string("222")); - CHECK(array_back.getString(1) == "111"); - CHECK(array_back.front() == std::string("000")); - CHECK(array_back.back() == std::string("222")); - CHECK(array_back.data() == std::string("000")); - array_back.data()[0] = 'x'; - CHECK(array_back.data() == std::string("x00")); - - for (auto& raw_elem: array_back) { - raw_elem[1] = 'y'; - } - CHECK(array_back.getString(1) == "1y1"); - for (auto iter = array_back.cbegin(); iter != array_back.cend(); ++iter) { - CHECK((*iter)[1] == 'y'); - } - } - { // Direct way of writing `std::string` as a fixed length // HDF5 string. 
@@ -2492,132 +2462,6 @@ TEST_CASE("HighFiveFixedString") { } } -template -static void check_fixed_len_string_array_contents(const FixedLenStringArray& array, - const std::vector& expected) { - REQUIRE(array.size() == expected.size()); - - for (size_t i = 0; i < array.size(); ++i) { - CHECK(array[i] == expected[i]); - } -} - -TEST_CASE("HighFiveFixedLenStringArrayStructure") { - using fixed_array_t = FixedLenStringArray<10>; - // increment the characters of a string written in a std::array - auto increment_string = [](const fixed_array_t::value_type arr) { - fixed_array_t::value_type output(arr); - for (auto& c: output) { - if (c == 0) { - break; - } - ++c; - } - return output; - }; - - SECTION("create from std::vector (onpoint)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<4>(expected); - check_fixed_len_string_array_contents(actual, expected); - } - - SECTION("create from std::vector (oversized)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<8>(expected); - check_fixed_len_string_array_contents(actual, expected); - } - - SECTION("create from pointers (onpoint)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<4>(expected.data(), expected.data() + expected.size()); - check_fixed_len_string_array_contents(actual, expected); - } - - SECTION("create from pointers (oversized)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<8>(expected.data(), expected.data() + expected.size()); - check_fixed_len_string_array_contents(actual, expected); - } - - - SECTION("create from std::initializer_list (onpoint)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<4>{"000", "111"}; - check_fixed_len_string_array_contents(actual, expected); - } - - SECTION("create from std::initializer_list (oversized)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<8>{"000", "111"}; - check_fixed_len_string_array_contents(actual, expected); - } - - // manipulate FixedLenStringArray with std::copy - SECTION("compatible with std::copy") { - const fixed_array_t arr1{"0000000", "1111111"}; - fixed_array_t arr2{"0000000", "1111111"}; - std::copy(arr1.begin(), arr1.end(), std::back_inserter(arr2)); - CHECK(arr2.size() == 4); - } - - SECTION("compatible with std::transform") { - fixed_array_t arr; - { - const fixed_array_t arr1{"0000000", "1111111"}; - std::transform(arr1.begin(), arr1.end(), std::back_inserter(arr), increment_string); - } - CHECK(arr.size() == 2); - CHECK(arr[0] == std::string("1111111")); - CHECK(arr[1] == std::string("2222222")); - } - - SECTION("compatible with std::transform (reverse iterator)") { - fixed_array_t arr; - { - const fixed_array_t arr1{"0000000", "1111111"}; - std::copy(arr1.rbegin(), arr1.rend(), std::back_inserter(arr)); - } - CHECK(arr.size() == 2); - CHECK(arr[0] == std::string("1111111")); - CHECK(arr[1] == std::string("0000000")); - } - - SECTION("compatible with std::remove_copy_if") { - fixed_array_t arr2; - { - const fixed_array_t arr1{"0000000", "1111111"}; - std::remove_copy_if(arr1.begin(), - arr1.end(), - std::back_inserter(arr2), - [](const fixed_array_t::value_type& s) { - return std::strncmp(s.data(), "1111111", 7) == 0; - }); - } - CHECK(arr2.size() == 1); - CHECK(arr2[0] == std::string("0000000")); - } -} - -TEST_CASE("HighFiveFixedLenStringArrayAttribute") { - const std::string file_name("fixed_array_attr.h5"); - // Create a new file using the default property 
lists. - { - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - FixedLenStringArray<10> arr{"Hello", "world"}; - file.createAttribute("str", arr); - } - // Re-read it - { - File file(file_name); - FixedLenStringArray<8> arr; // notice the output strings can be smaller - file.getAttribute("str").read(arr); - CHECK(arr.size() == 2); - CHECK(arr[0] == std::string("Hello")); - CHECK(arr[1] == std::string("world")); - } -} - TEST_CASE("HighFiveReference") { const std::string file_name("h5_ref_test.h5"); const std::string dataset1_name("dset1"); From d04789128123e1a643c6284e28087be1dafae2ee Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 30 Jan 2024 15:25:21 +0100 Subject: [PATCH 37/97] Internally, rename `dtype` to `file_data_type`. (#930) --- include/highfive/bits/H5ReadWrite_misc.hpp | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/include/highfive/bits/H5ReadWrite_misc.hpp b/include/highfive/bits/H5ReadWrite_misc.hpp index 30c17961c..05bb49888 100644 --- a/include/highfive/bits/H5ReadWrite_misc.hpp +++ b/include/highfive/bits/H5ReadWrite_misc.hpp @@ -131,29 +131,29 @@ struct string_type_checker { template template -BufferInfo::BufferInfo(const DataType& dtype, F getName, Operation _op) +BufferInfo::BufferInfo(const DataType& file_data_type, F getName, Operation _op) : op(_op) - , is_fixed_len_string(dtype.isFixedLenStr()) + , is_fixed_len_string(file_data_type.isFixedLenStr()) // In case we are using Fixed-len strings we need to subtract one dimension , n_dimensions(details::inspector::recursive_ndim - ((is_fixed_len_string && is_char_array) ? 1 : 0)) - , data_type( - string_type_checker::getDataType(create_datatype(), dtype)) { + , data_type(string_type_checker::getDataType(create_datatype(), + file_data_type)) { // We warn. In case they are really not convertible an exception will rise on read/write - if (dtype.getClass() != data_type.getClass()) { + if (file_data_type.getClass() != data_type.getClass()) { HIGHFIVE_LOG_WARN(getName() + "\": data and hdf5 dataset have different types: " + - data_type.string() + " -> " + dtype.string()); - } else if ((dtype.getClass() & data_type.getClass()) == DataTypeClass::Float) { + data_type.string() + " -> " + file_data_type.string()); + } else if ((file_data_type.getClass() & data_type.getClass()) == DataTypeClass::Float) { HIGHFIVE_LOG_WARN_IF( - (op == Operation::read) && (dtype.getSize() > data_type.getSize()), + (op == Operation::read) && (file_data_type.getSize() > data_type.getSize()), getName() + "\": hdf5 dataset has higher floating point precision than data on read: " + - dtype.string() + " -> " + data_type.string()); + file_data_type.string() + " -> " + data_type.string()); HIGHFIVE_LOG_WARN_IF( - (op == Operation::write) && (dtype.getSize() < data_type.getSize()), + (op == Operation::write) && (file_data_type.getSize() < data_type.getSize()), getName() + "\": data has higher floating point precision than hdf5 dataset on write: " + - data_type.string() + " -> " + dtype.string()); + data_type.string() + " -> " + file_data_type.string()); } } From b0b44556f305a97a6b105dfd2789a5a3897ddbd6 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 5 Feb 2024 08:41:52 +0100 Subject: [PATCH 38/97] Cosmetic changes. 
(#937) --- include/highfive/bits/H5Inspector_misc.hpp | 9 +++++---- tests/unit/tests_high_five_base.cpp | 8 ++++---- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index 7ae90d84f..ac3872dda 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -77,7 +77,7 @@ inline std::vector squeezeDimensions(const std::vector& dims, if (n_dim_requested == 0) { if (!checkDimensions(dims, n_dim_requested)) { - throw std::invalid_argument(format_error_message()); + throw std::invalid_argument("Failed dimensions check: " + format_error_message()); } return {1ul}; @@ -85,7 +85,7 @@ inline std::vector squeezeDimensions(const std::vector& dims, auto n_dim = dims.size(); if (n_dim < n_dim_requested) { - throw std::invalid_argument(format_error_message()); + throw std::invalid_argument("Failed 'n_dim < n_dim_requested: " + format_error_message()); } if (n_dim_requested == 1ul) { @@ -95,7 +95,8 @@ inline std::vector squeezeDimensions(const std::vector& dims, if (non_singleton_dim == size_t(-1)) { non_singleton_dim = i; } else { - throw std::invalid_argument(format_error_message()); + throw std::invalid_argument("Failed one-dimensional: " + + format_error_message()); } } } @@ -106,7 +107,7 @@ inline std::vector squeezeDimensions(const std::vector& dims, size_t n_dim_excess = dims.size() - n_dim_requested; for (size_t i = 1; i <= n_dim_excess; ++i) { if (dims[n_dim - i] != 1) { - throw std::invalid_argument(format_error_message()); + throw std::invalid_argument("Failed stripping from back:" + format_error_message()); } } diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index fefdcdd55..c320f58c5 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -20,7 +20,6 @@ #include #include - #include #include #include @@ -907,8 +906,9 @@ TEST_CASE("HighFiveReadWriteShortcut") { const std::string dataset_name("dset"); std::vector vec; vec.resize(x_size); - for (unsigned i = 0; i < x_size; i++) + for (unsigned i = 0; i < x_size; i++) { vec[i] = i * 2; + } std::string at_contents("Contents of string"); int my_int = 3; std::vector> my_nested = {{1, 2}, {3, 4}}; @@ -945,7 +945,7 @@ TEST_CASE("HighFiveReadWriteShortcut") { } // Plain c arrays. 1D - { + SECTION("int-c-array") { int int_c_array[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; DataSet ds_int2 = file.createDataSet("/TmpCArrayInt", int_c_array); @@ -957,7 +957,7 @@ TEST_CASE("HighFiveReadWriteShortcut") { } // Plain c arrays. 2D - { + SECTION("char-c-array") { char char_c_2darray[][3] = {"aa", "bb", "cc", "12"}; DataSet ds_char2 = file.createDataSet("/TmpCArray2dchar", char_c_2darray); From 0435f09e4c9da993a713e99f39359aa804a017d0 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 5 Feb 2024 09:30:48 +0100 Subject: [PATCH 39/97] Add `read_raw` and prepare to deprecate `read(T*, ...)`. 
(#928) --- include/highfive/bits/H5Slice_traits.hpp | 34 ++++++++++++++++++- include/highfive/bits/H5Slice_traits_misc.hpp | 24 ++++++++++--- src/examples/read_write_raw_ptr.cpp | 2 +- tests/unit/tests_high_five_base.cpp | 4 +-- 4 files changed, 56 insertions(+), 8 deletions(-) diff --git a/include/highfive/bits/H5Slice_traits.hpp b/include/highfive/bits/H5Slice_traits.hpp index c753026c3..556683853 100644 --- a/include/highfive/bits/H5Slice_traits.hpp +++ b/include/highfive/bits/H5Slice_traits.hpp @@ -292,6 +292,7 @@ class SliceTraits { /// /// Read the entire dataset into a buffer + /// /// An exception is raised is if the numbers of dimension of the buffer and /// of the dataset are different. /// @@ -305,11 +306,13 @@ class SliceTraits { /// /// Read the entire dataset into a raw buffer /// + /// \deprecated Use `read_raw` instead. + /// /// No dimensionality checks will be performed, it is the user's /// responsibility to ensure that the right amount of space has been /// allocated. /// \param array: A buffer containing enough space for the data - /// \param dtype: The type of the data, in case it cannot be automatically guessed + /// \param dtype: The datatype of elements of the in memory buffer. /// \param xfer_props: Data Transfer properties template void read(T* array, @@ -319,6 +322,8 @@ class SliceTraits { /// /// Read the entire dataset into a raw buffer /// + /// \deprecated Use `read_raw` instead. + /// /// Same as `read(T*, const DataType&, const DataTransferProps&)`. However, /// this overload deduces the HDF5 datatype of the element of `array` from /// `T`. Note, that the file datatype is already fixed. @@ -328,6 +333,33 @@ class SliceTraits { template void read(T* array, const DataTransferProps& xfer_props = DataTransferProps()) const; + /// + /// Read the entire dataset into a raw buffer + /// + /// No dimensionality checks will be performed, it is the user's + /// responsibility to ensure that the right amount of space has been + /// allocated. + /// \param array: A buffer containing enough space for the data + /// \param dtype: The type of the data, in case it cannot be automatically guessed + /// \param xfer_props: Data Transfer properties + template + void read_raw(T* array, + const DataType& dtype, + const DataTransferProps& xfer_props = DataTransferProps()) const; + + /// + /// Read the entire dataset into a raw buffer + /// + /// Same as `read(T*, const DataType&, const DataTransferProps&)`. However, + /// this overload deduces the HDF5 datatype of the element of `array` from + /// `T`. Note, that the file datatype is already fixed. 
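+    ///
+    /// For example, a sketch (assuming `dset` is a DataSet holding 100 doubles):
+    /// \code{.cpp}
+    /// std::vector<double> buffer(100);
+    /// dset.read_raw(buffer.data());
+    /// \endcode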
+ /// + /// \param array: A buffer containing enough space for the data + /// \param xfer_props: Data Transfer properties + template + void read_raw(T* array, const DataTransferProps& xfer_props = DataTransferProps()) const; + + /// /// Write the integrality N-dimension buffer to this dataset /// An exception is raised is if the numbers of dimension of the buffer and diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index dd7e45b1d..88804f6d0 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -186,7 +186,7 @@ inline void SliceTraits::read(T& array, const DataTransferProps& xfer_ auto dims = mem_space.getDimensions(); auto r = details::data_converter::get_reader(dims, array, file_datatype); - read(r.getPointer(), buffer_info.data_type, xfer_props); + read_raw(r.getPointer(), buffer_info.data_type, xfer_props); // re-arrange results r.unserialize(array); @@ -207,12 +207,26 @@ inline void SliceTraits::read(T& array, const DataTransferProps& xfer_ } } - template template inline void SliceTraits::read(T* array, const DataType& mem_datatype, const DataTransferProps& xfer_props) const { + read_raw(array, mem_datatype, xfer_props); +} + +template +template +inline void SliceTraits::read(T* array, const DataTransferProps& xfer_props) const { + read_raw(array, xfer_props); +} + + +template +template +inline void SliceTraits::read_raw(T* array, + const DataType& mem_datatype, + const DataTransferProps& xfer_props) const { static_assert(!std::is_const::value, "read() requires a non-const structure to read data into"); @@ -226,13 +240,14 @@ inline void SliceTraits::read(T* array, static_cast(array)); } + template template -inline void SliceTraits::read(T* array, const DataTransferProps& xfer_props) const { +inline void SliceTraits::read_raw(T* array, const DataTransferProps& xfer_props) const { using element_type = typename details::inspector::base_type; const DataType& mem_datatype = create_and_check_datatype(); - read(array, mem_datatype, xfer_props); + read_raw(array, mem_datatype, xfer_props); } @@ -276,6 +291,7 @@ inline void SliceTraits::write_raw(const T* buffer, static_cast(buffer)); } + template template inline void SliceTraits::write_raw(const T* buffer, const DataTransferProps& xfer_props) { diff --git a/src/examples/read_write_raw_ptr.cpp b/src/examples/read_write_raw_ptr.cpp index b6cd9eda5..4c8b563cb 100644 --- a/src/examples/read_write_raw_ptr.cpp +++ b/src/examples/read_write_raw_ptr.cpp @@ -66,7 +66,7 @@ int main(void) { auto nd_array = std::vector(n_elements); // Finally, read into the memory by passing a raw pointer to the library. - dataset.read(nd_array.data()); + dataset.read_raw(nd_array.data()); } return 0; diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index c320f58c5..5b51b219f 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -2422,7 +2422,7 @@ TEST_CASE("HighFiveFixedString") { // Due to missing non-const overload of `data()` until C++17 we'll // read into something else instead (don't forget the '\0'). 
auto expected = std::vector(n_chars, '!'); - ds.read(expected.data(), datatype); + ds.read_raw(expected.data(), datatype); CHECK(expected.size() == value.size() + 1); for (size_t i = 0; i < value.size(); ++i) { @@ -2453,7 +2453,7 @@ TEST_CASE("HighFiveFixedString") { ds.write_raw(value.data(), datatype); auto expected = std::vector(value.size(), '-'); - ds.read(expected.data(), datatype); + ds.read_raw(expected.data(), datatype); CHECK(expected.size() == value.size()); for (size_t i = 0; i < value.size(); ++i) { From 5a95dd6859666280d03ab240507a439f4e0a281c Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 5 Feb 2024 09:43:04 +0100 Subject: [PATCH 40/97] Move strange test to `test_legacy.cpp`. (#936) This new groups of tests isolates tests the are counter-intuitive from the rest. This makes it easier to review them, and makes it clear that they're considered odd by someone else; and therefore might even be testing buggy behaviour. --- tests/unit/CMakeLists.txt | 2 +- tests/unit/test_legacy.cpp | 38 +++++++++++++++++++++++++++++ tests/unit/tests_high_five_base.cpp | 23 ----------------- 3 files changed, 39 insertions(+), 24 deletions(-) create mode 100644 tests/unit/test_legacy.cpp diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 2f01bdd81..a105e331e 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -7,7 +7,7 @@ if(MSVC) endif() ## Base tests -foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type) +foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type test_legacy) add_executable(${test_name} "${test_name}.cpp") target_link_libraries(${test_name} HighFive HighFiveWarnings Catch2::Catch2WithMain) catch_discover_tests(${test_name}) diff --git a/tests/unit/test_legacy.cpp b/tests/unit/test_legacy.cpp new file mode 100644 index 000000000..7d7e67f26 --- /dev/null +++ b/tests/unit/test_legacy.cpp @@ -0,0 +1,38 @@ +// This file collects tests the require legacy behaviour of v2 (and older) to +// pass. Tests in this file could be bugs too. + +#include +#include +#include + +#include + +using namespace HighFive; + +TEST_CASE("HighFiveReadWriteConsts") { + // This test seems really strange. Essentially, it malloc's a 3**3 doubles. + // Then reinterpret_cast's the pointer to the first double (a `double *`) + // as a `double***`. And then uses `inspector` based code to write from the + // `double***`. 
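+    //
+    // (For contrast, a sketch of the idiomatic alternative would be to pass
+    // the flat pointer directly, e.g. `dataset.write_raw(t1.data());`, with
+    // no reinterpret_cast involved.)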
+ + const std::string file_name("3d_dataset_from_flat.h5"); + const std::string dataset_name("dset"); + const std::array DIMS{3, 3, 3}; + using datatype = int; + + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + DataSpace dataspace = DataSpace(DIMS); + + DataSet dataset = file.createDataSet(dataset_name, dataspace); + std::vector const t1(DIMS[0] * DIMS[1] * DIMS[2], 1); + auto raw_3d_vec_const = reinterpret_cast(t1.data()); + dataset.write_raw(raw_3d_vec_const); + + std::vector>> result; + dataset.read(result); + for (const auto& vec2d: result) { + for (const auto& vec1d: vec2d) { + REQUIRE(vec1d == (std::vector{1, 1, 1})); + } + } +} diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 5b51b219f..c4c953ae1 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -2520,29 +2520,6 @@ TEST_CASE("HighFiveReference") { } } -TEST_CASE("HighFiveReadWriteConsts") { - const std::string file_name("3d_dataset_from_flat.h5"); - const std::string dataset_name("dset"); - const std::array DIMS{3, 3, 3}; - using datatype = int; - - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - DataSpace dataspace = DataSpace(DIMS); - - DataSet dataset = file.createDataSet(dataset_name, dataspace); - std::vector const t1(DIMS[0] * DIMS[1] * DIMS[2], 1); - auto raw_3d_vec_const = reinterpret_cast(t1.data()); - dataset.write(raw_3d_vec_const); - - std::vector>> result; - dataset.read(result); - for (const auto& vec2d: result) { - for (const auto& vec1d: vec2d) { - REQUIRE(vec1d == (std::vector{1, 1, 1})); - } - } -} - TEST_CASE("HighFiveDataTypeClass") { auto Float = DataTypeClass::Float; auto String = DataTypeClass::String; From ceddd313deccde01ddadb3f5c90f6b22d6630127 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 6 Feb 2024 08:28:36 +0100 Subject: [PATCH 41/97] Add `Attribute::read_raw`. (#940) --- include/highfive/H5Attribute.hpp | 14 ++++++++++++-- include/highfive/bits/H5Attribute_misc.hpp | 14 ++++++++++++-- tests/unit/tests_high_five_base.cpp | 4 ++-- 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/include/highfive/H5Attribute.hpp b/include/highfive/H5Attribute.hpp index 810d388ae..1af10cf04 100644 --- a/include/highfive/H5Attribute.hpp +++ b/include/highfive/H5Attribute.hpp @@ -113,6 +113,16 @@ class Attribute: public Object, public PathTraits { template void read(T& array) const; + /// \brief Read the attribute into a pre-allocated buffer. + /// \deprecated use `read(T&` or `read_raw`. + template + void read(T* array, const DataType& mem_datatype) const; + + /// \brief Read the attribute into a buffer. + /// \deprecated use `read(T&` or `read_raw`. + template + void read(T* array) const; + /// \brief Read the attribute into a pre-allocated buffer. /// \param array A pointer to the first byte of sufficient pre-allocated memory. /// \param mem_datatype The DataType of the array. @@ -132,7 +142,7 @@ class Attribute: public Object, public PathTraits { /// \endcode /// \since 2.2.2 template - void read(T* array, const DataType& mem_datatype) const; + void read_raw(T* array, const DataType& mem_datatype) const; /// \brief Read the attribute into a buffer. /// Behaves like Attribute::read(T*, const DataType&) const but @@ -154,7 +164,7 @@ class Attribute: public Object, public PathTraits { /// \endcode /// \since 2.2.2 template - void read(T* array) const; + void read_raw(T* array) const; /// \brief Write the value into the Attribute. 
/// diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index cc235b500..3b48c143a 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -83,7 +83,7 @@ inline void Attribute::read(T& array) const { } auto r = details::data_converter::get_reader(dims, array, file_datatype); - read(r.getPointer(), buffer_info.data_type); + read_raw(r.getPointer(), buffer_info.data_type); // re-arrange results r.unserialize(array); @@ -103,6 +103,11 @@ inline void Attribute::read(T& array) const { template inline void Attribute::read(T* array, const DataType& mem_datatype) const { + read_raw(array, mem_datatype); +} + +template +inline void Attribute::read_raw(T* array, const DataType& mem_datatype) const { static_assert(!std::is_const::value, "read() requires a non-const structure to read data into"); @@ -111,10 +116,15 @@ inline void Attribute::read(T* array, const DataType& mem_datatype) const { template inline void Attribute::read(T* array) const { + read_raw(array); +} + +template +inline void Attribute::read_raw(T* array) const { using element_type = typename details::inspector::base_type; const DataType& mem_datatype = create_and_check_datatype(); - read(array, mem_datatype); + read_raw(array, mem_datatype); } template diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index c4c953ae1..95a6e7d44 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -2113,7 +2113,7 @@ TEST_CASE("DirectWriteBool") { SECTION("WriteReadCycleAttribute") { auto attr = file.createAttribute("attr", dataspace, datatype); attr.write_raw(expected); - attr.read(actual); + attr.read_raw(actual); for (size_t i = 0; i < n; ++i) { REQUIRE(expected[i] == actual[i]); @@ -2123,7 +2123,7 @@ TEST_CASE("DirectWriteBool") { SECTION("WriteReadCycleDataSet") { auto dset = file.createAttribute("dset", dataspace, datatype); dset.write_raw(expected); - dset.read(actual); + dset.read_raw(actual); for (size_t i = 0; i < n; ++i) { REQUIRE(expected[i] == actual[i]); From 83b13287af30ddc3f7e90ebf88922ec7000c1520 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Wed, 7 Feb 2024 09:34:51 +0100 Subject: [PATCH 42/97] Migration instructions for `read_raw`. (#941) --- doc/migration_guide.md | 50 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/doc/migration_guide.md b/doc/migration_guide.md index e85002b15..c1d042b58 100644 --- a/doc/migration_guide.md +++ b/doc/migration_guide.md @@ -14,3 +14,53 @@ replaced with an `std::vector` (for example). If desired one can silence warnings by replacing `FixedLenStringArray` with `deprecated::FixedLenStringArray`. + +## Deprecation of `read(T*, ...)`. +A "raw read" is when the user allocates sufficient bytes and provides HighFive +with the pointer to the first byte. "Regular reads" take a detour via the +inspector and might resize the container, etc. + +The issue is that HighFive `v2` had the following two overloads: +``` +template +DataSet::read(T& x, /* skipped */); + +template +DataSet::read(T* x, /* skipped */); +``` +and the analogous overloads for `Attribute`. + +The problem is that the second overload will also match things like `T**` and +`T[][]`. For example, the following code used the removed overload: +``` +double x[2][3]; +dset.read(x); +``` +which is fine because it is a contiguous sequence of doubles.
It's equivalent to the +following `v3` code: +``` +double x[2][3]; +dset.read_raw((double*) x); +``` + +### Accidental Raw Read +We consider the example above to be accidentally using a raw read, when it +could be performing a regular read. We suggest not changing the above, i.e. +``` +double x[2][3]; +dset.read(x); +``` +continues to be correct in `v3` and lets HighFive check that the dimensions match. The +inspector recognizes `double[2][3]` as a contiguous array of doubles. +Therefore, it'll use the shallow-copy buffer and avoid any additional +allocations or copies. + +### Intentional Raw Read +When genuinely performing a "raw read", one must replace `read` with +`read_raw`. For example: + +``` +double x = malloc(2*3 * sizeof(double)); +dset.read_raw(x); +``` +is correct in `v3`. From ff0e521821214181f992a8e83e1d0935d6a4de76 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Wed, 7 Feb 2024 10:04:13 +0100 Subject: [PATCH 43/97] Migration instruction for updating CMake code. (#943) --------- Co-authored-by: Matthias Wolf --- doc/migration_guide.md | 44 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/doc/migration_guide.md b/doc/migration_guide.md index c1d042b58..5276261e6 100644 --- a/doc/migration_guide.md +++ b/doc/migration_guide.md @@ -64,3 +64,47 @@ double x = malloc(2*3 * sizeof(double)); dset.read_raw(x); ``` is correct in `v3`. + +## Reworked CMake +In `v3` we completely rewrote the CMake code of HighFive. Since HighFive is a +header-only library, it needs to perform two tasks: + +1. Copy the sources during installation. +2. Export a target that sets `-I ${HIGHFIVE_DIR}` and links with HDF5. + +We've removed all flags for optional dependencies, such as +`-DHIGHFIVE_USE_BOOST`. Instead, users that want to read/write into/from +optionally supported containers include a header with the corresponding name +and make sure to adjust their CMake code to link with the dependency. + +The C++ code should have: +``` +#include + +// Code that reads or writes `boost::multi_array`. +``` +and the CMake code would have +``` +add_executable(app) + +# These lines might work, but depend on how exactly the user intends to use +# Boost. They are not specific to HighFive, but previously added automatically +# (and sometimes correctly) by HighFive. +find_package(Boost) +target_link_libraries(app PUBLIC Boost::boost) + +# For HighFive there are two options for adding `-I ${HIGHFIVE_DIR}` and the +# flags for HDF5. +# +# Option 1: HighFive is installed (system-wide) as a regular library: +find_package(HighFive) +target_link_libraries(app PUBLIC HighFive::HighFive) + +# Option 2: HighFive is vendored as part of the project: +add_subdirectory(third_party/HighFive) +target_link_libraries(app PUBLIC HighFive::HighFive) +``` + +There are extensive examples of project integration in `tests/cmake_integration`, +including how those projects in turn can be included in other projects. If these +examples don't help, please feel free to open an Issue. From cfe85576b7c23d889d5aa8d16090b9545407801f Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Wed, 7 Feb 2024 14:28:49 +0100 Subject: [PATCH 44/97] Migration instruction for `DataSpaceType`. (#944) --- doc/migration_guide.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/doc/migration_guide.md b/doc/migration_guide.md index 5276261e6..edd784a63 100644 --- a/doc/migration_guide.md +++ b/doc/migration_guide.md @@ -15,6 +15,7 @@ replaced with an `std::vector` (for example).
If desired one can silence warnings by replacing `FixedLenStringArray` with `deprecated::FixedLenStringArray`. + ## Deprecation of `read(T*, ...)`. A "raw read" is when the user allocates sufficient bytes and provides HighFive with the pointer to the first byte. "Regular reads" take a detour via the @@ -108,3 +109,13 @@ target_link_libraries(app PUBLIC HighFive::HighFive) There are extensive examples of project integration in `tests/cmake_integration`, including how those projects in turn can be included in other projects. If these examples don't help, please feel free to open an Issue. + +## Type change `DataSpace::DataSpaceType`. +We've converted the `enum` `DataSpace::DataSpaceType` to an `enum class`. We've +added static `constexpr` members `dataspace_null` and `dataspace_scalar` to +`DataSpace`. This minimizes the risk of breaking user code. + +Note that objects of type `DataSpace::DataSpaceType` will no longer silently +convert to an integer. Including the two constants +`DataSpace::dataspace_{scalar,null}`. + From 7ba484914b601e7ec9a1cf0aae3ac68cda86bdae Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 8 Feb 2024 10:14:38 +0100 Subject: [PATCH 45/97] More migration to `read_raw`. (#945) --- include/highfive/h5easy_bits/H5Easy_Eigen.hpp | 4 ++-- include/highfive/h5easy_bits/H5Easy_opencv.hpp | 4 ++-- include/highfive/h5easy_bits/H5Easy_xtensor.hpp | 4 ++-- tests/unit/tests_high_five_base.cpp | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/include/highfive/h5easy_bits/H5Easy_Eigen.hpp b/include/highfive/h5easy_bits/H5Easy_Eigen.hpp index 5b5d3b9a5..a2e23bc0f 100644 --- a/include/highfive/h5easy_bits/H5Easy_Eigen.hpp +++ b/include/highfive/h5easy_bits/H5Easy_Eigen.hpp @@ -98,7 +98,7 @@ struct io_impl, T DataSet dataset = file.getDataSet(path); std::vector dims = shape(file, path, dataset, T::RowsAtCompileTime); T data(dims[0], dims[1]); - dataset.read(data.data()); + dataset.read_raw(data.data()); if (data.IsVectorAtCompileTime || data.IsRowMajor) { return data; } @@ -130,7 +130,7 @@ struct io_impl, T DataSpace dataspace = attribute.getSpace(); std::vector dims = shape(file, path, dataspace, T::RowsAtCompileTime); T data(dims[0], dims[1]); - attribute.read(data.data()); + attribute.read_raw(data.data()); if (data.IsVectorAtCompileTime || data.IsRowMajor) { return data; } diff --git a/include/highfive/h5easy_bits/H5Easy_opencv.hpp b/include/highfive/h5easy_bits/H5Easy_opencv.hpp index b640cd854..7be19f1e3 100644 --- a/include/highfive/h5easy_bits/H5Easy_opencv.hpp +++ b/include/highfive/h5easy_bits/H5Easy_opencv.hpp @@ -61,7 +61,7 @@ struct io_impl::value>::type> { DataSet dataset = file.getDataSet(path); std::vector dims = shape(file, path, dataset.getDimensions()); T data(dims[0], dims[1]); - dataset.read(reinterpret_cast(data.data)); + dataset.read_raw(reinterpret_cast(data.data)); return data; } @@ -89,7 +89,7 @@ struct io_impl::value>::type> { DataSpace dataspace = attribute.getSpace(); std::vector dims = shape(file, path, dataspace.getDimensions()); T data(dims[0], dims[1]); - attribute.read(reinterpret_cast(data.data)); + attribute.read_raw(reinterpret_cast(data.data)); return data; } }; diff --git a/include/highfive/h5easy_bits/H5Easy_xtensor.hpp b/include/highfive/h5easy_bits/H5Easy_xtensor.hpp index 6b0238c4d..9b737f03b 100644 --- a/include/highfive/h5easy_bits/H5Easy_xtensor.hpp +++ b/include/highfive/h5easy_bits/H5Easy_xtensor.hpp @@ -44,7 +44,7 @@ struct io_impl::value>::type> { DataSet dataset = file.getDataSet(path); std::vector dims = 
dataset.getDimensions(); T data = T::from_shape(dims); - dataset.read(data.data()); + dataset.read_raw(data.data()); return data; } @@ -73,7 +73,7 @@ struct io_impl::value>::type> { DataSpace dataspace = attribute.getSpace(); std::vector dims = dataspace.getDimensions(); T data = T::from_shape(dims); - attribute.read(data.data()); + attribute.read_raw(data.data()); return data; } }; diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 95a6e7d44..f7cf67532 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -2433,7 +2433,7 @@ TEST_CASE("HighFiveFixedString") { #if HIGHFIVE_CXX_STD >= 17 { auto expected = std::string(value.size(), '-'); - ds.read(expected.data(), datatype); + ds.read_raw(expected.data(), datatype); REQUIRE(expected == value); } From ffa7311ab0172f7b0f8ed41be8d39393a82194f3 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 8 Feb 2024 16:56:15 +0100 Subject: [PATCH 46/97] Start merging `v3` features. (#935) --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index bc0d2752e..668c90365 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,8 @@ +*Note:* In preparation of `v3` of HighFive, we've started merging breaking +changes into the main branch. More information and opportunity to comment can +be found at: +https://github.com/BlueBrain/HighFive/issues/864 + # HighFive - HDF5 header-only C++ Library [![Doxygen -> gh-pages](https://github.com/BlueBrain/HighFive/workflows/gh-pages/badge.svg?branch=master)](https://BlueBrain.github.io/HighFive/actions/workflows/gh-pages.yml?query=branch%3Amaster) From ab5b3f8f4ae59a10e271f457d31c657dbe6de989 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 8 Feb 2024 17:25:42 +0100 Subject: [PATCH 47/97] Remove `read(T*, ...)`. (#942) --- include/highfive/H5Attribute.hpp | 10 ------- include/highfive/bits/H5Attribute_misc.hpp | 10 ------- include/highfive/bits/H5Slice_traits.hpp | 30 ------------------- include/highfive/bits/H5Slice_traits_misc.hpp | 14 --------- 4 files changed, 64 deletions(-) diff --git a/include/highfive/H5Attribute.hpp b/include/highfive/H5Attribute.hpp index 1af10cf04..c34f9e49f 100644 --- a/include/highfive/H5Attribute.hpp +++ b/include/highfive/H5Attribute.hpp @@ -113,16 +113,6 @@ class Attribute: public Object, public PathTraits { template void read(T& array) const; - /// \brief Read the attribute into a pre-allocated buffer. - /// \deprecated use `read(T&` or `read_raw`. - template - void read(T* array, const DataType& mem_datatype) const; - - /// \brief Read the attribute into a buffer. - /// \deprecated use `read(T&` or `read_raw`. - template - void read(T* array) const; - /// \brief Read the attribute into a pre-allocated buffer. /// \param array A pointer to the first byte of sufficient pre-allocated memory. /// \param mem_datatype The DataType of the array. 
diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 3b48c143a..62f6ebd82 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -101,11 +101,6 @@ inline void Attribute::read(T& array) const { } } -template -inline void Attribute::read(T* array, const DataType& mem_datatype) const { - read_raw(array, mem_datatype); -} - template inline void Attribute::read_raw(T* array, const DataType& mem_datatype) const { static_assert(!std::is_const::value, @@ -114,11 +109,6 @@ inline void Attribute::read_raw(T* array, const DataType& mem_datatype) const { detail::h5a_read(getId(), mem_datatype.getId(), static_cast(array)); } -template -inline void Attribute::read(T* array) const { - read_raw(array); -} - template inline void Attribute::read_raw(T* array) const { using element_type = typename details::inspector::base_type; diff --git a/include/highfive/bits/H5Slice_traits.hpp b/include/highfive/bits/H5Slice_traits.hpp index 556683853..fd8c31d27 100644 --- a/include/highfive/bits/H5Slice_traits.hpp +++ b/include/highfive/bits/H5Slice_traits.hpp @@ -303,36 +303,6 @@ class SliceTraits { template void read(T& array, const DataTransferProps& xfer_props = DataTransferProps()) const; - /// - /// Read the entire dataset into a raw buffer - /// - /// \deprecated Use `read_raw` instead. - /// - /// No dimensionality checks will be performed, it is the user's - /// responsibility to ensure that the right amount of space has been - /// allocated. - /// \param array: A buffer containing enough space for the data - /// \param dtype: The datatype of elements of the in memory buffer. - /// \param xfer_props: Data Transfer properties - template - void read(T* array, - const DataType& dtype, - const DataTransferProps& xfer_props = DataTransferProps()) const; - - /// - /// Read the entire dataset into a raw buffer - /// - /// \deprecated Use `read_raw` instead. - /// - /// Same as `read(T*, const DataType&, const DataTransferProps&)`. However, - /// this overload deduces the HDF5 datatype of the element of `array` from - /// `T`. Note, that the file datatype is already fixed. - /// - /// \param array: A buffer containing enough space for the data - /// \param xfer_props: Data Transfer properties - template - void read(T* array, const DataTransferProps& xfer_props = DataTransferProps()) const; - /// /// Read the entire dataset into a raw buffer /// diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index 88804f6d0..27c103ae2 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -207,20 +207,6 @@ inline void SliceTraits::read(T& array, const DataTransferProps& xfer_ } } -template -template -inline void SliceTraits::read(T* array, - const DataType& mem_datatype, - const DataTransferProps& xfer_props) const { - read_raw(array, mem_datatype, xfer_props); -} - -template -template -inline void SliceTraits::read(T* array, const DataTransferProps& xfer_props) const { - read_raw(array, xfer_props); -} - template template From 0b47bad40965c7c19d3d10dec7f75489cd03119a Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 8 Feb 2024 17:27:05 +0100 Subject: [PATCH 48/97] Macro free dependency handling. (#892) In a prior commit the sources were restructured such that each optional dependency is contained in separate files. This commit removes the macros that controlled which dependencies are included. 
To include the optional dependency Boost use: #include #include the order doesn't matter. --- include/highfive/bits/H5DataType_misc.hpp | 4 ---- include/highfive/bits/H5Inspector_misc.hpp | 8 -------- include/highfive/boost.hpp | 3 --- include/highfive/eigen.hpp | 3 --- include/highfive/half_float.hpp | 4 +--- src/examples/boost_multi_array_2D.cpp | 4 +--- src/examples/boost_multiarray_complex.cpp | 5 +---- src/examples/boost_ublas_double.cpp | 5 ++--- src/examples/create_dataset_half_float.cpp | 1 + tests/unit/CMakeLists.txt | 12 ++++++++++++ tests/unit/data_generator.hpp | 2 +- tests/unit/tests_high_five.hpp | 2 ++ tests/unit/tests_high_five_base.cpp | 9 +++++++++ tests/unit/tests_high_five_multi_dims.cpp | 1 + 14 files changed, 31 insertions(+), 32 deletions(-) diff --git a/include/highfive/bits/H5DataType_misc.hpp b/include/highfive/bits/H5DataType_misc.hpp index 619e51e71..8098a675c 100644 --- a/include/highfive/bits/H5DataType_misc.hpp +++ b/include/highfive/bits/H5DataType_misc.hpp @@ -520,7 +520,3 @@ inline DataType create_datatype() { } } // namespace HighFive - -#ifdef H5_USE_HALF_FLOAT -#include -#endif diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index ac3872dda..d85a4de12 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -640,11 +640,3 @@ struct inspector { } // namespace details } // namespace HighFive - -#ifdef H5_USE_BOOST -#include -#endif - -#ifdef H5_USE_EIGEN -#include -#endif diff --git a/include/highfive/boost.hpp b/include/highfive/boost.hpp index 8992159a2..3dd4c9f9e 100644 --- a/include/highfive/boost.hpp +++ b/include/highfive/boost.hpp @@ -1,5 +1,4 @@ #pragma once -#ifdef H5_USE_BOOST #include "bits/H5Inspector_decl.hpp" #include "H5Exception.hpp" @@ -160,5 +159,3 @@ struct inspector> { } // namespace details } // namespace HighFive - -#endif diff --git a/include/highfive/eigen.hpp b/include/highfive/eigen.hpp index c47095dde..f91dab24c 100644 --- a/include/highfive/eigen.hpp +++ b/include/highfive/eigen.hpp @@ -1,5 +1,4 @@ #pragma once -#ifdef H5_USE_EIGEN #include "bits/H5Inspector_decl.hpp" #include "H5Exception.hpp" @@ -89,5 +88,3 @@ struct inspector> { } // namespace details } // namespace HighFive - -#endif diff --git a/include/highfive/half_float.hpp b/include/highfive/half_float.hpp index 998e693ff..dc2464c22 100644 --- a/include/highfive/half_float.hpp +++ b/include/highfive/half_float.hpp @@ -1,5 +1,4 @@ #pragma once -#ifdef H5_USE_HALF_FLOAT #include @@ -16,6 +15,5 @@ inline AtomicType::AtomicType() { // Floating point exponent bias detail::h5t_set_ebias(_hid, 15); } -} // namespace HighFive -#endif +} // namespace HighFive diff --git a/src/examples/boost_multi_array_2D.cpp b/src/examples/boost_multi_array_2D.cpp index 4bec1ec12..508c3a880 100644 --- a/src/examples/boost_multi_array_2D.cpp +++ b/src/examples/boost_multi_array_2D.cpp @@ -8,11 +8,9 @@ */ #include -#undef H5_USE_BOOST -#define H5_USE_BOOST - #include #include +#include using namespace HighFive; diff --git a/src/examples/boost_multiarray_complex.cpp b/src/examples/boost_multiarray_complex.cpp index 37481db62..34f18f551 100644 --- a/src/examples/boost_multiarray_complex.cpp +++ b/src/examples/boost_multiarray_complex.cpp @@ -9,12 +9,9 @@ #include #include -#undef H5_USE_BOOST -#define H5_USE_BOOST - #include -#include +#include typedef std::complex complex_t; diff --git a/src/examples/boost_ublas_double.cpp b/src/examples/boost_ublas_double.cpp index b025475b9..3889df680 100644 
--- a/src/examples/boost_ublas_double.cpp +++ b/src/examples/boost_ublas_double.cpp @@ -8,11 +8,10 @@ */ #include -#undef H5_USE_BOOST -#define H5_USE_BOOST - #include +#include + // In some versions of Boost (starting with 1.64), you have to include the serialization header // before ublas #include diff --git a/src/examples/create_dataset_half_float.cpp b/src/examples/create_dataset_half_float.cpp index 837c58704..015776699 100644 --- a/src/examples/create_dataset_half_float.cpp +++ b/src/examples/create_dataset_half_float.cpp @@ -12,6 +12,7 @@ #include #include +#include const std::string FILE_NAME("create_dataset_half_float_example.h5"); const std::string DATASET_NAME("dset"); diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index a105e331e..18110d83f 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -46,6 +46,18 @@ option(HIGHFIVE_TEST_SINGLE_INCLUDES "Enable testing single includes" FALSE) if(HIGHFIVE_TEST_SINGLE_INCLUDES) file(GLOB public_headers LIST_DIRECTORIES false RELATIVE ${PROJECT_SOURCE_DIR}/include ${PROJECT_SOURCE_DIR}/include/highfive/*.hpp) foreach(PUBLIC_HEADER ${public_headers}) + if(PUBLIC_HEADER STREQUAL "highfive/boost.hpp" AND NOT HIGHFIVE_USE_BOOST) + continue() + endif() + + if(PUBLIC_HEADER STREQUAL "highfive/half_float.hpp" AND NOT HIGHFIVE_USE_HALF_FLOAT) + continue() + endif() + + if(PUBLIC_HEADER STREQUAL "highfive/eigen.hpp" AND NOT HIGHFIVE_USE_EIGEN) + continue() + endif() + get_filename_component(CLASS_NAME ${PUBLIC_HEADER} NAME_WE) configure_file(tests_import_public_headers.cpp "tests_${CLASS_NAME}.cpp" @ONLY) add_executable("tests_include_${CLASS_NAME}" "${CMAKE_CURRENT_BINARY_DIR}/tests_${CLASS_NAME}.cpp") diff --git a/tests/unit/data_generator.hpp b/tests/unit/data_generator.hpp index 9a6712d53..f5dc681c5 100644 --- a/tests/unit/data_generator.hpp +++ b/tests/unit/data_generator.hpp @@ -8,7 +8,7 @@ #include #ifdef H5_USE_BOOST -#include +#include #endif #include diff --git a/tests/unit/tests_high_five.hpp b/tests/unit/tests_high_five.hpp index 9d259c8d1..fa0cfd714 100644 --- a/tests/unit/tests_high_five.hpp +++ b/tests/unit/tests_high_five.hpp @@ -43,6 +43,8 @@ using base_test_types = std::tuple; #ifdef H5_USE_HALF_FLOAT +#include + using float16_t = half_float::half; using numerical_test_types = decltype(std::tuple_cat(std::declval(), std::tuple())); diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index f7cf67532..03e30438c 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -27,6 +27,15 @@ #include #include "tests_high_five.hpp" +#ifdef H5_USE_BOOST +#include +#endif + +#ifdef H5_USE_EIGEN +#include +#endif + + using namespace HighFive; using Catch::Matchers::Equals; diff --git a/tests/unit/tests_high_five_multi_dims.cpp b/tests/unit/tests_high_five_multi_dims.cpp index 08fbea9ce..31757d6c5 100644 --- a/tests/unit/tests_high_five_multi_dims.cpp +++ b/tests/unit/tests_high_five_multi_dims.cpp @@ -15,6 +15,7 @@ #ifdef H5_USE_BOOST #include +#include #endif #include From c22ac21767d13fb0d76a4bda6e1af7dab816a20b Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 8 Feb 2024 17:38:29 +0100 Subject: [PATCH 49/97] Remove `#include`. (#903) The comments state the at some point boost serialization and boost numeric had an ordering requirement of certain headers. 
It claimed: // starting Boost 1.64, serialization header must come before ublas #include #include If this is still true then users must include the affected boost headers before including `highfive/boost.hpp`. However, we're no longer forcing the dependency onto all HighFive + Boost users. --- include/highfive/boost.hpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/include/highfive/boost.hpp b/include/highfive/boost.hpp index 3dd4c9f9e..a4364faf3 100644 --- a/include/highfive/boost.hpp +++ b/include/highfive/boost.hpp @@ -4,8 +4,6 @@ #include "H5Exception.hpp" #include -// starting Boost 1.64, serialization header must come before ublas -#include #include namespace HighFive { From 4914dce759ceb78e0a87b3b0950c4276c64ce232 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 9 Feb 2024 09:57:25 +0100 Subject: [PATCH 50/97] Fix typo in migration guide. (#951) --- doc/migration_guide.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/migration_guide.md b/doc/migration_guide.md index edd784a63..4aad2e67d 100644 --- a/doc/migration_guide.md +++ b/doc/migration_guide.md @@ -61,7 +61,7 @@ When genuinely performing a "raw read", one must replace `read` with `read_raw`. For example: ``` -double x = malloc(2*3 * sizeof(double)); +double* x = malloc(2*3 * sizeof(double)); dset.read_raw(x); ``` is correct in `v3`. From c54ee57fd461c3576af57a8838413de691ab8046 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 9 Feb 2024 11:09:51 +0100 Subject: [PATCH 51/97] Migration instructions for file driver. (#953) --- doc/migration_guide.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/doc/migration_guide.md b/doc/migration_guide.md index 4aad2e67d..2ffe9e257 100644 --- a/doc/migration_guide.md +++ b/doc/migration_guide.md @@ -119,3 +119,16 @@ Note that objects of type `DataSpace::DataSpaceType` will no longer silently convert to an integer. Including the two constants `DataSpace::dataspace_{scalar,null}`. +## Deprecation `FileDriver` and `MPIOFileDriver`. +These have been deprecated to stick more closely with familiar HDF5 concepts. +The `FileDriver` is synonymous to `FileAccessProps`; and `MPIOFileDriver` is +the same as: +``` +auto fapl = FileAccessProps{}; +fapl.add(MPIOFileAccess(mpi_comm, mpi_info)); +``` + +We felt that the savings in typing effort weren't worth introducing the concept +of a "file driver". Removing the concept hopefully makes it easier to add a +better abstraction for the handling of the property lists, when we discover +such an abstraction. 
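To make the migration described above concrete, here is a minimal sketch of a helper that opens a file for parallel access, with the deprecated `v2` form kept as a comment. It assumes MPI has already been initialised and that the caller passes a valid communicator and info object (named `mpi_comm` and `mpi_info`, matching the snippet above); the helper name and the way the file is opened are made up for illustration. Only calls that appear elsewhere in this series are used (`FileAccessProps`, `MPIOFileAccess`, and the `File` constructor that accepts a file-access property list); the deprecated driver classes themselves are removed in the next patch.

```
#include <highfive/highfive.hpp>
#include <mpi.h>
#include <string>

// Assumes MPI_Init has already been called by the application.
HighFive::File open_parallel_file(const std::string& name, MPI_Comm mpi_comm, MPI_Info mpi_info) {
    // v2 (deprecated, removed in v3):
    //   HighFive::MPIOFileDriver driver(mpi_comm, mpi_info);
    //   return HighFive::File(name, HighFive::File::ReadWrite | HighFive::File::Create, driver);

    // v3: build the file access property list directly.
    HighFive::FileAccessProps fapl;
    fapl.add(HighFive::MPIOFileAccess(mpi_comm, mpi_info));
    return HighFive::File(name, HighFive::File::ReadWrite | HighFive::File::Create, fapl);
}
```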
From 066c188319964c7f8681c745b9c0a61add5d0ee7 Mon Sep 17 00:00:00 2001 From: Nicolas Cornu Date: Fri, 9 Feb 2024 11:13:25 +0100 Subject: [PATCH 52/97] Remove deprecated FileDriver/MPIOFileDriver (#949) --- include/highfive/H5File.hpp | 1 - include/highfive/H5FileDriver.hpp | 32 --------------------- include/highfive/bits/H5FileDriver_misc.hpp | 20 ------------- include/highfive/highfive.hpp | 1 - 4 files changed, 54 deletions(-) delete mode 100644 include/highfive/H5FileDriver.hpp delete mode 100644 include/highfive/bits/H5FileDriver_misc.hpp diff --git a/include/highfive/H5File.hpp b/include/highfive/H5File.hpp index 9b393e5a3..a8db5f2a1 100644 --- a/include/highfive/H5File.hpp +++ b/include/highfive/H5File.hpp @@ -10,7 +10,6 @@ #include -#include "H5FileDriver.hpp" #include "H5Object.hpp" #include "H5PropertyList.hpp" #include "bits/H5Annotate_traits.hpp" diff --git a/include/highfive/H5FileDriver.hpp b/include/highfive/H5FileDriver.hpp deleted file mode 100644 index 2cd4813a3..000000000 --- a/include/highfive/H5FileDriver.hpp +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c), 2017, Adrien Devresse - * - * Distributed under the Boost Software License, Version 1.0. - * (See accompanying file LICENSE_1_0.txt or copy at - * http://www.boost.org/LICENSE_1_0.txt) - * - */ -#pragma once - -#include "H5PropertyList.hpp" -#include "bits/H5_definitions.hpp" - -namespace HighFive { - -/// \brief file driver base concept -/// \deprecated Use FileAccessProps directly -class H5_DEPRECATED("Use FileAccessProps directly") FileDriver: public FileAccessProps {}; - -#ifdef H5_HAVE_PARALLEL -/// \brief MPIIO Driver for Parallel HDF5 -/// \deprecated Add MPIOFileAccess directly to FileAccessProps -class H5_DEPRECATED("Add MPIOFileAccess directly to FileAccessProps") MPIOFileDriver - : public FileAccessProps { - public: - inline MPIOFileDriver(MPI_Comm mpi_comm, MPI_Info mpi_info); -}; -#endif - -} // namespace HighFive - -#include "bits/H5FileDriver_misc.hpp" diff --git a/include/highfive/bits/H5FileDriver_misc.hpp b/include/highfive/bits/H5FileDriver_misc.hpp deleted file mode 100644 index a6331bd5a..000000000 --- a/include/highfive/bits/H5FileDriver_misc.hpp +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c), 2017-2018, Adrien Devresse - * Juan Hernando - * - * Distributed under the Boost Software License, Version 1.0. 
- * (See accompanying file LICENSE_1_0.txt or copy at - * http://www.boost.org/LICENSE_1_0.txt) - * - */ -#pragma once - -namespace HighFive { - -#ifdef H5_HAVE_PARALLEL -inline MPIOFileDriver::MPIOFileDriver(MPI_Comm comm, MPI_Info info) { - add(MPIOFileAccess(comm, info)); -} -#endif - -} // namespace HighFive diff --git a/include/highfive/highfive.hpp b/include/highfive/highfive.hpp index f5e20cae9..543fe80bc 100644 --- a/include/highfive/highfive.hpp +++ b/include/highfive/highfive.hpp @@ -5,7 +5,6 @@ #include #include #include -#include #include #include #include From ece5bae7029c770c6594b1273676f027e925dc91 Mon Sep 17 00:00:00 2001 From: Nicolas Cornu Date: Fri, 9 Feb 2024 11:55:46 +0100 Subject: [PATCH 53/97] Remove deprecated FixedLenStringArray (#946) --- include/highfive/H5DataType.hpp | 127 ------------- include/highfive/bits/H5DataType_misc.hpp | 52 ------ include/highfive/bits/H5Inspector_misc.hpp | 55 ------ include/highfive/bits/H5Node_traits.hpp | 8 - include/highfive/bits/H5Node_traits_misc.hpp | 13 -- include/highfive/bits/H5Utils.hpp | 6 - include/highfive/bits/H5_definitions.hpp | 5 - tests/unit/CMakeLists.txt | 2 - tests/unit/deprecated/CMakeLists.txt | 10 - .../test_fixed_len_string_array.cpp | 172 ------------------ 10 files changed, 450 deletions(-) delete mode 100644 tests/unit/deprecated/CMakeLists.txt delete mode 100644 tests/unit/deprecated/test_fixed_len_string_array.cpp diff --git a/include/highfive/H5DataType.hpp b/include/highfive/H5DataType.hpp index b15f62165..efc924a17 100644 --- a/include/highfive/H5DataType.hpp +++ b/include/highfive/H5DataType.hpp @@ -340,133 +340,6 @@ DataType create_datatype(); /// \brief Create a DataType instance representing type T and perform a sanity check on its size template DataType create_and_check_datatype(); - - -namespace deprecated { -/// -/// \brief A structure representing a set of fixed-length strings -/// -/// Although fixed-len arrays can be created 'raw' without the need for -/// this structure, to retrieve results efficiently it must be used. -/// -/// \tparam N Size of the string in bytes, including the null character. Note, -/// that all string must be null-terminated. -/// -template -class FixedLenStringArray { - public: - FixedLenStringArray() = default; - - /// - /// \brief Create a FixedStringArray from a raw contiguous buffer. - /// - /// The argument `n_strings` specifies the number of strings. - /// - FixedLenStringArray(const char array[][N], std::size_t n_strings); - - /// - /// \brief Create a FixedStringArray from a sequence of strings. 
- /// - /// Such conversion involves a copy, original vector is not modified - /// - explicit FixedLenStringArray(const std::vector& vec); - - FixedLenStringArray(const std::string* iter_begin, const std::string* iter_end); - - FixedLenStringArray(const std::initializer_list&); - - /// - /// \brief Append an std::string to the buffer structure - /// - void push_back(const std::string&); - - void push_back(const std::array&); - - /// - /// \brief Retrieve a string from the structure as std::string - /// - std::string getString(std::size_t index) const; - - // Container interface - inline const char* operator[](std::size_t i) const noexcept { - return datavec[i].data(); - } - inline const char* at(std::size_t i) const { - return datavec.at(i).data(); - } - inline bool empty() const noexcept { - return datavec.empty(); - } - inline std::size_t size() const noexcept { - return datavec.size(); - } - inline void resize(std::size_t n) { - datavec.resize(n); - } - inline const char* front() const { - return datavec.front().data(); - } - inline const char* back() const { - return datavec.back().data(); - } - inline char* data() noexcept { - return datavec[0].data(); - } - inline const char* data() const noexcept { - return datavec[0].data(); - } - - private: - using vector_t = typename std::vector>; - - public: - // Use the underlying iterator - using iterator = typename vector_t::iterator; - using const_iterator = typename vector_t::const_iterator; - using reverse_iterator = typename vector_t::reverse_iterator; - using const_reverse_iterator = typename vector_t::const_reverse_iterator; - using value_type = typename vector_t::value_type; - - inline iterator begin() noexcept { - return datavec.begin(); - } - inline iterator end() noexcept { - return datavec.end(); - } - inline const_iterator begin() const noexcept { - return datavec.begin(); - } - inline const_iterator cbegin() const noexcept { - return datavec.cbegin(); - } - inline const_iterator end() const noexcept { - return datavec.end(); - } - inline const_iterator cend() const noexcept { - return datavec.cend(); - } - inline reverse_iterator rbegin() noexcept { - return datavec.rbegin(); - } - inline reverse_iterator rend() noexcept { - return datavec.rend(); - } - inline const_reverse_iterator rbegin() const noexcept { - return datavec.rbegin(); - } - inline const_reverse_iterator rend() const noexcept { - return datavec.rend(); - } - - private: - vector_t datavec; -}; -} // namespace deprecated - -template -using FixedLenStringArray H5_DEPRECATED_USING("Use 'std::vector'.") = - deprecated::FixedLenStringArray; - } // namespace HighFive diff --git a/include/highfive/bits/H5DataType_misc.hpp b/include/highfive/bits/H5DataType_misc.hpp index 8098a675c..4321a4658 100644 --- a/include/highfive/bits/H5DataType_misc.hpp +++ b/include/highfive/bits/H5DataType_misc.hpp @@ -206,13 +206,6 @@ class AtomicType: public DataType { : DataType(create_string(StrLen)) {} }; -template -class AtomicType>: public DataType { - public: - inline AtomicType() - : DataType(create_string(StrLen)) {} -}; - template class AtomicType>: public DataType { public: @@ -239,51 +232,6 @@ AtomicType::AtomicType() { } -namespace deprecated { -template -inline FixedLenStringArray::FixedLenStringArray(const char array[][N], std::size_t length) { - datavec.resize(length); - std::memcpy(datavec[0].data(), array[0].data(), N * length); -} - -template -inline FixedLenStringArray::FixedLenStringArray(const std::string* iter_begin, - const std::string* iter_end) { - 
datavec.reserve(static_cast(iter_end - iter_begin)); - for (std::string const* it = iter_begin; it != iter_end; ++it) { - push_back(*it); - } -} - -template -inline FixedLenStringArray::FixedLenStringArray(const std::vector& vec) - : FixedLenStringArray(vec.data(), vec.data() + vec.size()) {} - -template -inline FixedLenStringArray::FixedLenStringArray( - const std::initializer_list& init_list) - : FixedLenStringArray(init_list.begin(), init_list.end()) {} - -template -inline void FixedLenStringArray::push_back(const std::string& src) { - datavec.emplace_back(); - const size_t length = std::min(N - 1, src.length()); - std::memcpy(datavec.back().data(), src.c_str(), length); - datavec.back()[length] = 0; -} - -template -inline void FixedLenStringArray::push_back(const std::array& src) { - datavec.emplace_back(); - std::copy(src.begin(), src.end(), datavec.back().data()); -} - -template -inline std::string FixedLenStringArray::getString(std::size_t i) const { - return std::string(datavec[i].data()); -} -} // namespace deprecated - // Internal // Reference mapping template <> diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index d85a4de12..49606005f 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -289,61 +289,6 @@ struct inspector: type_helper { } }; -template -struct inspector> { - using type = deprecated::FixedLenStringArray; - using value_type = char*; - using base_type = deprecated::FixedLenStringArray; - using hdf5_type = char; - - static constexpr size_t ndim = 1; - static constexpr size_t recursive_ndim = ndim; - static constexpr bool is_trivially_copyable = false; - - static std::vector getDimensions(const type& val) { - return std::vector{val.size()}; - } - - static size_t getSizeVal(const type& val) { - return N * compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return N * compute_total_size(dims); - } - - static void prepare(type& /* val */, const std::vector& dims) { - if (dims[0] > N) { - std::ostringstream os; - os << "Size of FixedlenStringArray (" << N << ") is too small for dims (" << dims[0] - << ")."; - throw DataSpaceException(os.str()); - } - } - - static hdf5_type* data(type& val) { - return val.data(); - } - - static const hdf5_type* data(const type& val) { - return val.data(); - } - - static void serialize(const type& val, hdf5_type* m) { - for (size_t i = 0; i < val.size(); ++i) { - std::memcpy(m + i * N, val[i], N); - } - } - - static void unserialize(const hdf5_type* vec, const std::vector& dims, type& val) { - for (size_t i = 0; i < dims[0]; ++i) { - std::array s; - std::memcpy(s.data(), vec + (i * N), N); - val.push_back(s); - } - } -}; - template struct inspector> { using type = std::vector; diff --git a/include/highfive/bits/H5Node_traits.hpp b/include/highfive/bits/H5Node_traits.hpp index 6f4a93ce6..56d9f8d3a 100644 --- a/include/highfive/bits/H5Node_traits.hpp +++ b/include/highfive/bits/H5Node_traits.hpp @@ -78,14 +78,6 @@ class NodeTraits { bool parents = true); - template - H5_DEPRECATED("Use 'std::vector'.") - DataSet createDataSet(const std::string& dataset_name, - const deprecated::FixedLenStringArray& data, - const DataSetCreateProps& createProps = DataSetCreateProps::Default(), - const DataSetAccessProps& accessProps = DataSetAccessProps::Default(), - bool parents = true); - /// /// \brief get an existing dataset in the current file /// \param dataset_name diff --git 
a/include/highfive/bits/H5Node_traits_misc.hpp b/include/highfive/bits/H5Node_traits_misc.hpp index a98600598..49cfc639d 100644 --- a/include/highfive/bits/H5Node_traits_misc.hpp +++ b/include/highfive/bits/H5Node_traits_misc.hpp @@ -80,19 +80,6 @@ inline DataSet NodeTraits::createDataSet(const std::string& dataset_na return ds; } -template -template -inline DataSet NodeTraits::createDataSet(const std::string& dataset_name, - const deprecated::FixedLenStringArray& data, - const DataSetCreateProps& createProps, - const DataSetAccessProps& accessProps, - bool parents) { - DataSet ds = createDataSet( - dataset_name, DataSpace(data.size()), createProps, accessProps, parents); - ds.write(data); - return ds; -} - template inline DataSet NodeTraits::getDataSet(const std::string& dataset_name, const DataSetAccessProps& accessProps) const { diff --git a/include/highfive/bits/H5Utils.hpp b/include/highfive/bits/H5Utils.hpp index b3f039e20..c005bd22b 100644 --- a/include/highfive/bits/H5Utils.hpp +++ b/include/highfive/bits/H5Utils.hpp @@ -25,12 +25,6 @@ namespace HighFive { -namespace deprecated { -// If ever used, recognize dimensions of FixedLenStringArray -template -class FixedLenStringArray; -} // namespace deprecated - namespace details { // converter function for hsize_t -> size_t when hsize_t != size_t template diff --git a/include/highfive/bits/H5_definitions.hpp b/include/highfive/bits/H5_definitions.hpp index ad4b95af2..56993c855 100644 --- a/include/highfive/bits/H5_definitions.hpp +++ b/include/highfive/bits/H5_definitions.hpp @@ -45,11 +45,6 @@ class AtomicType; template class AnnotateTraits; -namespace deprecated { -template -class FixedLenStringArray; -} - template class NodeTraits; diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 18110d83f..6c19a1d2b 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -64,5 +64,3 @@ if(HIGHFIVE_TEST_SINGLE_INCLUDES) target_link_libraries("tests_include_${CLASS_NAME}" HighFive HighFiveWarnings) endforeach() endif() - -add_subdirectory(deprecated) diff --git a/tests/unit/deprecated/CMakeLists.txt b/tests/unit/deprecated/CMakeLists.txt deleted file mode 100644 index 5e515374b..000000000 --- a/tests/unit/deprecated/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -foreach(test_name test_fixed_len_string_array) - add_executable(${test_name} "${test_name}.cpp") - - target_link_libraries(${test_name} HighFive HighFiveWarnings Catch2::Catch2WithMain) - catch_discover_tests(${test_name}) - - if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU") - target_compile_options(${test_name} PRIVATE -Wno-deprecated-declarations) - endif() -endforeach() diff --git a/tests/unit/deprecated/test_fixed_len_string_array.cpp b/tests/unit/deprecated/test_fixed_len_string_array.cpp deleted file mode 100644 index 1d0c33aaa..000000000 --- a/tests/unit/deprecated/test_fixed_len_string_array.cpp +++ /dev/null @@ -1,172 +0,0 @@ -#include - -#include -#include "../tests_high_five.hpp" - -namespace HighFive { - -TEST_CASE("HighFiveFixedLenStringArray") { - const std::string file_name("fixed_len_string_array.h5"); - - // Create a new file using the default property lists. - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - - { // Dedicated FixedLenStringArray (now deprecated). 
- FixedLenStringArray<10> arr{"0000000", "1111111"}; - - // More API: test inserting something - arr.push_back("2222"); - auto ds = file.createDataSet("ds7", arr); // Short syntax ok - - // Recover truncating - FixedLenStringArray<4> array_back; - ds.read(array_back); - CHECK(array_back.size() == 3); - CHECK(array_back[0] == std::string("000")); - CHECK(array_back[1] == std::string("111")); - CHECK(array_back[2] == std::string("222")); - CHECK(array_back.getString(1) == "111"); - CHECK(array_back.front() == std::string("000")); - CHECK(array_back.back() == std::string("222")); - CHECK(array_back.data() == std::string("000")); - array_back.data()[0] = 'x'; - CHECK(array_back.data() == std::string("x00")); - - for (auto& raw_elem: array_back) { - raw_elem[1] = 'y'; - } - CHECK(array_back.getString(1) == "1y1"); - for (auto iter = array_back.cbegin(); iter != array_back.cend(); ++iter) { - CHECK((*iter)[1] == 'y'); - } - } -} - -template -static void check_fixed_len_string_array_contents(const FixedLenStringArray& array, - const std::vector& expected) { - REQUIRE(array.size() == expected.size()); - - for (size_t i = 0; i < array.size(); ++i) { - CHECK(array[i] == expected[i]); - } -} - - -TEST_CASE("HighFiveFixedLenStringArrayStructure") { - using fixed_array_t = FixedLenStringArray<10>; - // increment the characters of a string written in a std::array - auto increment_string = [](const fixed_array_t::value_type arr) { - fixed_array_t::value_type output(arr); - for (auto& c: output) { - if (c == 0) { - break; - } - ++c; - } - return output; - }; - - SECTION("create from std::vector (onpoint)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<4>(expected); - check_fixed_len_string_array_contents(actual, expected); - } - - SECTION("create from std::vector (oversized)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<8>(expected); - check_fixed_len_string_array_contents(actual, expected); - } - - SECTION("create from pointers (onpoint)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<4>(expected.data(), expected.data() + expected.size()); - check_fixed_len_string_array_contents(actual, expected); - } - - SECTION("create from pointers (oversized)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<8>(expected.data(), expected.data() + expected.size()); - check_fixed_len_string_array_contents(actual, expected); - } - - - SECTION("create from std::initializer_list (onpoint)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<4>{"000", "111"}; - check_fixed_len_string_array_contents(actual, expected); - } - - SECTION("create from std::initializer_list (oversized)") { - auto expected = std::vector{"000", "111"}; - auto actual = FixedLenStringArray<8>{"000", "111"}; - check_fixed_len_string_array_contents(actual, expected); - } - - // manipulate FixedLenStringArray with std::copy - SECTION("compatible with std::copy") { - const fixed_array_t arr1{"0000000", "1111111"}; - fixed_array_t arr2{"0000000", "1111111"}; - std::copy(arr1.begin(), arr1.end(), std::back_inserter(arr2)); - CHECK(arr2.size() == 4); - } - - SECTION("compatible with std::transform") { - fixed_array_t arr; - { - const fixed_array_t arr1{"0000000", "1111111"}; - std::transform(arr1.begin(), arr1.end(), std::back_inserter(arr), increment_string); - } - CHECK(arr.size() == 2); - CHECK(arr[0] == std::string("1111111")); - CHECK(arr[1] == std::string("2222222")); - } - - 
SECTION("compatible with std::transform (reverse iterator)") { - fixed_array_t arr; - { - const fixed_array_t arr1{"0000000", "1111111"}; - std::copy(arr1.rbegin(), arr1.rend(), std::back_inserter(arr)); - } - CHECK(arr.size() == 2); - CHECK(arr[0] == std::string("1111111")); - CHECK(arr[1] == std::string("0000000")); - } - - SECTION("compatible with std::remove_copy_if") { - fixed_array_t arr2; - { - const fixed_array_t arr1{"0000000", "1111111"}; - std::remove_copy_if(arr1.begin(), - arr1.end(), - std::back_inserter(arr2), - [](const fixed_array_t::value_type& s) { - return std::strncmp(s.data(), "1111111", 7) == 0; - }); - } - CHECK(arr2.size() == 1); - CHECK(arr2[0] == std::string("0000000")); - } -} - -TEST_CASE("HighFiveFixedLenStringArrayAttribute") { - const std::string file_name("fixed_array_attr.h5"); - // Create a new file using the default property lists. - { - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - FixedLenStringArray<10> arr{"Hello", "world"}; - file.createAttribute("str", arr); - } - // Re-read it - { - File file(file_name); - FixedLenStringArray<8> arr; // notice the output strings can be smaller - file.getAttribute("str").read(arr); - CHECK(arr.size() == 2); - CHECK(arr[0] == std::string("Hello")); - CHECK(arr[1] == std::string("world")); - } -} - -} // namespace HighFive From f1fbf21b5bf3b0d1fde96c1e9d2aeb156627d848 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 9 Feb 2024 17:01:47 +0100 Subject: [PATCH 54/97] Use enum class for `DataSpace::DataspaceType`. (#900) --- include/highfive/H5DataSpace.hpp | 6 +++++- tests/unit/tests_high_five_base.cpp | 12 ++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/include/highfive/H5DataSpace.hpp b/include/highfive/H5DataSpace.hpp index 7c7c5860a..463648507 100644 --- a/include/highfive/H5DataSpace.hpp +++ b/include/highfive/H5DataSpace.hpp @@ -45,12 +45,16 @@ class DataSpace: public Object { /// This enum is needed otherwise we will not be able to distringuish between both with normal /// constructors. Both have a dimension of 0. /// \since 1.3 - enum DataspaceType { + enum class DataspaceType { dataspace_scalar, ///< Value to create scalar DataSpace dataspace_null, ///< Value to create null DataSpace // simple dataspace are handle directly from their dimensions }; + // For backward compatibility: `DataSpace::dataspace_scalar`. + constexpr static DataspaceType dataspace_scalar = DataspaceType::dataspace_scalar; + constexpr static DataspaceType dataspace_null = DataspaceType::dataspace_null; + /// \brief Create a DataSpace of N-dimensions from a std::vector. 
/// \param dims Dimensions of the new DataSpace /// diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 03e30438c..bd00ce15c 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -786,6 +786,12 @@ TEST_CASE("DataSpace::getElementCount") { CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_NULL); } + SECTION("null initializer_list") { + auto space = DataSpace{DataSpace::dataspace_null}; + CHECK(space.getElementCount() == 0); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_NULL); + } + SECTION("null named ctor") { auto space = DataSpace::Null(); CHECK(space.getElementCount() == 0); @@ -798,6 +804,12 @@ TEST_CASE("DataSpace::getElementCount") { CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_SCALAR); } + SECTION("scalar initializer_list") { + auto space = DataSpace{DataSpace::dataspace_scalar}; + CHECK(space.getElementCount() == 1); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_SCALAR); + } + SECTION("scalar named ctor") { auto space = DataSpace::Scalar(); CHECK(space.getElementCount() == 1); From f71c7c4cf8687769be5ff9c281ceefaba5e43e2f Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 12 Feb 2024 10:17:53 +0100 Subject: [PATCH 55/97] Improve doc of 'HIGHFIVE_REGISTER_TYPE'. (#955) --- include/highfive/H5DataType.hpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/include/highfive/H5DataType.hpp b/include/highfive/H5DataType.hpp index efc924a17..985eb6dd4 100644 --- a/include/highfive/H5DataType.hpp +++ b/include/highfive/H5DataType.hpp @@ -348,12 +348,15 @@ DataType create_and_check_datatype(); /// This macro has to be called outside of any namespace. /// /// \code{.cpp} +/// namespace app { /// enum FooBar { FOO = 1, BAR = 2 }; /// EnumType create_enum_foobar() { /// return EnumType({{"FOO", FooBar::FOO}, /// {"BAR", FooBar::BAR}}); /// } -/// HIGHFIVE_REGISTER_TYPE(FooBar, create_enum_foobar) +/// } +/// +/// HIGHFIVE_REGISTER_TYPE(FooBar, ::app::create_enum_foobar) /// \endcode #define HIGHFIVE_REGISTER_TYPE(type, function) \ template <> \ From 2877159d285e0b6978c77a85fe6f32786d6c059f Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Wed, 14 Feb 2024 17:06:51 +0100 Subject: [PATCH 56/97] Rework CMake code for v3.0.0. (#897) Please consult `README.md`, the migration guide and `test/cmake_integration` for information about what changed. 
--- .github/workflows/ci.yml | 26 +-- .github/workflows/coverage.yml | 8 +- .github/workflows/version_file.yml | 2 +- CMake/HighFiveConfig.cmake.in | 74 -------- CMake/HighFiveTargetDeps.cmake | 122 ------------ CMake/HighFiveTargetExport.cmake | 48 ----- CMake/config/TestHelpers.cmake | 113 ----------- CMakeLists.txt | 179 ++++++++++-------- README.md | 131 ++++++------- cmake/HighFiveConfig.cmake | 19 ++ cmake/HighFiveOptionalDependencies.cmake | 27 +++ {CMake => cmake}/HighFiveWarnings.cmake | 0 doc/developer_guide.md | 4 +- doc/installation.md | 2 +- include/highfive/H5Version.hpp | 8 +- src/examples/CMakeLists.txt | 127 ++++++++----- tests/cmake_integration/README.md | 22 +++ .../application/CMakeLists.txt | 87 +++++++++ .../application/deps/.gitignore | 2 + .../application/hi5_application.cpp | 36 ++++ .../dependent_library/CMakeLists.txt | 134 +++++++++++++ .../cmake/Hi5DependentConfig.cmake.in | 24 +++ .../include/hi5_dependent/read.hpp | 17 ++ .../include/hi5_dependent/write.hpp | 8 + .../src/hi5_dependent/boost.cpp | 17 ++ .../src/hi5_dependent/read_vector.cpp | 7 + .../src/hi5_dependent/write_vector.cpp | 9 + .../test_cmake_integration.sh | 104 ++++++++++ .../test_dependent_library/CMakeList.txt | 22 +++ .../test_dependent_library/CMakeLists.txt | 22 +++ .../test_dependent_library.cpp | 34 ++++ tests/test_dependent_library/CMakeLists.txt | 42 ---- tests/test_dependent_library/deps/.gitignore | 4 - .../include/simpleton.hpp | 14 -- tests/test_dependent_library/src/otherton.cpp | 5 - .../test_dependent_library/src/simpleton.cpp | 9 - tests/test_project/CMakeLists.txt | 25 --- tests/test_project/deps/.gitignore | 4 - .../read_write_vector_dataset.cpp | 1 - tests/test_project_integration.sh | 57 ------ tests/unit/CMakeLists.txt | 12 +- 41 files changed, 867 insertions(+), 741 deletions(-) delete mode 100644 CMake/HighFiveConfig.cmake.in delete mode 100644 CMake/HighFiveTargetDeps.cmake delete mode 100644 CMake/HighFiveTargetExport.cmake delete mode 100644 CMake/config/TestHelpers.cmake create mode 100644 cmake/HighFiveConfig.cmake create mode 100644 cmake/HighFiveOptionalDependencies.cmake rename {CMake => cmake}/HighFiveWarnings.cmake (100%) create mode 100644 tests/cmake_integration/README.md create mode 100644 tests/cmake_integration/application/CMakeLists.txt create mode 100644 tests/cmake_integration/application/deps/.gitignore create mode 100644 tests/cmake_integration/application/hi5_application.cpp create mode 100644 tests/cmake_integration/dependent_library/CMakeLists.txt create mode 100644 tests/cmake_integration/dependent_library/cmake/Hi5DependentConfig.cmake.in create mode 100644 tests/cmake_integration/dependent_library/include/hi5_dependent/read.hpp create mode 100644 tests/cmake_integration/dependent_library/include/hi5_dependent/write.hpp create mode 100644 tests/cmake_integration/dependent_library/src/hi5_dependent/boost.cpp create mode 100644 tests/cmake_integration/dependent_library/src/hi5_dependent/read_vector.cpp create mode 100644 tests/cmake_integration/dependent_library/src/hi5_dependent/write_vector.cpp create mode 100644 tests/cmake_integration/test_cmake_integration.sh create mode 100644 tests/cmake_integration/test_dependent_library/CMakeList.txt create mode 100644 tests/cmake_integration/test_dependent_library/CMakeLists.txt create mode 100644 tests/cmake_integration/test_dependent_library/test_dependent_library.cpp delete mode 100644 tests/test_dependent_library/CMakeLists.txt delete mode 100644 tests/test_dependent_library/deps/.gitignore delete mode 
100644 tests/test_dependent_library/include/simpleton.hpp delete mode 100644 tests/test_dependent_library/src/otherton.cpp delete mode 100644 tests/test_dependent_library/src/simpleton.cpp delete mode 100644 tests/test_project/CMakeLists.txt delete mode 100644 tests/test_project/deps/.gitignore delete mode 120000 tests/test_project/read_write_vector_dataset.cpp delete mode 100644 tests/test_project_integration.sh diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8730ed410..3460c92fe 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,22 +41,22 @@ jobs: - config: os: ubuntu-20.04 pkgs: '' - flags: '-DHIGHFIVE_USE_BOOST:Bool=OFF' + flags: '-DHIGHFIVE_TEST_BOOST:Bool=OFF' - config: os: ubuntu-20.04 pkgs: 'libboost-all-dev libopencv-dev' - flags: '-DHIGHFIVE_USE_OPENCV:Bool=ON -GNinja' + flags: '-DHIGHFIVE_TEST_OPENCV:Bool=ON -GNinja' - config: os: ubuntu-latest pkgs: 'libboost-all-dev libeigen3-dev libopencv-dev' - flags: '-DHIGHFIVE_USE_EIGEN:Bool=ON -DHIGHFIVE_USE_OPENCV:Bool=ON -GNinja' + flags: '-DHIGHFIVE_TEST_EIGEN:Bool=ON -DHIGHFIVE_TEST_OPENCV:Bool=ON -GNinja' - config: os: ubuntu-20.04 pkgs: 'libboost-all-dev' flags: '-DCMAKE_CXX_STANDARD=17' - config: os: ubuntu-22.04 - flags: '-DHIGHFIVE_USE_BOOST=Off -DCMAKE_CXX_STANDARD=20' + flags: '-DHIGHFIVE_TEST_BOOST=Off -DCMAKE_CXX_STANDARD=20' steps: - uses: actions/checkout@v3 @@ -195,8 +195,8 @@ jobs: run: | CMAKE_OPTIONS=( -GNinja - -DHIGHFIVE_USE_BOOST:BOOL=ON - -DHIGHFIVE_USE_EIGEN:BOOL=ON + -DHIGHFIVE_TEST_BOOST:BOOL=ON + -DHIGHFIVE_TEST_EIGEN:BOOL=ON -DHIGHFIVE_BUILD_DOCS:BOOL=FALSE -DHIGHFIVE_GLIBCXX_ASSERTIONS=${HIGHFIVE_GLIBCXX_ASSERTIONS:-OFF} -DHIGHFIVE_SANITIZER=${HIGHFIVE_SANITIZER:-OFF} @@ -246,7 +246,7 @@ jobs: sudo apt-get -qq install libhdf5-openmpi-dev - name: "CMake Project Integration" - run: bash tests/test_project_integration.sh + run: bash tests/cmake_integration/test_cmake_integration.sh # Job checking the benchmarks work @@ -291,9 +291,9 @@ jobs: run: | CMAKE_OPTIONS=( -GNinja - -DHIGHFIVE_USE_BOOST:BOOL=ON - -DHIGHFIVE_USE_EIGEN:BOOL=ON - -DHIGHFIVE_USE_XTENSOR:BOOL=ON + -DHIGHFIVE_TEST_BOOST:BOOL=ON + -DHIGHFIVE_TEST_EIGEN:BOOL=ON + -DHIGHFIVE_TEST_XTENSOR:BOOL=ON -DHIGHFIVE_BUILD_DOCS:BOOL=FALSE -DHIGHFIVE_TEST_SINGLE_INCLUDES=ON -DCMAKE_CXX_FLAGS="-coverage -O0" @@ -348,9 +348,9 @@ jobs: -T ${{matrix.vs-toolset}} -DCMAKE_CXX_STANDARD=${{matrix.cxxstd}} -DHIGHFIVE_UNIT_TESTS=ON - -DHIGHFIVE_USE_BOOST:BOOL=ON - -DHIGHFIVE_USE_EIGEN:BOOL=ON - -DHIGHFIVE_USE_XTENSOR:BOOL=ON + -DHIGHFIVE_TEST_BOOST:BOOL=ON + -DHIGHFIVE_TEST_EIGEN:BOOL=ON + -DHIGHFIVE_TEST_XTENSOR:BOOL=ON -DHIGHFIVE_TEST_SINGLE_INCLUDES=ON ) source $GITHUB_WORKSPACE/.github/build.sh diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index b3f4a212b..aaf575a7d 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -45,10 +45,10 @@ jobs: run: | CMAKE_OPTIONS=( -GNinja - -DHIGHFIVE_USE_BOOST:BOOL=ON - -DHIGHFIVE_USE_EIGEN:BOOL=ON - -DHIGHFIVE_USE_OPENCV:BOOL=ON - #-DHIGHFIVE_USE_XTENSOR:BOOL=ON + -DHIGHFIVE_TEST_BOOST:BOOL=ON + -DHIGHFIVE_TEST_EIGEN:BOOL=ON + -DHIGHFIVE_TEST_OPENCV:BOOL=ON + #-DHIGHFIVE_TEST_XTENSOR:BOOL=ON -DHIGHFIVE_TEST_SINGLE_INCLUDES=ON -DHIGHFIVE_BUILD_DOCS:BOOL=FALSE -DCMAKE_CXX_FLAGS="-coverage -O0" diff --git a/.github/workflows/version_file.yml b/.github/workflows/version_file.yml index 816137e95..56703f593 100644 --- a/.github/workflows/version_file.yml +++ b/.github/workflows/version_file.yml @@ -27,7 +27,7 @@ jobs: - name: 
Build run: | # Will trigger `configure_file` for H5Version.hpp. - cmake -DHIGHFIVE_USE_BOOST=Off -B build . + cmake -B build . - name: Test run: | diff --git a/CMake/HighFiveConfig.cmake.in b/CMake/HighFiveConfig.cmake.in deleted file mode 100644 index 464a645d0..000000000 --- a/CMake/HighFiveConfig.cmake.in +++ /dev/null @@ -1,74 +0,0 @@ -function(copy_interface_properties target source) - foreach(prop - INTERFACE_COMPILE_DEFINITIONS - INTERFACE_COMPILE_FEATURES - INTERFACE_COMPILE_OPTIONS - INTERFACE_INCLUDE_DIRECTORIES - INTERFACE_LINK_LIBRARIES - INTERFACE_SOURCES - INTERFACE_SYSTEM_INCLUDE_DIRECTORIES) - set_property(TARGET ${target} APPEND PROPERTY ${prop} $) - endforeach() -endfunction() - -if(TARGET HighFive) - return() -endif() - -@PACKAGE_INIT@ - -# Get HighFive targets -include("${CMAKE_CURRENT_LIST_DIR}/HighFiveTargets.cmake") - -# Recreate combined HighFive -add_library(HighFive INTERFACE IMPORTED) -set_property(TARGET HighFive APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS MPI_NO_CPPBIND) # No c++ bindings - -# Ensure we activate required C++ std -if(NOT DEFINED CMAKE_CXX_STANDARD) - if(CMAKE_VERSION VERSION_LESS 3.8) - message(WARNING "HighFive requires minimum C++11. (C++14 for XTensor) \ - You may need to set CMAKE_CXX_STANDARD in you project") - else() - # A client request for a higher std overrides this - target_compile_features(HighFive INTERFACE cxx_std_11) - endif() -endif() - -# If the user sets this flag, all dependencies are preserved. -# Useful in central deployments where dependencies are not prepared later -set(HIGHFIVE_USE_INSTALL_DEPS @HIGHFIVE_USE_INSTALL_DEPS@ CACHE BOOL "Use original Highfive dependencies") -if(HIGHFIVE_USE_INSTALL_DEPS) - # If enabled in the deploy config, request c++14 - if(@HIGHFIVE_USE_XTENSOR@ AND NOT CMAKE_VERSION VERSION_LESS 3.8) - set_property(TARGET HighFive APPEND PROPERTY INTERFACE_COMPILE_FEATURES cxx_std_14) - endif() - message(STATUS "HIGHFIVE @PROJECT_VERSION@: Using original dependencies (HIGHFIVE_USE_INSTALL_DEPS=YES)") - copy_interface_properties(HighFive HighFive_HighFive) - return() -endif() - -# When not using the pre-built dependencies, give user options -if(DEFINED HIGHFIVE_USE_BOOST) - set(HIGHFIVE_USE_BOOST ${HIGHFIVE_USE_BOOST} CACHE BOOL "Enable Boost Support") -else() - set(HIGHFIVE_USE_BOOST @HIGHFIVE_USE_BOOST@ CACHE BOOL "Enable Boost Support") -endif() -set(HIGHFIVE_USE_EIGEN "${HIGHFIVE_USE_EIGEN}" CACHE BOOL "Enable Eigen testing") -set(HIGHFIVE_USE_XTENSOR "${HIGHFIVE_USE_XTENSOR}" CACHE BOOL "Enable xtensor testing") -set(HIGHFIVE_PARALLEL_HDF5 @HIGHFIVE_PARALLEL_HDF5@ CACHE BOOL "Enable Parallel HDF5 support") -option(HIGHFIVE_VERBOSE "Enable verbose logging" @HIGHFIVE_VERBOSE@) - -if(HIGHFIVE_USE_XTENSOR AND NOT CMAKE_VERSION VERSION_LESS 3.8) - set_property(TARGET HighFive APPEND PROPERTY INTERFACE_COMPILE_FEATURES cxx_std_14) -endif() - -if(NOT HighFive_FIND_QUIETLY) - message(STATUS "HIGHFIVE @PROJECT_VERSION@: (Re)Detecting Highfive dependencies (HIGHFIVE_USE_INSTALL_DEPS=NO)") -endif() -include("${CMAKE_CURRENT_LIST_DIR}/HighFiveTargetDeps.cmake") -foreach(dependency HighFive_libheaders libdeps) - copy_interface_properties(HighFive ${dependency}) -endforeach() - -check_required_components(HighFive) diff --git a/CMake/HighFiveTargetDeps.cmake b/CMake/HighFiveTargetDeps.cmake deleted file mode 100644 index 919b53544..000000000 --- a/CMake/HighFiveTargetDeps.cmake +++ /dev/null @@ -1,122 +0,0 @@ -# Link against target system libs -# ------------------------------- - -if(NOT TARGET libdeps) - - # 
Independent target to make it possible to have new dependencies each build - add_library(libdeps INTERFACE) - - if(HIGHFIVE_VERBOSE) - target_compile_definitions(libdeps INTERFACE -DHIGHFIVE_LOG_LEVEL=0) - endif() - - if(HIGHFIVE_GLIBCXX_ASSERTIONS) - target_compile_definitions(libdeps INTERFACE -D_GLIBCXX_ASSERTIONS) - endif() - - if(HIGHFIVE_HAS_FRIEND_DECLARATIONS) - target_compile_definitions(libdeps INTERFACE -DHIGHFIVE_HAS_FRIEND_DECLARATIONS=1) - endif() - - if(HIGHFIVE_SANITIZER) - target_compile_options(libdeps INTERFACE -fsanitize=${HIGHFIVE_SANITIZER}) - target_link_options(libdeps INTERFACE -fsanitize=${HIGHFIVE_SANITIZER}) - endif() - - # HDF5 - if(NOT DEFINED HDF5_C_LIBRARIES) - set(HDF5_PREFER_PARALLEL ${HIGHFIVE_PARALLEL_HDF5}) - set(HDF5_USE_STATIC_LIBRARIES ${HIGHFIVE_STATIC_HDF5}) - find_package(HDF5 REQUIRED) - endif() - - if(HIGHFIVE_PARALLEL_HDF5 AND NOT HDF5_IS_PARALLEL) - message(WARNING "Parallel HDF5 requested but libhdf5 doesnt support it") - endif() - - target_include_directories(libdeps SYSTEM INTERFACE ${HDF5_INCLUDE_DIRS}) - target_link_libraries(libdeps INTERFACE ${HDF5_LIBRARIES}) - target_compile_definitions(libdeps INTERFACE ${HDF5_DEFINITIONS}) - target_compile_definitions(libdeps INTERFACE HIGHFIVE_HAS_CONCEPTS=$) - - - # Boost - if(HIGHFIVE_USE_BOOST) - if(NOT DEFINED Boost_NO_BOOST_CMAKE) - # HighFive deactivated finding Boost via Boost's own CMake files - # in Oct 2016 (commit '25627b085'). Likely to appease one cluster. - # Boost's CMake support has since improved and likely this setting - # isn't needed anymore. It is kept for backwards compatibility. - # However, a rework of HighFive's CMake code should consider removing - # this default. Hard coding this to true has been reported to cause - # build failures. 
- set(Boost_NO_BOOST_CMAKE TRUE) - endif() - find_package(Boost REQUIRED COMPONENTS system serialization) - # Dont use imported targets yet, not avail before cmake 3.5 - target_include_directories(libdeps SYSTEM INTERFACE ${Boost_INCLUDE_DIR}) - target_compile_definitions(libdeps INTERFACE BOOST_ALL_NO_LIB H5_USE_BOOST) - endif() - - # Half - if(HIGHFIVE_USE_HALF_FLOAT) - find_file(FOUND_HALF half.hpp) - if (NOT FOUND_HALF) - message(FATAL_ERROR "Half-precision floating-point support requested but file half.hpp not found") - endif() - target_compile_definitions(libdeps INTERFACE H5_USE_HALF_FLOAT) - endif() - - # Eigen - if(HIGHFIVE_USE_EIGEN) - if (NOT EIGEN3_INCLUDE_DIRS) - find_package(Eigen3 NO_MODULE) - if(Eigen3_FOUND) - message(STATUS "Found Eigen ${Eigen3_VERSION}: ${EIGEN3_INCLUDE_DIRS}") - else() - find_package(PkgConfig) - pkg_check_modules(EIGEN3 REQUIRED eigen3) - endif() - endif() - if (NOT EIGEN3_INCLUDE_DIRS) - message(FATAL_ERROR "Eigen was requested but could not be found") - endif() - target_include_directories(libdeps SYSTEM INTERFACE ${EIGEN3_INCLUDE_DIRS}) - target_compile_definitions(libdeps INTERFACE H5_USE_EIGEN) - endif() - - # xtensor - if(HIGHFIVE_USE_XTENSOR) - if (NOT xtensor_INCLUDE_DIRS) - find_package(xtensor REQUIRED) - endif() - if (NOT xtl_INCLUDE_DIRS) - find_package(xtl REQUIRED) - endif() - target_include_directories(libdeps SYSTEM INTERFACE ${xtensor_INCLUDE_DIRS} ${xtl_INCLUDE_DIRS}) - target_compile_definitions(libdeps INTERFACE H5_USE_XTENSOR) - endif() - - # OpenCV - if(HIGHFIVE_USE_OPENCV) - if (NOT OpenCV_INCLUDE_DIRS) - find_package(OpenCV REQUIRED) - endif() - target_include_directories(libdeps SYSTEM INTERFACE ${OpenCV_INCLUDE_DIRS}) - target_link_libraries(libdeps INTERFACE ${OpenCV_LIBS}) - target_compile_definitions(libdeps INTERFACE H5_USE_OPENCV) - endif() - - # MPI - if(HIGHFIVE_PARALLEL_HDF5 OR HDF5_IS_PARALLEL) - find_package(MPI REQUIRED) - target_include_directories(libdeps SYSTEM INTERFACE ${MPI_CXX_INCLUDE_PATH}) - target_link_libraries(libdeps INTERFACE ${MPI_CXX_LIBRARIES}) - if(CMAKE_VERSION VERSION_LESS 3.13) - target_link_libraries(libdeps INTERFACE ${MPI_CXX_LINK_FLAGS}) - else() - target_link_options(libdeps INTERFACE "SHELL:${MPI_CXX_LINK_FLAGS}") - endif() - endif() - -endif() diff --git a/CMake/HighFiveTargetExport.cmake b/CMake/HighFiveTargetExport.cmake deleted file mode 100644 index 9906f3951..000000000 --- a/CMake/HighFiveTargetExport.cmake +++ /dev/null @@ -1,48 +0,0 @@ - -# Define the HighFive INTERFACE library -add_library(libheaders INTERFACE) - -target_include_directories(libheaders INTERFACE - "$" - "$") - -# Combined HighFive -add_library(HighFive INTERFACE) -target_compile_definitions(HighFive INTERFACE MPI_NO_CPPBIND) # No c++ bindings -target_link_libraries(HighFive INTERFACE libheaders libdeps) - - -# Generate ${PROJECT_NAME}Config.cmake - -include(CMakePackageConfigHelpers) -configure_package_config_file(${CMAKE_CURRENT_LIST_DIR}/HighFiveConfig.cmake.in - ${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake - INSTALL_DESTINATION share/${PROJECT_NAME}/CMake) - -write_basic_package_version_file( - ${PROJECT_NAME}ConfigVersion.cmake - VERSION ${PROJECT_VERSION} - COMPATIBILITY AnyNewerVersion) - -install(FILES - CMake/HighFiveTargetDeps.cmake - ${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake - ${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake - DESTINATION share/${PROJECT_NAME}/CMake) - - -# Provides IMPORTED targets when using this project from build/install trees. 
- -# Specify targets to include in the HighFive Exports -install(TARGETS HighFive libheaders libdeps - EXPORT HighFiveTargets) - -# Generate & install the Export for the INSTALL_INTERFACE -install(EXPORT HighFiveTargets - NAMESPACE HighFive_ - FILE HighFiveTargets.cmake - DESTINATION share/${PROJECT_NAME}/CMake) - -# Generate the Export for the BUILD_INTERACE (hardly used) -export(EXPORT HighFiveTargets - FILE "${PROJECT_BINARY_DIR}/HighFiveTargets.cmake") diff --git a/CMake/config/TestHelpers.cmake b/CMake/config/TestHelpers.cmake deleted file mode 100644 index f3ca1cb74..000000000 --- a/CMake/config/TestHelpers.cmake +++ /dev/null @@ -1,113 +0,0 @@ -# TestHelpers.cmake -# -# set of Convenience functions for unit testing with cmake -# -# License: BSD 3 -# -# Copyright (c) 2016, Adrien Devresse -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: -# -# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. -# -# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. -# -# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- - - -## -# enable or disable detection of SLURM and MPIEXEC -option(AUTO_TEST_WITH_SLURM "automatically add srun as test prefix in a SLURM environment" TRUE) -option(AUTO_TEST_WITH_MPIEXEC "automatically add mpiexec as test prefix in a MPICH2/OpenMPI environment" TRUE) - -### -## -## Basic SLURM support -## the prefix "srun" is added to any test in the environment -## For a slurm test execution, simply run "salloc [your_exec_parameters] ctest" -## -## -if(AUTO_TEST_WITH_SLURM) - if(NOT DEFINED SLURM_SRUN_COMMAND) - find_program(SLURM_SRUN_COMMAND - NAMES "srun" - HINTS "${SLURM_ROOT}/bin") - endif() - - if(SLURM_SRUN_COMMAND) - set(TEST_EXEC_PREFIX_DEFAULT "${SLURM_SRUN_COMMAND}") - set(TEST_MPI_EXEC_PREFIX_DEFAULT "${SLURM_SRUN_COMMAND}") - set(TEST_MPI_EXEC_BIN_DEFAULT "${SLURM_SRUN_COMMAND}") - set(TEST_WITH_SLURM ON) - message(STATUS " - AUTO_TEST_WITH_SLURM with slurm cmd ${TEST_EXEC_PREFIX_DEFAULT} ") - message(STATUS " -- set test execution prefix to ${TEST_EXEC_PREFIX_DEFAULT} ") - message(STATUS " -- set MPI test execution prefix to ${TEST_MPI_EXEC_PREFIX_DEFAULT} ") - endif() - -endif() - -### -## Basic MPIExec support, will just forward mpiexec as prefix -## -if(AUTO_TEST_WITH_MPIEXEC AND NOT TEST_WITH_SLURM) - - if(NOT DEFINED MPIEXEC) - find_program(MPIEXEC - NAMES "mpiexec" - HINTS "${MPI_ROOT}/bin") - endif() - - - if(MPIEXEC) - set(TEST_MPI_EXEC_PREFIX_DEFAULT "${MPIEXEC}") - set(TEST_MPI_EXEC_BIN_DEFAULT "${MPIEXEC}") - set(TEST_WITH_MPIEXEC ON) - message(STATUS " - AUTO_TEST_WITH_MPIEXEC cmd ${MPIEXEC} ") - message(STATUS " -- set MPI test execution prefix to ${TEST_MPI_EXEC_PREFIX_DEFAULT} ") - - endif() - -endif() - - - -### -## MPI executor program path without arguments used for testing. -## default: srun or mpiexec if found -## -set(TEST_MPI_EXEC_BIN "${TEST_MPI_EXEC_BIN_DEFAULT}" CACHE STRING "path of the MPI executor (mpiexec, mpirun) for test execution") - - - -### -## Test execution prefix. Override this variable for any execution prefix required in clustered environment -## -## To specify manually a command with argument, e.g -DTEST_EXEC_PREFIX="/var/empty/bin/srun;-n;-4" for a srun execution -## with 4 nodes -## -## default: srun if found -## -set(TEST_EXEC_PREFIX "${TEST_EXEC_PREFIX_DEFAULT}" CACHE STRING "prefix command for the test executions") - - - -### -## Test execution prefix specific for MPI programs. -## -## To specify manually a command with argument, use the cmake list syntax. 
e.g -DTEST_EXEC_PREFIX="/var/empty/bin/mpiexec;-n;-4" for an MPI execution -## with 4 nodes -## -## default: srun or mpiexec if found -## -set(TEST_MPI_EXEC_PREFIX "${TEST_MPI_EXEC_PREFIX_DEFAULT}" CACHE STRING "prefix command for the MPI test executions") - - - - - - - diff --git a/CMakeLists.txt b/CMakeLists.txt index 694960090..c1446545c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,43 +1,11 @@ -cmake_minimum_required(VERSION 3.1) -if(${CMAKE_VERSION} VERSION_LESS 3.13) - cmake_policy(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}) -else() - cmake_policy(VERSION 3.13) -endif() +cmake_minimum_required(VERSION 3.13) +cmake_policy(VERSION 3.13) -project(HighFive VERSION 2.9.0) +project(HighFive VERSION 3.0.0) -configure_file(${CMAKE_CURRENT_SOURCE_DIR}/include/highfive/H5Version.hpp.in - ${CMAKE_CURRENT_SOURCE_DIR}/include/highfive/H5Version.hpp) -# INCLUDES -list(APPEND CMAKE_MODULE_PATH - ${CMAKE_CURRENT_SOURCE_DIR}/CMake - ${CMAKE_CURRENT_SOURCE_DIR}/CMake/config) - -# OPTIONS -# Compatibility within Highfive 2.x series -set(USE_BOOST ON CACHE BOOL "Enable Boost Support") -set(USE_EIGEN OFF CACHE BOOL "Enable Eigen testing") -set(USE_XTENSOR OFF CACHE BOOL "Enable xtensor testing") -set(USE_OPENCV OFF CACHE BOOL "Enable OpenCV testing") -mark_as_advanced(USE_BOOST USE_EIGEN USE_XTENSOR) - -set(HIGHFIVE_UNIT_TESTS AUTO CACHE STRING "Enable unit tests (requires Catch2 to be present)") -set_property(CACHE HIGHFIVE_UNIT_TESTS PROPERTY STRINGS AUTO ON OFF) - -option(HIGHFIVE_USE_BOOST "Enable Boost Support" ${USE_BOOST}) -option(HIGHFIVE_USE_HALF_FLOAT "Enable half-precision floats" ${USE_HALF_FLOAT}) -option(HIGHFIVE_USE_EIGEN "Enable Eigen testing" ${USE_EIGEN}) -option(HIGHFIVE_USE_OPENCV "Enable OpenCV testing" ${USE_OPENCV}) -option(HIGHFIVE_USE_XTENSOR "Enable xtensor testing" ${USE_XTENSOR}) -option(HIGHFIVE_EXAMPLES "Compile examples" ON) -option(HIGHFIVE_PARALLEL_HDF5 "Enable Parallel HDF5 support" OFF) -option(HIGHFIVE_STATIC_HDF5 "Staticly link to HDF5 library" OFF) -option(HIGHFIVE_BUILD_DOCS "Enable documentation building" ON) +# Configure HighFive +# ------------------ option(HIGHFIVE_VERBOSE "Set logging level to verbose." OFF) -option(HIGHFIVE_GLIBCXX_ASSERTIONS "Enable bounds check for STL." OFF) -option(HIGHFIVE_HAS_CONCEPTS "Print readable compiler errors w/ C++20 concepts" ON) -option(HIGHFIVE_HAS_WERROR "Convert warnings to errors." OFF) # Controls if HighFive classes are friends of each other. # @@ -66,69 +34,126 @@ option(HIGHFIVE_HAS_WERROR "Convert warnings to errors." OFF) option(HIGHFIVE_HAS_FRIEND_DECLARATIONS "Enable additional friend declarations. Certain compiler require this On, others Off." OFF) mark_as_advanced(HIGHFIVE_HAS_FRIEND_DECLARATIONS) -set(HIGHFIVE_SANITIZER OFF CACHE STRING "Enable a group of sanitizers, requires compiler support. Supported: 'address' and 'undefined'.") -mark_as_advanced(HIGHFIVE_SANITIZER) +option(HIGHFIVE_FIND_HDF5 "Find and link with HDF5." On) + +# Configure Tests & Examples +# -------------------------- + +# Internal variable that controls the default value for building +# optional things like tests, examples and docs. 
+if (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR) + set(HIGHFIVE_EXTRAS_DEFAULT ON) +else() + set(HIGHFIVE_EXTRAS_DEFAULT OFF) +endif() + +option(HIGHFIVE_UNIT_TESTS "Compile unit-tests" ${HIGHFIVE_EXTRAS_DEFAULT}) +option(HIGHFIVE_EXAMPLES "Compile examples" ${HIGHFIVE_EXTRAS_DEFAULT}) +option(HIGHFIVE_BUILD_DOCS "Build documentation" ${HIGHFIVE_EXTRAS_DEFAULT}) -# In deployments we probably don't want/cant have dynamic dependencies -option(HIGHFIVE_USE_INSTALL_DEPS "End applications by default use detected dependencies here" OFF) -mark_as_advanced(HIGHFIVE_USE_INSTALL_DEPS) +option(HIGHFIVE_TEST_BOOST "Enable Boost testing" OFF) +option(HIGHFIVE_TEST_EIGEN "Enable Eigen testing" OFF) +option(HIGHFIVE_TEST_OPENCV "Enable OpenCV testing" OFF) +option(HIGHFIVE_TEST_XTENSOR "Enable xtensor testing" OFF) +option(HIGHFIVE_TEST_HALF_FLOAT "Enable half-precision floats" OFF) +# TODO remove entirely. +option(HIGHFIVE_HAS_CONCEPTS "Print readable compiler errors w/ C++20 concepts" OFF) + +option(HIGHFIVE_HAS_WERROR "Convert warnings to errors." OFF) +option(HIGHFIVE_GLIBCXX_ASSERTIONS "Enable bounds check for STL." OFF) +# TODO these some magic to get a drop down menu in ccmake +set(HIGHFIVE_SANITIZER OFF CACHE STRING "Enable a group of sanitizers, requires compiler support. Supported: 'address' and 'undefined'.") +mark_as_advanced(HIGHFIVE_SANITIZER) # Check compiler cxx_std requirements # ----------------------------------- -if(CMAKE_CXX_STANDARD EQUAL 98) - message(FATAL_ERROR "HighFive needs to be compiled with at least C++11") -endif() - if(NOT DEFINED CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 11) + if(HIGHFIVE_TEST_XTENSOR) + set(CMAKE_CXX_STANDARD 14) + else() + set(CMAKE_CXX_STANDARD 11) + endif() set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_EXTENSIONS OFF) endif() -if(HIGHFIVE_USE_XTENSOR AND CMAKE_CXX_STANDARD LESS 14) - set(CMAKE_CXX_STANDARD 14) - set(CMAKE_CXX_STANDARD_REQUIRED ON) +if(CMAKE_CXX_STANDARD EQUAL 98) + message(FATAL_ERROR "HighFive needs to be compiled with at least C++11") +endif() + +if(HIGHFIVE_TEST_XTENSOR AND CMAKE_CXX_STANDARD LESS 14) + message(FATAL_ERROR "XTensor requires C++14 or newer.") endif() add_compile_definitions(HIGHFIVE_CXX_STD=${CMAKE_CXX_STANDARD}) -# Search dependencies (hdf5, boost, eigen, xtensor, mpi) and build target highfive_deps -include(${PROJECT_SOURCE_DIR}/CMake/HighFiveTargetDeps.cmake) +# HighFive +# -------- +add_library(HighFiveInclude INTERFACE) +add_library(HighFive::Include ALIAS HighFiveInclude) +set_target_properties(HighFiveInclude PROPERTIES EXPORT_NAME Include) + +target_include_directories(HighFiveInclude INTERFACE + $ + $ +) + +add_library(HighFive INTERFACE) +add_library(HighFive::HighFive ALIAS HighFive) + +target_link_libraries(HighFive INTERFACE HighFive::Include) + +if(HIGHFIVE_FIND_HDF5) + find_package(HDF5 REQUIRED) + target_link_libraries(HighFive INTERFACE HDF5::HDF5) +endif() + +if(HDF5_IS_PARALLEL) + find_package(MPI REQUIRED) + target_link_libraries(HighFive + INTERFACE + $ + $ + ) +endif() + +configure_file(${CMAKE_CURRENT_SOURCE_DIR}/include/highfive/H5Version.hpp.in + ${CMAKE_CURRENT_SOURCE_DIR}/include/highfive/H5Version.hpp) -# Set-up HighFive to be used in 3rd party project using exports. 
Create a HighFive target -include(${PROJECT_SOURCE_DIR}/CMake/HighFiveTargetExport.cmake) +# Install +# ------- +include(CMakePackageConfigHelpers) +write_basic_package_version_file( + ${CMAKE_CURRENT_BINARY_DIR}/cmake/HighFiveConfigVersion.cmake + VERSION ${PACKAGE_VERSION} + COMPATIBILITY AnyNewerVersion +) -# Installation of headers (HighFive is only interface) install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/include/ DESTINATION "include" PATTERN "*.in" EXCLUDE) +install(TARGETS HighFive HighFiveInclude EXPORT HighFiveTargets) +install(EXPORT HighFiveTargets + FILE HighFiveTargets.cmake + NAMESPACE HighFive:: + DESTINATION lib/cmake/HighFive +) + +install(FILES + ${CMAKE_CURRENT_SOURCE_DIR}/cmake/HighFiveConfig.cmake + ${CMAKE_CURRENT_BINARY_DIR}/cmake/HighFiveConfigVersion.cmake + DESTINATION lib/cmake/HighFive +) + # Preparing local building (tests, examples) # ------------------------------------------ -# Disable test if Boost was expressly disabled, or if HighFive is a sub-project -if (NOT CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR) - if(HIGHFIVE_UNIT_TESTS AND NOT HighFive_FIND_QUIETLY) - message(WARNING "Unit tests have been DISABLED.") - endif() - set(HIGHFIVE_UNIT_TESTS FALSE) -endif() - -if(HIGHFIVE_UNIT_TESTS) - if(EXISTS ${CMAKE_CURRENT_LIST_DIR}/deps/catch2/CMakeLists.txt) - add_subdirectory(deps/catch2 EXCLUDE_FROM_ALL) - list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/deps/catch2/contrib) - else() - find_package(Catch2) - if(NOT HIGHFIVE_UNIT_TESTS STREQUAL "AUTO" AND HIGHFIVE_UNIT_TESTS AND NOT Catch2_FOUND) - message(FATAL_ERROR "Please provide a Catch2 installation or clone the submodule") - elseif(NOT Catch2_FOUND) - message(WARNING "No Catch2 installation was found; Disabling unit tests.") - set(HIGHFIVE_UNIT_TESTS OFF) - endif() - endif() +if(HIGHFIVE_EXAMPLES OR HIGHFIVE_UNIT_TESTS) + include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/HighFiveWarnings.cmake) + include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/HighFiveOptionalDependencies.cmake) endif() if(HIGHFIVE_EXAMPLES) @@ -136,6 +161,8 @@ if(HIGHFIVE_EXAMPLES) endif() if(HIGHFIVE_UNIT_TESTS) + add_subdirectory(deps/catch2 EXCLUDE_FROM_ALL) + list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/deps/catch2/contrib) enable_testing() add_subdirectory(tests/unit) endif() diff --git a/README.md b/README.md index 668c90365..69e979f92 100644 --- a/README.md +++ b/README.md @@ -39,12 +39,11 @@ It integrates nicely with other CMake projects by defining (and exporting) a Hig - etc... (see [ChangeLog](./CHANGELOG.md)) ### Dependencies -- hdf5 (dev) -- hdf5-mpi (optional, opt-in with -D*HIGHFIVE_PARALLEL_HDF5*=ON) -- boost >= 1.41 (recommended, opt-out with -D*HIGHFIVE_USE_BOOST*=OFF) -- eigen3 (optional, opt-in with -D*HIGHFIVE_USE_EIGEN*=ON) -- xtensor (optional, opt-in with -D*HIGHFIVE_USE_XTENSOR*=ON) -- half (optional, opt-in with -D*HIGHFIVE_USE_HALF_FLOAT*=ON) +- HDF5 or pHDF5, including headers +- boost >= 1.41 (recommended) +- eigen3 (optional) +- xtensor (optional) +- half (optional) ### Known flaws - HighFive is not thread-safe. At best it has the same limitations as the HDF5 library. However, HighFive objects modify their members without protecting these writes. Users have reported that HighFive is not thread-safe even when using the threadsafe HDF5 library, e.g., https://github.com/BlueBrain/HighFive/discussions/675. 
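[Editor's note — illustration, not part of the patch] The install rules above place `HighFiveConfig.cmake` and the exported `HighFiveTargets.cmake` under `lib/cmake/HighFive` inside the chosen install prefix. As a minimal sketch of the consumer side (the prefix path below is only a placeholder, not something this patch pins down), pointing CMake at a non-default prefix before `find_package` might look like:

```cmake
# Sketch: consume an installed HighFive from a non-default prefix.
# The path is a placeholder; use wherever HighFive was actually installed.
cmake_minimum_required(VERSION 3.13)
project(foo)

list(APPEND CMAKE_PREFIX_PATH "$ENV{HOME}/third_party/HighFive")

# Loads <prefix>/lib/cmake/HighFive/HighFiveConfig.cmake.
find_package(HighFive REQUIRED)

add_executable(foo foo.cpp)
target_link_libraries(foo PUBLIC HighFive::HighFive)
```

Passing `-DCMAKE_PREFIX_PATH=<prefix>` on the configure command line has the same effect.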
@@ -124,11 +123,11 @@ For several 'standard' use cases the [highfive/H5Easy.hpp](include/highfive/H5Ea - scalars (to/from an extendible DataSet), - strings, - vectors (of standard types), - - [Eigen::Matrix](http://eigen.tuxfamily.org) (optional, enable CMake option `HIGHFIVE_USE_EIGEN`), + - [Eigen::Matrix](http://eigen.tuxfamily.org) (optional), - [xt::xarray](https://github.com/QuantStack/xtensor) and [xt::xtensor](https://github.com/QuantStack/xtensor) - (optional, enable CMake option `HIGHFIVE_USE_XTENSOR`). + (optional). - [cv::Mat_](https://docs.opencv.org/master/df/dfc/classcv_1_1Mat__.html) - (optional, enable CMake option `HIGHFIVE_USE_OPENCV`). + (optional). * Getting in a single line: @@ -150,16 +149,29 @@ int main() { } ``` -whereby the `int` type of this example can be replaced by any of the above types. See [easy_load_dump.cpp](src/examples/easy_load_dump.cpp) for more details. +whereby the `int` type of this example can be replaced by any of the above +types. See [easy_load_dump.cpp](src/examples/easy_load_dump.cpp) for more +details. -**Note:** Classes such as `H5Easy::File` are just short for the regular `HighFive` classes (in this case `HighFive::File`). They can thus be used interchangeably. +**Note:** Classes such as `H5Easy::File` are just short for the regular +`HighFive` classes (in this case `HighFive::File`). They can thus be used +interchangeably. ## CMake integration There's two common paths of integrating HighFive into a CMake based project. The first is to "vendor" HighFive, the second is to install HighFive as a -normal C++ library. Due to how HighFive CMake code works, sometimes following -the third Bailout Approach is needed. +normal C++ library. Since HighFive makes choices about how to integrate HDF5, +sometimes following the third Bailout Approach is needed. + +Regular HDF5 CMake variables can be used. Interesting variables include: + +* `HDF5_USE_STATIC_LIBRARIES` to link statically against the HDF5 library. +* `HDF5_PREFER_PARALLEL` to prefer pHDF5. +* `HDF5_IS_PARALLEL` to check if HDF5 is parallel. + +Please consult `tests/cmake_integration` for examples of how to write libraries +or applications using HighFive. ### Vendoring HighFive @@ -168,86 +180,63 @@ project (typically as a git submodule), for example in `third_party/HighFive`. The projects `CMakeLists.txt` add the following lines ```cmake -add_executable(foo foo.cpp) - -# You might want to turn off Boost support: -if(NOT DEFINED HIGHFIVE_USE_BOOST) - set(HIGHFIVE_USE_BOOST Off) -endif() - -# Include the subdirectory and use the target HighFive. add_subdirectory(third_party/HighFive) target_link_libraries(foo HighFive) ``` **Note:** `add_subdirectory(third_party/HighFive)` will search and "link" HDF5 -and optional dependencies such as Boost. +but wont search or link any optional dependencies such as Boost. ### Regular Installation of HighFive -Alternatively you can install HighFive once and use it in several projects via -`find_package()`. First one should clone the sources: -```bash -git clone --recursive https://github.com/BlueBrain/HighFive.git HighFive-src -``` -By default CMake will install systemwide, which is likely not appropriate. The -instruction below allow users to select a custom path where HighFive will be -installed, e.g. `HIGHFIVE_INSTALL_PREFIX=${HOME}/third_party/HighFive` or some -other location. 
The CMake invocations would be -```bash -cmake -DHIGHFIVE_EXAMPLES=Off \ - -DHIGHFIVE_USE_BOOST=Off \ - -DHIGHFIVE_UNIT_TESTS=Off \ - -DCMAKE_INSTALL_PREFIX=${HIGHFIVE_INSTALL_PREFIX} \ - -B HighFive-src/build \ - HighFive-src - -cmake --build HighFive-src/build -cmake --install HighFive-src/build -``` -This will install (i.e. copy) the headers to -`${HIGHFIVE_INSTALL_PREFIX}/include` and some CMake files into an appropriate -subfolder of `${HIGHFIVE_INSTALL_PREFIX}`. +Alternatively, HighFive can be install and "found" like regular software. -The projects `CMakeLists.txt` should add the following: +The project's `CMakeLists.txt` should add the following: ```cmake -# ... -add_executable(foo foo.cpp) - find_package(HighFive REQUIRED) target_link_libraries(foo HighFive) ``` -**Note:** If HighFive hasn't been installed in a default location, CMake needs -to be told where to find it which can be done by adding -`-DCMAKE_PREFIX_PATH=${HIGHFIVE_INSTALL_PREFIX}` to the CMake command for -building the project using HighFive. The variable `CMAKE_PREFIX_PATH` is a -semi-colon `;` separated list of directories. +**Note:** `find_package(HighFive)` will search for HDF5. "Linking" to +`HighFive` includes linking with HDF5. The two commands will not search for or +"link" to optional dependencies such as Boost. -**Note:** `find_package(HighFive)` will search and "link" HDF5 and optional -dependencies such as Boost. +### Bailout Approach -### The Bailout Approach -Since both `add_subdirectory` and `find_package` will trigger finding HDF5 and -other optional dependencies of HighFive as well as the `target_link_libraries` -code for "linking" with the dependencies, things can go wrong. +To prevent HighFive from searching or "linking" to HDF5 the project's +`CMakeLists.txt` should contain the following: -Fortunately, HighFive is a header only library and all that's needed is the -headers. Preferably, the version obtained by installing HighFive, since those -include `H5Version.hpp`. Let's assume they've been copied to -`third_party/HighFive`. Then one could create a target: +```cmake +# Prevent HighFive CMake code from searching for HDF5: +set(HIGHFIVE_FIND_HDF5 Off) -```bash -add_library(HighFive INTERFACE) -target_include_directory(HighFive INTERFACE ${CMAKE_CURRENT_SOURCE_DIR}/third_party/HighFive/include) +# Then "find" HighFive as usual: +find_package(HighFive REQUIRED) +# alternatively, when vendoring: +# add_subdirectory(third_party/HighFive) +# Finally, use the target `HighFive::Include` which +# doesn't add a dependency on HDF5. +target_link_libraries(foo HighFive::Include) -add_executable(foo foo.cpp) -target_link_libraries(foo HighFive) +# Proceed to find and link HDF5 as required. ``` -One known case where this is required is when vendoring the optional -dependencies of HighFive. +### Optional Dependencies + +HighFive does not attempt to find or "link" to any optional dependencies, such +as Boost, Eigen, etc. Any project using HighFive with any of the optional +dependencies must include the respective header: +``` +#include +#include +``` +and add the required CMake code to find and link against the dependencies. For +Boost the required lines might be +``` +find_package(Boost REQUIRED) +target_link_libraries(foo PUBLIC Boost::headers) +``` # Questions? 
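[Editor's note — illustration, not part of the patch] Besides vendoring a checked-out copy with `add_subdirectory` and consuming a regular installation with `find_package`, the integration tests added below also exercise CMake's `FetchContent` as a vendoring strategy. A minimal sketch of that route (the Git tag is an illustrative placeholder; pin whatever release or commit is needed):

```cmake
# Sketch: vendor HighFive via FetchContent instead of a git submodule.
include(FetchContent)

FetchContent_Declare(HighFive
  GIT_REPOSITORY https://github.com/BlueBrain/HighFive.git
  GIT_TAG        v3.0.0  # placeholder tag
)
FetchContent_MakeAvailable(HighFive)

add_executable(foo foo.cpp)
# Same target names as with add_subdirectory() or find_package().
target_link_libraries(foo PUBLIC HighFive::HighFive)
```

As with `add_subdirectory`, this finds and links HDF5 but leaves optional dependencies such as Boost to the consuming project.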
diff --git a/cmake/HighFiveConfig.cmake b/cmake/HighFiveConfig.cmake new file mode 100644 index 000000000..33ce7b7b0 --- /dev/null +++ b/cmake/HighFiveConfig.cmake @@ -0,0 +1,19 @@ +include(CMakeFindDependencyMacro) + +if(NOT DEFINED HIGHFIVE_FIND_HDF5) + set(HIGHFIVE_FIND_HDF5 On) +endif() + +if(HIGHFIVE_FIND_HDF5) + find_dependency(HDF5) +endif() + +include("${CMAKE_CURRENT_LIST_DIR}/HighFiveTargets.cmake") + +if(HDF5_IS_PARALLEL) + find_dependency(MPI) + target_link_libraries(HighFive::HighFive INTERFACE MPI::MPI_C MPI::MPI_CXX) +endif() + +add_library(HighFive ALIAS HighFive::HighFive) +add_library(HighFiveInclude ALIAS HighFive::Include) diff --git a/cmake/HighFiveOptionalDependencies.cmake b/cmake/HighFiveOptionalDependencies.cmake new file mode 100644 index 000000000..53d10ecba --- /dev/null +++ b/cmake/HighFiveOptionalDependencies.cmake @@ -0,0 +1,27 @@ +if(HIGHFIVE_TEST_BOOST AND NOT TARGET HighFiveBoostDependency) + add_library(HighFiveBoostDependency INTERFACE) + find_package(Boost REQUIRED) + target_link_libraries(HighFiveBoostDependency INTERFACE Boost::headers) + # TODO check if we need Boost::disable_autolinking to cause: + # -DBOOST_ALL_NO_LIB (does something on MSVC). +endif() + +if(HIGHFIVE_TEST_EIGEN AND NOT TARGET HighFiveEigenDependency) + add_library(HighFiveEigenDependency INTERFACE) + find_package(Eigen3 REQUIRED NO_MODULE) + target_link_libraries(HighFiveEigenDependency INTERFACE Eigen3::Eigen) +endif() + +if(HIGHFIVE_TEST_XTENSOR AND NOT TARGET HighFiveXTensorDependency) + add_library(HighFiveXTensorDependency INTERFACE) + find_package(xtensor REQUIRED) + target_link_libraries(HighFiveXTensorDependency INTERFACE xtensor) +endif() + +if(HIGHFIVE_TEST_OPENCV AND NOT TARGET HighFiveOpenCVDependency) + add_library(HighFiveOpenCVDependency INTERFACE) + find_package(OpenCV REQUIRED) + target_include_directories(HighFiveOpenCVDependency SYSTEM INTERFACE ${OpenCV_INCLUDE_DIRS}) + target_link_libraries(HighFiveOpenCVDependency INTERFACE ${OpenCV_LIBS}) + target_compile_definitions(HighFiveOpenCVDependency INTERFACE H5_USE_OPENCV) +endif() diff --git a/CMake/HighFiveWarnings.cmake b/cmake/HighFiveWarnings.cmake similarity index 100% rename from CMake/HighFiveWarnings.cmake rename to cmake/HighFiveWarnings.cmake diff --git a/doc/developer_guide.md b/doc/developer_guide.md index fc388f3b5..f129ecb1d 100644 --- a/doc/developer_guide.md +++ b/doc/developer_guide.md @@ -23,8 +23,8 @@ cmake --build build --parallel ctest --test-dir build ``` -You might want to turn off Boost `-DHIGHFIVE_USE_BOOST=Off` or turn on other -optional dependencies. +You might want to turn Boost `-DHIGHFIVE_TEST_BOOST=On` or optional +dependencies on. ## Contributing There's numerous HDF5 features that haven't been wrapped yet. HighFive is a diff --git a/doc/installation.md b/doc/installation.md index e9c5b2e6e..3dfb3dc4b 100644 --- a/doc/installation.md +++ b/doc/installation.md @@ -211,7 +211,7 @@ this you type Okay, on to configure, compile and install. The CMake commands are - cmake -DCMAKE_INSTALL_PREFIX=build/install -DHIGHFIVE_USE_BOOST=Off -B build . + cmake -DCMAKE_INSTALL_PREFIX=build/install -B build . 
cmake --build build --parallel cmake --install build diff --git a/include/highfive/H5Version.hpp b/include/highfive/H5Version.hpp index bca2c3a83..4ffb03645 100644 --- a/include/highfive/H5Version.hpp +++ b/include/highfive/H5Version.hpp @@ -8,8 +8,8 @@ */ #pragma once -#define HIGHFIVE_VERSION_MAJOR 2 -#define HIGHFIVE_VERSION_MINOR 9 +#define HIGHFIVE_VERSION_MAJOR 3 +#define HIGHFIVE_VERSION_MINOR 0 #define HIGHFIVE_VERSION_PATCH 0 /** \brief Concatenated representation of the HighFive version. @@ -24,10 +24,10 @@ * std::cout << STRINGIFY_VALUE(HIGHFIVE_VERSION) << "\n"; * \endcode */ -#define HIGHFIVE_VERSION 2.9.0 +#define HIGHFIVE_VERSION 3.0.0 /** \brief String representation of the HighFive version. * * \warning This macro only exists from 2.7.1 onwards. */ -#define HIGHFIVE_VERSION_STRING "2.9.0" +#define HIGHFIVE_VERSION_STRING "3.0.0" diff --git a/src/examples/CMakeLists.txt b/src/examples/CMakeLists.txt index 8b5f8b0af..ab5700c6a 100644 --- a/src/examples/CMakeLists.txt +++ b/src/examples/CMakeLists.txt @@ -1,51 +1,90 @@ -include(HighFiveWarnings) +set(core_examples + ${CMAKE_CURRENT_SOURCE_DIR}/compound_types.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/create_attribute_string_integer.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/create_dataset_double.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/create_datatype.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/create_extensible_dataset.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/create_large_attribute.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/create_page_allocated_files.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/readme_snippet.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/read_write_dataset_string.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/read_write_raw_ptr.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/read_write_single_scalar.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/read_write_std_strings.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/read_write_vector_dataset.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/read_write_vector_dataset_references.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/renaming_objects.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/select_by_id_dataset_cpp11.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/select_partial_dataset_cpp11.cpp +) -function(compile_example example_source) +set(easy_examples + ${CMAKE_CURRENT_SOURCE_DIR}/easy_attribute.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/easy_dumpoptions.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/easy_load_dump.cpp +) + +set(boost_examples + ${CMAKE_CURRENT_SOURCE_DIR}/boost_multi_array_2D.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/boost_multiarray_complex.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/boost_ublas_double.cpp +) + +set(hl_hdf5_examples + ${CMAKE_CURRENT_SOURCE_DIR}/hl_hdf5_inmemory_files.cpp +) + +set(parallel_hdf5_examples + ${CMAKE_CURRENT_SOURCE_DIR}/parallel_hdf5_collective_io.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/parallel_hdf5_independent_io.cpp +) - get_filename_component(example_filename ${example_source} NAME) - string(REPLACE ".cpp" "_bin" example_name ${example_filename}) - - if(${example_filename} MATCHES ".*eigen.*") - if(NOT HIGHFIVE_USE_EIGEN) - return() - endif() - endif() - - if(${example_filename} MATCHES ".*boost.*") - if(NOT HIGHFIVE_USE_BOOST) - return() - endif() - endif() - - if(${example_filename} MATCHES ".*parallel_hdf5.*") - if(NOT HIGHFIVE_PARALLEL_HDF5) - return() - endif() - endif() - - if(${example_filename} MATCHES ".*half_float.*") - if(NOT HIGHFIVE_USE_HALF_FLOAT) - return() - endif() - endif() - - if(${example_name} MATCHES ".*hl_hdf5.*") - find_package(HDF5 QUIET COMPONENTS HL NAMES HDF5_HL) - if(${HDF5_HL_FOUND}) - message("HDF5 HL: ${HDF5_HL_LIBRARIES}") - add_executable(${example_name} ${example_source}) - 
target_link_libraries(${example_name} HighFive HighFiveWarnings ${HDF5_HL_LIBRARIES}) - endif() - return() - endif() - - add_executable(${example_name} ${example_source}) - target_link_libraries(${example_name} HighFive HighFiveWarnings) +set(half_float_examples + ${CMAKE_CURRENT_SOURCE_DIR}/create_dataset_half_float.cpp +) +function(compile_example example_source) + get_filename_component(example_filename ${example_source} NAME) + string(REPLACE ".cpp" "_bin" example_name ${example_filename}) + message("example_name: ${example_name}") + + add_executable(${example_name} ${example_source}) + target_link_libraries(${example_name} PUBLIC HighFive HighFiveWarnings) + if(${ARGC} EQUAL 2) + target_link_libraries(${example_name} PUBLIC ${ARGV1}) + endif() endfunction() -file(GLOB list_example "*.cpp") -foreach(example_src ${list_example}) - compile_example(${example_src}) +foreach(example_source ${core_examples}) + compile_example(${example_source}) endforeach() + +foreach(example_source ${easy_examples}) + compile_example(${example_source}) +endforeach() + +if(HIGHFIVE_TEST_BOOST) + foreach(example_source ${boost_examples}) + compile_example(${example_source} HighFiveBoostDependency) + endforeach() +endif() + +if(HDF5_IS_PARALLEL) + foreach(example_source ${parallel_hdf5_examples}) + compile_example(${example_source}) + endforeach() +endif() + +add_library(HighFiveHlHdf5Dependency INTERFACE) +find_package(HDF5 QUIET COMPONENTS HL NAMES HDF5_HL) +if(${HDF5_HL_FOUND}) + message("HDF5 HL: ${HDF5_HL_LIBRARIES}") + target_link_libraries(HighFiveHlHdf5Dependency ${HDF5_HL_LIBRARIES}) + + foreach(example_source ${hl_hdf5_examples}) + compile_example(${example_source} HighFiveHlHdf5Dependency) + endforeach() +endif() + +# TODO Half-float examples diff --git a/tests/cmake_integration/README.md b/tests/cmake_integration/README.md new file mode 100644 index 000000000..c69df2ced --- /dev/null +++ b/tests/cmake_integration/README.md @@ -0,0 +1,22 @@ +# Examples of CMake Integration. +This folder contains examples of projects using CMake to integrate HighFive in +the project. The following examples have been provided: + +* `application` contains an application/executable + that uses HighFive and the optional Boost dependency. + +* `dependent_library` contains a library that uses HighFive in its API. It + consists of a shared and static library; and includes, as an optional + component, a Boost dependency. + +* `test_dependent_library` is an application to test that (or demonstrate how) + `dependent_library` can be consumed easily. + +## Vendoring and Integration Strategy +Note that all examples have been written to pick different vendoring and +integration strategies. This is for testing purposes only. Any real project +would pick a single integration strategy and at most two vendoring strategies. + +## Testing +Run `bash test_cmake_integration.sh` to check if the CMake integration examples +are working as expected. diff --git a/tests/cmake_integration/application/CMakeLists.txt b/tests/cmake_integration/application/CMakeLists.txt new file mode 100644 index 000000000..469344e9b --- /dev/null +++ b/tests/cmake_integration/application/CMakeLists.txt @@ -0,0 +1,87 @@ +# This is an example of an application/executable using HighFive. It +# demonstrates the different vendoring strategies and targets provided by +# HighFive.
+ +cmake_minimum_required(VERSION 3.14) +project(Hi5Application VERSION 0.1) + +if(NOT DEFINED CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 11) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + set(CMAKE_CXX_EXTENSIONS OFF) +endif() + +set(INTEGRATION_STRATEGY "short" CACHE STRING "Use 'Include' for HighFive::Include, 'full' for HighFive::HighFive, 'short' for HighFive.") +set(VENDOR_STRATEGY "submodule" CACHE STRING "Use 'submodule' for Git submodules, 'fetch_content' for FetchContent, 'external' for `find_package`.") +option(USE_STATIC_HDF5 "Link against static HDF5" OFF) +option(USE_BOOST "Simulates an application using Boost" OFF) + +# Controlling HDF5 features is done by directly setting the HDF5 flags. The +# interesting ones are probably: +# * HDF5_USE_STATIC_LIBRARIES +# * HDF5_PREFER_PARALLEL +if(USE_STATIC_HDF5) + set(HDF5_USE_STATIC_LIBRARIES On) +else() + set(HDF5_USE_STATIC_LIBRARIES Off) +endif() + +if(${INTEGRATION_STRATEGY} STREQUAL "bailout") + set(HIGHFIVE_FIND_HDF5 Off) +endif() + +if(${VENDOR_STRATEGY} STREQUAL "submodule") + # When vendoring via a Git submodule, this is the correct + # line to include HighFive. + add_subdirectory("deps/HighFive" EXCLUDE_FROM_ALL) +elseif(${VENDOR_STRATEGY} STREQUAL "fetch_content") + include(FetchContent) + FetchContent_Declare(HighFive + GIT_REPOSITORY $ENV{HIGHFIVE_GIT_REPOSITORY} + GIT_TAG $ENV{HIGHFIVE_GIT_TAG} + ) + FetchContent_MakeAvailable(HighFive) +elseif(${VENDOR_STRATEGY} STREQUAL "external") + # When HighFive is installed like regular software and then "found", do the + # following: + find_package(HighFive REQUIRED) +endif() + +add_executable(Hi5Application "hi5_application.cpp") + +if( ${INTEGRATION_STRATEGY} STREQUAL "Include" + OR ${INTEGRATION_STRATEGY} STREQUAL "bailout") + # Only add `-I${HIGHFIVE_DIR}/include`. + target_link_libraries(Hi5Application PUBLIC HighFive::Include) + + # Now link to HDF5 in whatever fashion you desire. + find_package(HDF5 REQUIRED) + target_link_libraries(Hi5Application PUBLIC HDF5::HDF5) + + # You might need to take care of MPI. + find_package(MPI REQUIRED) + target_link_libraries(Hi5Application PUBLIC MPI::MPI_C MPI::MPI_CXX) +elseif(${INTEGRATION_STRATEGY} STREQUAL "short") + # Highest chance of being backwards compatible with v2. 
+ target_link_libraries(Hi5Application PUBLIC HighFive) +elseif(${INTEGRATION_STRATEGY} STREQUAL "full") + target_link_libraries(Hi5Application PUBLIC HighFive::HighFive) +endif() + +if(USE_BOOST) + find_package(Boost REQUIRED) + target_link_libraries(Hi5Application PUBLIC Boost::headers) + target_compile_definitions(Hi5Application PUBLIC HI5_APPLICATION_HAS_BOOST=1) +endif() + +if(USE_STATIC_HDF5) + find_package(ZLIB REQUIRED) + target_link_libraries(Hi5Application PUBLIC ZLIB::ZLIB) +endif() + +# Install +# ------- +install(TARGETS Hi5Application RUNTIME DESTINATION bin) + +enable_testing() +add_test(NAME test_hi5_application COMMAND ${CMAKE_CURRENT_BINARY_DIR}/Hi5Application) diff --git a/tests/cmake_integration/application/deps/.gitignore b/tests/cmake_integration/application/deps/.gitignore new file mode 100644 index 000000000..d6b7ef32c --- /dev/null +++ b/tests/cmake_integration/application/deps/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/tests/cmake_integration/application/hi5_application.cpp b/tests/cmake_integration/application/hi5_application.cpp new file mode 100644 index 000000000..dd27260aa --- /dev/null +++ b/tests/cmake_integration/application/hi5_application.cpp @@ -0,0 +1,36 @@ +#include <highfive/highfive.hpp> +#if HI5_APPLICATION_HAS_BOOST == 1 +#include <boost/numeric/ublas/matrix.hpp> +#endif + +int main() { + { + auto file = HighFive::File("foo.h5", HighFive::File::Truncate); + + auto dset = file.createDataSet("foo", std::vector<double>{1.0, 2.0, 3.0}); + auto x = dset.read<std::vector<double>>(); + + for (size_t i = 0; i < x.size(); i++) { + if (x[i] != double(i + 1)) { + throw std::runtime_error("HighFiveDemo is broken."); + } + } + + std::cout << "Hi5Application: success \n"; + } + +#if HI5_APPLICATION_HAS_BOOST == 1 + { + using matrix_t = boost::numeric::ublas::matrix<double>; + + auto file = HighFive::File("bar.h5", HighFive::File::Truncate); + matrix_t x(3, 5); + auto dset = file.createDataSet("foo", x); + auto y = dset.read<matrix_t>(); + + std::cout << "Hi5BoostApplication: success \n"; + } +#endif + + return 0; +} diff --git a/tests/cmake_integration/dependent_library/CMakeLists.txt b/tests/cmake_integration/dependent_library/CMakeLists.txt new file mode 100644 index 000000000..1cdbaf35a --- /dev/null +++ b/tests/cmake_integration/dependent_library/CMakeLists.txt @@ -0,0 +1,134 @@ +# Do NOT document the available components and targets. Guessing CMake targets +# and components is an essential part of the game. This library for example has +# one component called `boost` and three targets. Use +# +# # Without Boost: +# find_package(Hi5Dependent REQUIRED) +# target_link_libraries(foo PUBLIC Hi5Dependent::Read Hi5Dependent::Write) +# +# # With Boost: +# find_package(Hi5Dependent REQUIRED COMPONENTS boost) +# target_link_libraries(foo PUBLIC Hi5Dependent::Read Hi5Dependent::Write) +# target_link_libraries(foo PUBLIC Hi5Dependent::Boost) + +cmake_minimum_required(VERSION 3.14) +project(Hi5Dependent VERSION 0.1) + +if(NOT DEFINED CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 11) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + set(CMAKE_CXX_EXTENSIONS OFF) +endif() + +set(INTEGRATION_STRATEGY "short" CACHE STRING "Use 'Include' for HighFive::Include, 'full' for HighFive::HighFive, 'short' for HighFive.") +option(USE_STATIC_HDF5 "Link against static HDF5" OFF) +option(USE_BOOST "Build '${PROJECT_NAME}' with optional Boost dependency."
OFF) + +if(USE_STATIC_HDF5) + set(HDF5_USE_STATIC_LIBRARIES On) +else() + set(HDF5_USE_STATIC_LIBRARIES Off) +endif() + +if(${INTEGRATION_STRATEGY} STREQUAL "bailout") + set(HIGHFIVE_FIND_HDF5 Off) +endif() + +# Since any project depending on 'Hi5Dependent' also needs HighFive, it doesn't +# make sense to vendor HighFive. Therefore, use +find_package(HighFive REQUIRED) + +# For demonstration purposes it consists of a shared and static library +add_library(${PROJECT_NAME}Write SHARED "src/hi5_dependent/write_vector.cpp") +add_library(${PROJECT_NAME}::Write ALIAS ${PROJECT_NAME}Write) +set_target_properties(${PROJECT_NAME}Write PROPERTIES EXPORT_NAME Write) + +add_library(${PROJECT_NAME}Read STATIC "src/hi5_dependent/read_vector.cpp") +add_library(${PROJECT_NAME}::Read ALIAS ${PROJECT_NAME}Read) +set_target_properties(${PROJECT_NAME}Read PROPERTIES EXPORT_NAME Read) + +set(Hi5DependentCoreTargets ${PROJECT_NAME}Write ${PROJECT_NAME}Read) +set(Hi5DependentAllTargets ${Hi5DependentCoreTargets}) + +# ... and two more for demonstrating an optional dependency (on Boost). +if(USE_BOOST) + add_library(${PROJECT_NAME}Boost SHARED "src/hi5_dependent/boost.cpp") + add_library(${PROJECT_NAME}::Boost ALIAS ${PROJECT_NAME}Boost) + set_target_properties(${PROJECT_NAME}Boost PROPERTIES EXPORT_NAME Boost) + + find_package(Boost REQUIRED) + target_link_libraries(${PROJECT_NAME}Boost PUBLIC Boost::headers) + target_compile_definitions(${PROJECT_NAME}Boost PUBLIC HI5_DEPENDENT_HAS_BOOST=1) + + list(APPEND Hi5DependentAllTargets ${PROJECT_NAME}Boost) +endif() + +foreach(target IN LISTS Hi5DependentAllTargets) + target_include_directories(${target} + PUBLIC $ + PUBLIC $ + ) + + # Remember to pick one. Writing out all variation serves testing and + # demonstration purposes only. Minimizing lines of code is probably a good + # strategy. 
+ if( ${INTEGRATION_STRATEGY} STREQUAL "Include" + OR ${INTEGRATION_STRATEGY} STREQUAL "bailout") + target_link_libraries(${target} PUBLIC HighFive::Include) + + find_package(HDF5 REQUIRED) + target_link_libraries(${target} PUBLIC HDF5::HDF5) + + find_package(MPI REQUIRED) + target_link_libraries(${target} PUBLIC MPI::MPI_C MPI::MPI_CXX) + elseif(${INTEGRATION_STRATEGY} STREQUAL "short") + target_link_libraries(${target} PUBLIC HighFive) + elseif(${INTEGRATION_STRATEGY} STREQUAL "full") + target_link_libraries(${target} PUBLIC HighFive::HighFive) + endif() + + if(USE_STATIC_HDF5) + find_package(ZLIB REQUIRED) + target_link_libraries(${target} PUBLIC ZLIB::ZLIB) + endif() +endforeach() + +# Install +# ------- +include(CMakePackageConfigHelpers) +write_basic_package_version_file( + ${CMAKE_CURRENT_BINARY_DIR}/cmake/${PROJECT_NAME}ConfigVersion.cmake + VERSION ${PACKAGE_VERSION} + COMPATIBILITY AnyNewerVersion +) + +install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/include/ + DESTINATION "include") + +install(TARGETS ${PROJECT_NAME}Read ${PROJECT_NAME}Write EXPORT ${PROJECT_NAME}Targets) +install(EXPORT ${PROJECT_NAME}Targets + FILE ${PROJECT_NAME}Targets.cmake + NAMESPACE ${PROJECT_NAME}:: + DESTINATION cmake +) + +configure_file( + ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${PROJECT_NAME}Config.cmake.in + ${CMAKE_CURRENT_BINARY_DIR}/cmake/${PROJECT_NAME}Config.cmake + @ONLY +) + +install(FILES + ${CMAKE_CURRENT_BINARY_DIR}/cmake/${PROJECT_NAME}Config.cmake + ${CMAKE_CURRENT_BINARY_DIR}/cmake/${PROJECT_NAME}ConfigVersion.cmake + DESTINATION cmake +) + +if(USE_BOOST) + install(TARGETS ${PROJECT_NAME}Boost EXPORT ${PROJECT_NAME}BoostTargets) + install(EXPORT ${PROJECT_NAME}BoostTargets + FILE ${PROJECT_NAME}BoostTargets.cmake + NAMESPACE ${PROJECT_NAME}:: + DESTINATION cmake + ) +endif() diff --git a/tests/cmake_integration/dependent_library/cmake/Hi5DependentConfig.cmake.in b/tests/cmake_integration/dependent_library/cmake/Hi5DependentConfig.cmake.in new file mode 100644 index 000000000..b5df933b9 --- /dev/null +++ b/tests/cmake_integration/dependent_library/cmake/Hi5DependentConfig.cmake.in @@ -0,0 +1,24 @@ +include(CMakeFindDependencyMacro) + +find_dependency(HighFive) + +set(Hi5Dependent_INTEGRATION_STRATEGY @INTEGRATION_STRATEGY@) +if( ${Hi5Dependent_INTEGRATION_STRATEGY} STREQUAL "Include" + OR ${Hi5Dependent_INTEGRATION_STRATEGY} STREQUAL "bailout") + # Remember to 'find' any dependencies you introduce, including HDF5 if you + # use additional COMPONENTS; or MPI if you unconditionally use it. 
+ find_dependency(MPI) +endif() + +include("${CMAKE_CURRENT_LIST_DIR}/Hi5DependentTargets.cmake") + +if(boost IN_LIST Hi5Dependent_FIND_COMPONENTS) + set(Hi5Dependent_USE_BOOST @USE_BOOST@) + + if(NOT Hi5Dependent_USE_BOOST) + message(FATAL_ERROR "Library was built without the component: boost") + endif() + + find_dependency(Boost) + include("${CMAKE_CURRENT_LIST_DIR}/Hi5DependentBoostTargets.cmake") +endif() diff --git a/tests/cmake_integration/dependent_library/include/hi5_dependent/read.hpp b/tests/cmake_integration/dependent_library/include/hi5_dependent/read.hpp new file mode 100644 index 000000000..7550efc8a --- /dev/null +++ b/tests/cmake_integration/dependent_library/include/hi5_dependent/read.hpp @@ -0,0 +1,17 @@ +#pragma once + +#include +#include + +#if HI5_DEPENDENT_HAS_BOOST == 1 +#include +#endif + +namespace hi5_dependent { +std::vector read_vector(const HighFive::DataSet& dset); + +#if HI5_DEPENDENT_HAS_BOOST == 1 +boost::numeric::ublas::matrix read_boost(const HighFive::DataSet& dset); +HighFive::DataSet write_boost(HighFive::File& file, const boost::numeric::ublas::matrix& x); +#endif +} // namespace hi5_dependent diff --git a/tests/cmake_integration/dependent_library/include/hi5_dependent/write.hpp b/tests/cmake_integration/dependent_library/include/hi5_dependent/write.hpp new file mode 100644 index 000000000..469b378bf --- /dev/null +++ b/tests/cmake_integration/dependent_library/include/hi5_dependent/write.hpp @@ -0,0 +1,8 @@ +#pragma once + +#include +#include + +namespace hi5_dependent { +HighFive::DataSet write_vector(HighFive::File& file, const std::vector& x); +} diff --git a/tests/cmake_integration/dependent_library/src/hi5_dependent/boost.cpp b/tests/cmake_integration/dependent_library/src/hi5_dependent/boost.cpp new file mode 100644 index 000000000..dc5f6909a --- /dev/null +++ b/tests/cmake_integration/dependent_library/src/hi5_dependent/boost.cpp @@ -0,0 +1,17 @@ +#include +#include + +#include + +namespace hi5_dependent { + +boost::numeric::ublas::matrix read_boost(const HighFive::DataSet& dset) { + return dset.read>(); +} + +HighFive::DataSet write_boost(HighFive::File& file, + const boost::numeric::ublas::matrix& x) { + return file.createDataSet("foo", x); +} + +} // namespace hi5_dependent diff --git a/tests/cmake_integration/dependent_library/src/hi5_dependent/read_vector.cpp b/tests/cmake_integration/dependent_library/src/hi5_dependent/read_vector.cpp new file mode 100644 index 000000000..e7e6d973f --- /dev/null +++ b/tests/cmake_integration/dependent_library/src/hi5_dependent/read_vector.cpp @@ -0,0 +1,7 @@ +#include + +namespace hi5_dependent { +std::vector read_vector(const HighFive::DataSet& dset) { + return dset.read>(); +} +} // namespace hi5_dependent diff --git a/tests/cmake_integration/dependent_library/src/hi5_dependent/write_vector.cpp b/tests/cmake_integration/dependent_library/src/hi5_dependent/write_vector.cpp new file mode 100644 index 000000000..595cd9678 --- /dev/null +++ b/tests/cmake_integration/dependent_library/src/hi5_dependent/write_vector.cpp @@ -0,0 +1,9 @@ +#include + +namespace hi5_dependent { + +HighFive::DataSet write_vector(HighFive::File& file, const std::vector& x) { + return file.createDataSet("foo", x); +} + +} // namespace hi5_dependent diff --git a/tests/cmake_integration/test_cmake_integration.sh b/tests/cmake_integration/test_cmake_integration.sh new file mode 100644 index 000000000..cf80fbfdd --- /dev/null +++ b/tests/cmake_integration/test_cmake_integration.sh @@ -0,0 +1,104 @@ +#!/usr/bin/env bash +set -xeuo 
pipefail +cd "$( dirname "${BASH_SOURCE[0]}")" # cd here + +# All output should be within this directory. +TMP_DIR="${PWD}/tmp" + +# Root of the cmake integration tests. +TEST_DIR="${PWD}" + +# Path of the sources, build and install directory of HighFive. +HIGHFIVE_DIR="${TEST_DIR}/../.." +HIGHFIVE_BUILD_DIR="${TMP_DIR}/build-highfive" +HIGHFIVE_INSTALL_DIR="${HIGHFIVE_BUILD_DIR}/install" + +export HIGHFIVE_GIT_REPOSITORY="file://$(realpath "$HIGHFIVE_DIR")" +export HIGHFIVE_GIT_TAG=$(git rev-parse HEAD) + +test_dependent_library() { + local project="dependent_library" + local project_dir="${TEST_DIR}/${project}" + + for use_boost in On Off + do + local build_dir="${TMP_DIR}/build" + local install_dir="${TMP_DIR}/build/install" + + rm -rf ${build_dir} || true + + cmake "$@" \ + -DUSE_BOOST=${use_boost} \ + -DCMAKE_PREFIX_PATH="${HIGHFIVE_INSTALL_DIR}" \ + -DCMAKE_INSTALL_PREFIX="${install_dir}" \ + -B "${build_dir}" "${project_dir}" + + cmake --build "${build_dir}" --verbose --target install + + local test_project="test_dependent_library" + local test_build_dir="${TMP_DIR}/test_build" + local test_install_dir="${TMP_DIR}/test_build/install" + + rm -rf ${test_build_dir} || true + + cmake -DUSE_BOOST=${use_boost} \ + -DCMAKE_PREFIX_PATH="${HIGHFIVE_INSTALL_DIR};${install_dir}" \ + -DCMAKE_INSTALL_PREFIX="${test_install_dir}" \ + -B "${test_build_dir}" "${test_project}" + + cmake --build "${test_build_dir}" --verbose + ctest --test-dir "${test_build_dir}" --verbose + + done +} + +test_application() { + local project="application" + local project_dir="${TEST_DIR}/${project}" + local dep_dir="${TEST_DIR}/${project}/deps/HighFive" + + rm "${dep_dir}" || true + ln -sf "${HIGHFIVE_DIR}" "${dep_dir}" + + echo ${HIGHFIVE_DIR} + echo ${dep_dir} + + for vendor in submodule fetch_content external + do + for use_boost in On Off + do + local build_dir="${TMP_DIR}/build" + local install_dir="${TMP_DIR}/build/install" + + rm -rf ${build_dir} || true + + cmake "$@" \ + -DUSE_BOOST=${use_boost} \ + -DVENDOR_STRATEGY=${vendor} \ + -DCMAKE_PREFIX_PATH="${HIGHFIVE_INSTALL_DIR}" \ + -DCMAKE_INSTALL_PREFIX="${install_dir}" \ + -B "${build_dir}" "${project_dir}" + + cmake --build "${build_dir}" --verbose --target install + ctest --test-dir "${build_dir}" + "${install_dir}"/bin/Hi5Application + done + done +} + +cmake -DHIGHFIVE_EXAMPLES=OFF \ + -DHIGHFIVE_UNIT_TESTS=OFF \ + -DCMAKE_INSTALL_PREFIX="${HIGHFIVE_INSTALL_DIR}" \ + -B "${HIGHFIVE_BUILD_DIR}" \ + "${HIGHFIVE_DIR}" + +cmake --build "${HIGHFIVE_BUILD_DIR}" --target install + +for integration in Include full short bailout +do + test_dependent_library \ + -DINTEGRATION_STRATEGY=${integration} + + test_application \ + -DINTEGRATION_STRATEGY=${integration} +done diff --git a/tests/cmake_integration/test_dependent_library/CMakeList.txt b/tests/cmake_integration/test_dependent_library/CMakeList.txt new file mode 100644 index 000000000..b8b7cb021 --- /dev/null +++ b/tests/cmake_integration/test_dependent_library/CMakeList.txt @@ -0,0 +1,22 @@ +cmake_minimum_required(VERSION 3.14) +project(TestHi5Dependent VERSION 0.1) + +if(NOT DEFINED CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 11) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + set(CMAKE_CXX_EXTENSIONS OFF) +endif() + +add_executable(test_hi5_dependent test_dependent_library.cpp) + +if(NOT USE_BOOST) + find_package(Hi5Dependent REQUIRED) +else() + find_package(Hi5Dependent REQUIRED COMPONENTS boost) + target_link_libraries(test_hi5_dependent PUBLIC Hi5Dependent::Boost) +endif() + 
+target_link_libraries(test_hi5_dependent PUBLIC Hi5Dependent::Read Hi5Dependent::Write) + +enable_testing() +add_test(NAME test_hi5_dependent COMMAND ${CMAKE_CURRENT_BINARY_DIR}/test_hi5_dependent) diff --git a/tests/cmake_integration/test_dependent_library/CMakeLists.txt b/tests/cmake_integration/test_dependent_library/CMakeLists.txt new file mode 100644 index 000000000..b6c7f3375 --- /dev/null +++ b/tests/cmake_integration/test_dependent_library/CMakeLists.txt @@ -0,0 +1,22 @@ +cmake_minimum_required(VERSION 3.14) +project(TestHi5Dependent VERSION 0.1) + +if(NOT DEFINED CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 11) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + set(CMAKE_CXX_EXTENSIONS OFF) +endif() + +add_executable(test_hi5_dependent test_dependent_library.cpp) + +if(NOT USE_BOOST) + find_package(Hi5Dependent REQUIRED) + target_link_libraries(test_hi5_dependent PUBLIC Hi5Dependent::Read Hi5Dependent::Write) +else() + find_package(Hi5Dependent REQUIRED COMPONENTS boost) + target_link_libraries(test_hi5_dependent PUBLIC Hi5Dependent::Read Hi5Dependent::Write) + target_link_libraries(test_hi5_dependent PUBLIC Hi5Dependent::Boost) +endif() + +enable_testing() +add_test(NAME run_test_hi5_dependent COMMAND ${CMAKE_CURRENT_BINARY_DIR}/test_hi5_dependent) diff --git a/tests/cmake_integration/test_dependent_library/test_dependent_library.cpp b/tests/cmake_integration/test_dependent_library/test_dependent_library.cpp new file mode 100644 index 000000000..40aef20a6 --- /dev/null +++ b/tests/cmake_integration/test_dependent_library/test_dependent_library.cpp @@ -0,0 +1,34 @@ +#include +#include + + +int main() { + { + auto file = HighFive::File("foo.h5", HighFive::File::Truncate); + + auto dset = hi5_dependent::write_vector(file, {1.0, 2.0, 3.0}); + auto x = hi5_dependent::read_vector(dset); + + for (size_t i = 0; i < x.size(); i++) { + if (x[i] != double(i + 1)) { + throw std::runtime_error("HighFiveDemo is broken."); + } + } + + std::cout << "Hi5Dependent: success \n"; + } + +#if HI5_DEPENDENT_HAS_BOOST == 1 + { + auto file = HighFive::File("bar.h5", HighFive::File::Truncate); + + boost::numeric::ublas::matrix x(3, 5); + auto dset = hi5_dependent::write_boost(file, x); + auto y = hi5_dependent::read_boost(dset); + + std::cout << "Hi5BoostDependent: success \n"; + } +#endif + + return 0; +} diff --git a/tests/test_dependent_library/CMakeLists.txt b/tests/test_dependent_library/CMakeLists.txt deleted file mode 100644 index 570dba224..000000000 --- a/tests/test_dependent_library/CMakeLists.txt +++ /dev/null @@ -1,42 +0,0 @@ -# This is a sample library to test integration via add_subdirectory and CMakeConfig -cmake_minimum_required(VERSION 3.1) - -project(test_project VERSION 0.1) - -if(NOT DEFINED CMAKE_CXX_STANDARD) - set(CMAKE_CXX11_STANDARD_COMPILE_OPTION "-std=c++11") # For come compilers under cmake 3.1 - set(CMAKE_CXX_STANDARD 11) - set(CMAKE_CXX_STANDARD_REQUIRED ON) - set(CMAKE_CXX_EXTENSIONS OFF) -endif() - -option(USE_BUNDLED_HIGHFIVE "Use highfive from deps folder. 
Otherwise must be installed" ON) - -if(USE_BUNDLED_HIGHFIVE) - add_subdirectory("deps/HighFive" EXCLUDE_FROM_ALL) -else() - find_package(HighFive REQUIRED QUIET) -endif() - -add_library(simpleton SHARED "src/simpleton.cpp" "src/otherton.cpp") -target_include_directories(simpleton - PUBLIC - $ - $) -target_link_libraries(simpleton PUBLIC HighFive) -set_property(TARGET simpleton PROPERTY POSITION_INDEPENDENT_CODE ON) - -add_library(otherton STATIC "src/simpleton.cpp" "src/otherton.cpp") -target_include_directories(otherton - PUBLIC - $ - $) -target_link_libraries(otherton PUBLIC HighFive) -set_property(TARGET otherton PROPERTY POSITION_INDEPENDENT_CODE OFF) - -install( - TARGETS simpleton otherton - EXPORT simpletonTarget - DESTINATION lib - ARCHIVE DESTINATION lib) -install(EXPORT simpletonTarget DESTINATION lib) diff --git a/tests/test_dependent_library/deps/.gitignore b/tests/test_dependent_library/deps/.gitignore deleted file mode 100644 index 5e7d2734c..000000000 --- a/tests/test_dependent_library/deps/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -# Ignore everything in this directory -* -# Except this file -!.gitignore diff --git a/tests/test_dependent_library/include/simpleton.hpp b/tests/test_dependent_library/include/simpleton.hpp deleted file mode 100644 index b98a09fda..000000000 --- a/tests/test_dependent_library/include/simpleton.hpp +++ /dev/null @@ -1,14 +0,0 @@ -#ifndef H5_TEST_SIMPLETON_HPP -#define H5_TEST_SIMPLETON_HPP - -// Include all headers here to catch any missing `inline` statements, since -// they will be included by two different compilation units. -#include - -// Boost should always be found in this setup -#include - -void function(const HighFive::Object& obj); -void other_function(const boost::numeric::ublas::matrix& m); - -#endif diff --git a/tests/test_dependent_library/src/otherton.cpp b/tests/test_dependent_library/src/otherton.cpp deleted file mode 100644 index 3e10a3630..000000000 --- a/tests/test_dependent_library/src/otherton.cpp +++ /dev/null @@ -1,5 +0,0 @@ -#include "simpleton.hpp" - -void other_function(const boost::numeric::ublas::matrix& m) { - m(0, 0) * 0.0; -} diff --git a/tests/test_dependent_library/src/simpleton.cpp b/tests/test_dependent_library/src/simpleton.cpp deleted file mode 100644 index 12cef5bfc..000000000 --- a/tests/test_dependent_library/src/simpleton.cpp +++ /dev/null @@ -1,9 +0,0 @@ -#include - -#include "simpleton.hpp" - -void function(const HighFive::Object& obj) { - if (!obj.isValid()) { - throw std::exception(); - } -} diff --git a/tests/test_project/CMakeLists.txt b/tests/test_project/CMakeLists.txt deleted file mode 100644 index 1a8ef098a..000000000 --- a/tests/test_project/CMakeLists.txt +++ /dev/null @@ -1,25 +0,0 @@ -# This is a sample project to test integration via add_subdirectory and CMakeConfig -cmake_minimum_required(VERSION 3.1) - -project(test_project VERSION 0.1) - -if(NOT DEFINED CMAKE_CXX_STANDARD) - set(CMAKE_CXX11_STANDARD_COMPILE_OPTION "-std=c++11") # For come compilers under cmake 3.1 - set(CMAKE_CXX_STANDARD 11) - set(CMAKE_CXX_STANDARD_REQUIRED ON) - set(CMAKE_CXX_EXTENSIONS OFF) -endif() - -option(USE_BUNDLED_HIGHFIVE "Use highfive from deps folder. 
Otherwise must be installed" ON) - -if(USE_BUNDLED_HIGHFIVE) - add_subdirectory("deps/HighFive" EXCLUDE_FROM_ALL) -else() - find_package(HighFive REQUIRED) -endif() - -add_executable(read_write_bin "read_write_vector_dataset.cpp") -target_link_libraries(read_write_bin HighFive) - -enable_testing() -add_test(NAME test_project COMMAND ${CMAKE_CURRENT_BINARY_DIR}/read_write_bin) diff --git a/tests/test_project/deps/.gitignore b/tests/test_project/deps/.gitignore deleted file mode 100644 index 5e7d2734c..000000000 --- a/tests/test_project/deps/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -# Ignore everything in this directory -* -# Except this file -!.gitignore diff --git a/tests/test_project/read_write_vector_dataset.cpp b/tests/test_project/read_write_vector_dataset.cpp deleted file mode 120000 index 84b5175b8..000000000 --- a/tests/test_project/read_write_vector_dataset.cpp +++ /dev/null @@ -1 +0,0 @@ -../../src/examples/read_write_vector_dataset.cpp \ No newline at end of file diff --git a/tests/test_project_integration.sh b/tests/test_project_integration.sh deleted file mode 100644 index 955219952..000000000 --- a/tests/test_project_integration.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/sh -set -xeo pipefail -cd "$( dirname "${BASH_SOURCE[0]}")" # cd here - -BUILD_DIR="${PWD}/build-highfive" -ROOT="${PWD}/.." -TEST_DIR="${PWD}" -INSTALL_DIR="${BUILD_DIR}/install" - -test_install() { - local project="${1}" - local project_dir="${TEST_DIR}/${project}" - local dep_dir="${TEST_DIR}/${project}/deps/HighFive" - shift - - pushd "${project_dir}" - - local build_dir="build" - - ln -sf ../../.. "${dep_dir}" - - cmake "$@" -B "${build_dir}" . - cmake --build "${build_dir}" --verbose - ctest --test-dir "${build_dir}" - - rm -f "${dep_dir}" - rm -rf "${build_dir}" - - popd -} - -cmake "${ROOT}" \ - -DHIGHFIVE_EXAMPLES=OFF \ - -DHIGHFIVE_UNIT_TESTS=OFF \ - -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" \ - -B "${BUILD_DIR}" -cmake --build "${BUILD_DIR}" --target install - -for project in test_project test_dependent_library; do - # Case 1. Base case: include subdirectory - test_install "${project}" - - # Case 2. We use an install dir and all deps configuration - # Install highfive (no tests required) - test_install "${project}" \ - -DUSE_BUNDLED_HIGHFIVE=NO \ - -DHIGHFIVE_USE_INSTALL_DEPS=YES \ - -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" - - # Case 3. 
We redetect-dependencies - test_install "${project}" \ - -DUSE_BUNDLED_HIGHFIVE=NO \ - -DHIGHFIVE_USE_INSTALL_DEPS=NO \ - -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" -done - -rm -rf "${BUILD_DIR}" diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 6c19a1d2b..93533ee91 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -1,6 +1,5 @@ include(CTest) include(Catch) -include(HighFiveWarnings) if(MSVC) add_definitions(/bigobj) @@ -14,7 +13,6 @@ foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_fiv endforeach() if(HIGHFIVE_PARALLEL_HDF5) - include(TestHelpers) set(tests_parallel_src "tests_high_five_parallel.cpp") ## parallel MPI tests @@ -29,7 +27,7 @@ if(HIGHFIVE_PARALLEL_HDF5) file(READ "${original_catch_script}" original_catch_script_contents) string(REGEX REPLACE "(add_command\\(add_test.*TEST_EXECUTOR})" - "\\1 ${TEST_MPI_EXEC_PREFIX_DEFAULT} -n 2" + "\\1 ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2" modified_catch_script_contents "${original_catch_script_contents}") if(original_catch_script_contents STREQUAL modified_catch_script_contents) @@ -44,17 +42,17 @@ endif() option(HIGHFIVE_TEST_SINGLE_INCLUDES "Enable testing single includes" FALSE) if(HIGHFIVE_TEST_SINGLE_INCLUDES) - file(GLOB public_headers LIST_DIRECTORIES false RELATIVE ${PROJECT_SOURCE_DIR}/include ${PROJECT_SOURCE_DIR}/include/highfive/*.hpp) + file(GLOB CONFIGURE_DEPENDS public_headers LIST_DIRECTORIES false RELATIVE ${PROJECT_SOURCE_DIR}/include ${PROJECT_SOURCE_DIR}/include/highfive/*.hpp) foreach(PUBLIC_HEADER ${public_headers}) - if(PUBLIC_HEADER STREQUAL "highfive/boost.hpp" AND NOT HIGHFIVE_USE_BOOST) + if(PUBLIC_HEADER STREQUAL "highfive/boost.hpp" AND NOT HIGHFIVE_TEST_BOOST) continue() endif() - if(PUBLIC_HEADER STREQUAL "highfive/half_float.hpp" AND NOT HIGHFIVE_USE_HALF_FLOAT) + if(PUBLIC_HEADER STREQUAL "highfive/half_float.hpp" AND NOT HIGHFIVE_TEST_HALF_FLOAT) continue() endif() - if(PUBLIC_HEADER STREQUAL "highfive/eigen.hpp" AND NOT HIGHFIVE_USE_EIGEN) + if(PUBLIC_HEADER STREQUAL "highfive/eigen.hpp" AND NOT HIGHFIVE_TEST_EIGEN) continue() endif() From f32d18696d2570d3d364ecbbc047d9ed1c39cc16 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Wed, 14 Feb 2024 17:40:20 +0100 Subject: [PATCH 57/97] Drop support of C++11 and require C++14. 
(#957) --- CMakeLists.txt | 14 +++----------- README.md | 4 ++-- tests/cmake_integration/application/CMakeLists.txt | 2 +- .../dependent_library/CMakeLists.txt | 2 +- .../test_dependent_library/CMakeList.txt | 2 +- .../test_dependent_library/CMakeLists.txt | 2 +- 6 files changed, 9 insertions(+), 17 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index c1446545c..a5c5d113c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -70,21 +70,13 @@ mark_as_advanced(HIGHFIVE_SANITIZER) # ----------------------------------- if(NOT DEFINED CMAKE_CXX_STANDARD) - if(HIGHFIVE_TEST_XTENSOR) - set(CMAKE_CXX_STANDARD 14) - else() - set(CMAKE_CXX_STANDARD 11) - endif() + set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_EXTENSIONS OFF) endif() -if(CMAKE_CXX_STANDARD EQUAL 98) - message(FATAL_ERROR "HighFive needs to be compiled with at least C++11") -endif() - -if(HIGHFIVE_TEST_XTENSOR AND CMAKE_CXX_STANDARD LESS 14) - message(FATAL_ERROR "XTensor requires C++14 or newer.") +if(CMAKE_CXX_STANDARD EQUAL 98 OR CMAKE_CXX_STANDARD LESS 14) + message(FATAL_ERROR "HighFive needs to be compiled with at least C++14") endif() add_compile_definitions(HIGHFIVE_CXX_STD=${CMAKE_CXX_STANDARD}) diff --git a/README.md b/README.md index 69e979f92..ef35f990d 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ Documentation: https://bluebrain.github.io/HighFive/ ## Brief -HighFive is a modern header-only C++11 friendly interface for libhdf5. +HighFive is a modern header-only C++14 friendly interface for libhdf5. HighFive supports STL vector/string, Boost::UBLAS, Boost::Multi-array and Xtensor. It handles C++ from/to HDF5 with automatic type mapping. HighFive does not require additional libraries (see dependencies). @@ -24,7 +24,7 @@ It integrates nicely with other CMake projects by defining (and exporting) a Hig - Simple C++-ish minimalist interface - No other dependency than libhdf5 - Zero overhead -- Support C++11 +- Support C++14 ### Feature support - create/read/write files, datasets, attributes, groups, dataspaces. 
diff --git a/tests/cmake_integration/application/CMakeLists.txt b/tests/cmake_integration/application/CMakeLists.txt index 469344e9b..8f2a71423 100644 --- a/tests/cmake_integration/application/CMakeLists.txt +++ b/tests/cmake_integration/application/CMakeLists.txt @@ -6,7 +6,7 @@ cmake_minimum_required(VERSION 3.14) project(Hi5Application VERSION 0.1) if(NOT DEFINED CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 11) + set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_EXTENSIONS OFF) endif() diff --git a/tests/cmake_integration/dependent_library/CMakeLists.txt b/tests/cmake_integration/dependent_library/CMakeLists.txt index 1cdbaf35a..ad76d99b2 100644 --- a/tests/cmake_integration/dependent_library/CMakeLists.txt +++ b/tests/cmake_integration/dependent_library/CMakeLists.txt @@ -15,7 +15,7 @@ cmake_minimum_required(VERSION 3.14) project(Hi5Dependent VERSION 0.1) if(NOT DEFINED CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 11) + set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_EXTENSIONS OFF) endif() diff --git a/tests/cmake_integration/test_dependent_library/CMakeList.txt b/tests/cmake_integration/test_dependent_library/CMakeList.txt index b8b7cb021..08bc4a4ad 100644 --- a/tests/cmake_integration/test_dependent_library/CMakeList.txt +++ b/tests/cmake_integration/test_dependent_library/CMakeList.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.14) project(TestHi5Dependent VERSION 0.1) if(NOT DEFINED CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 11) + set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_EXTENSIONS OFF) endif() diff --git a/tests/cmake_integration/test_dependent_library/CMakeLists.txt b/tests/cmake_integration/test_dependent_library/CMakeLists.txt index b6c7f3375..c9023f98a 100644 --- a/tests/cmake_integration/test_dependent_library/CMakeLists.txt +++ b/tests/cmake_integration/test_dependent_library/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.14) project(TestHi5Dependent VERSION 0.1) if(NOT DEFINED CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 11) + set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_EXTENSIONS OFF) endif() From aa43041b99e4e16bc01eb54aa5b15bbd270c5905 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 15 Feb 2024 15:28:40 +0100 Subject: [PATCH 58/97] Fix when CI is run. (#963) The tests should run for every PR and when merged into the main or `v2.x` branch. Docs should be published only when merging into `v2.x` (for now). 
--- .github/workflows/ci.yml | 8 +++++--- .github/workflows/clang_format.yml | 6 ++---- .github/workflows/coverage.yml | 7 ++++--- .github/workflows/gh-pages.yml | 6 ++++-- .github/workflows/version_file.yml | 8 +++++--- 5 files changed, 20 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3460c92fe..032cab8e0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,12 +7,14 @@ concurrency: on: push: branches: - - ci_test - - release/** + - master + - main + - v2.x pull_request: branches: - master - - release/** + - main + - v2.x paths-ignore: - '**.md' - '**.rst' diff --git a/.github/workflows/clang_format.yml b/.github/workflows/clang_format.yml index 56f2fd8d5..7b9be4909 100644 --- a/.github/workflows/clang_format.yml +++ b/.github/workflows/clang_format.yml @@ -8,10 +8,8 @@ on: pull_request: branches: - master - paths-ignore: - - '**.md' - - '**.rst' - - 'doc/**' + - main + - v2.x jobs: Code_Format: diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index aaf575a7d..e5f3ed5fd 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -8,12 +8,13 @@ on: push: branches: - master - - ci_test - - release/** + - main + - v2.x pull_request: branches: - master - - release/** + - main + - v2.x paths-ignore: - '**.md' - '**.rst' diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index 2032f91ab..101e7234d 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -3,10 +3,12 @@ name: gh-pages on: push: branches: - - master + - v2.x pull_request: branches: + - v2.x - master + - main jobs: @@ -39,7 +41,7 @@ jobs: cp -r doc/poster build/doc/html/ - name: Deploy to GitHub Pages - if: ${{ success() && github.ref == 'refs/heads/master' && github.event_name == 'push' }} + if: ${{ success() && github.ref == 'refs/heads/v2.x' && github.event_name == 'push' }} uses: crazy-max/ghaction-github-pages@v2 with: target_branch: gh-pages diff --git a/.github/workflows/version_file.yml b/.github/workflows/version_file.yml index 56703f593..55cf0f6f5 100644 --- a/.github/workflows/version_file.yml +++ b/.github/workflows/version_file.yml @@ -3,12 +3,14 @@ name: HighFive Check Version File on: push: branches: - - ci_test - - release/** + - master + - main + - v2.x pull_request: branches: - master - - release/** + - main + - v2.x jobs: CheckVersion: From a169b83422d0220bc1fbf44f4de0b5ae2daa09d5 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 16 Feb 2024 15:21:49 +0100 Subject: [PATCH 59/97] Pass dimensions to 'serialize'. (#939) By passing the dimensions to `serialize` allows serializing nested pointers. This was previously not possible because, we didn't know how many elements the pointer pointed to. The logic for obtaining the dimensions of the array is: * When creating/opening the array the memspace is set to be the same as the filespace. * Performing a selection either leads to a simple (packed) multi-dimensional memspace; or to a one-dimensional memspace. Therefore, if we perform a write without selection, we know the dimensions the input array must have. If we perform a selection we again know the dimensions the input buffer needs to have. 
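For illustration, a minimal self-contained sketch of the pattern (simplified names and types, not the actual `inspector` code touched by this patch): each level of `serialize` receives the dataspace dimensions, strips the leading one, and uses the remaining extents to compute the stride of its inner elements. This is what makes nested pointers serializable — the pointer itself cannot report how many elements it owns, but the peeled `dims` can.

    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <numeric>
    #include <vector>

    // Scalar case: copy one element into the flat output buffer.
    void serialize(double val, const std::vector<std::size_t>& /*dims*/, double* out) {
        *out = val;
    }

    // Container case: peel off the leading dimension, compute how many flat
    // elements one inner entry occupies (`subsize`), and recurse.
    void serialize(const std::vector<std::vector<double>>& val,
                   const std::vector<std::size_t>& dims,
                   double* out) {
        std::vector<std::size_t> subdims(dims.begin() + 1, dims.end());
        std::size_t subsize = std::accumulate(subdims.begin(),
                                              subdims.end(),
                                              std::size_t{1},
                                              std::multiplies<std::size_t>());
        for (const auto& inner: val) {
            for (std::size_t j = 0; j < subsize; ++j) {
                serialize(inner[j], subdims, out + j);
            }
            out += subsize;
        }
    }

    int main() {
        // The dimensions come from the dataset's memspace, not from the container.
        std::vector<std::size_t> dims{2, 3};
        std::vector<std::vector<double>> v{{1., 2., 3.}, {4., 5., 6.}};

        std::vector<double> flat(6);
        serialize(v, dims, flat.data());

        for (double x: flat) {
            std::cout << x << ' ';
        }
        std::cout << '\n';
        return 0;
    }

Compiled standalone, the sketch prints `1 2 3 4 5 6`, i.e. the nested container flattened in the row-major order implied by `dims`.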
--- include/highfive/bits/H5Attribute_misc.hpp | 3 +- include/highfive/bits/H5Converter_misc.hpp | 21 ++++++++----- include/highfive/bits/H5Inspector_misc.hpp | 31 +++++++++++-------- include/highfive/bits/H5Slice_traits_misc.hpp | 3 +- include/highfive/boost.hpp | 10 +++--- include/highfive/eigen.hpp | 2 +- 6 files changed, 42 insertions(+), 28 deletions(-) diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 62f6ebd82..042c63014 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -120,6 +120,7 @@ inline void Attribute::read_raw(T* array) const { template inline void Attribute::write(const T& buffer) { const DataSpace& mem_space = getMemSpace(); + auto dims = mem_space.getDimensions(); if (mem_space.getElementCount() == 0) { return; @@ -138,7 +139,7 @@ inline void Attribute::write(const T& buffer) { << " into dataset of dimensions " << mem_space.getNumberDimensions(); throw DataSpaceException(ss.str()); } - auto w = details::data_converter::serialize(buffer, file_datatype); + auto w = details::data_converter::serialize(buffer, dims, file_datatype); write_raw(w.getPointer(), buffer_info.data_type); } diff --git a/include/highfive/bits/H5Converter_misc.hpp b/include/highfive/bits/H5Converter_misc.hpp index ed387702f..2f84a7bec 100644 --- a/include/highfive/bits/H5Converter_misc.hpp +++ b/include/highfive/bits/H5Converter_misc.hpp @@ -344,23 +344,27 @@ struct Writer::type>: public ShallowCopyBuffe using super = ShallowCopyBuffer; public: - explicit Writer(const T& val, const DataType& /* file_datatype */) + explicit Writer(const T& val, + const std::vector& /* dims */, + const DataType& /* file_datatype */) : super(val){}; }; template struct Writer::type>: public DeepCopyBuffer { - explicit Writer(const T& val, const DataType& /* file_datatype */) - : DeepCopyBuffer(inspector::getDimensions(val)) { - inspector::serialize(val, this->begin()); + explicit Writer(const T& val, + const std::vector& _dims, + const DataType& /* file_datatype */) + : DeepCopyBuffer(_dims) { + inspector::serialize(val, _dims, this->begin()); } }; template struct Writer::type>: public StringBuffer { - explicit Writer(const T& val, const DataType& _file_datatype) - : StringBuffer(inspector::getDimensions(val), _file_datatype) { - inspector::serialize(val, this->begin()); + explicit Writer(const T& val, const std::vector& _dims, const DataType& _file_datatype) + : StringBuffer(_dims, _file_datatype) { + inspector::serialize(val, _dims, this->begin()); } }; @@ -402,8 +406,9 @@ struct Reader::type>: public StringBuffer static Writer serialize(const typename inspector::type& val, + const std::vector& dims, const DataType& file_datatype) { - return Writer(val, file_datatype); + return Writer(val, dims, file_datatype); } template diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index 49606005f..a1f1e0a3a 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -149,7 +149,7 @@ inspector { // Return a point of the first value of val static const hdf5_type* data(const type& val) // Take a val and serialize it inside 'out' - static void serialize(const type& val, hdf5_type* out) + static void serialize(const type& val, const std::vector& dims, hdf5_type* out) // Return an array of dimensions of the space needed for writing val static std::vector getDimensions(const type& val) } @@ -191,7 +191,7 @@ struct type_helper { 
return &val; } - static void serialize(const type& val, hdf5_type* m) { + static void serialize(const type& val, const std::vector& /* dims*/, hdf5_type* m) { static_assert(is_trivially_copyable, "The type is not trivially copyable"); *m = val; } @@ -233,7 +233,7 @@ struct inspector: type_helper { val = vec[0] != 0 ? true : false; } - static void serialize(const type& val, hdf5_type* m) { + static void serialize(const type& val, const std::vector& /* dims*/, hdf5_type* m) { *m = val ? 1 : 0; } }; @@ -251,7 +251,7 @@ struct inspector: type_helper { } template - static void serialize(const type& val, It m) { + static void serialize(const type& val, const std::vector& /* dims*/, It m) { (*m).assign(val.data(), val.size(), StringPadding::NullTerminated); } @@ -276,7 +276,7 @@ struct inspector: type_helper { throw DataSpaceException("A Reference cannot be written directly."); } - static void serialize(const type& val, hdf5_type* m) { + static void serialize(const type& val, const std::vector& /* dims*/, hdf5_type* m) { hobj_ref_t ref; val.create_ref(&ref); *m = ref; @@ -339,11 +339,12 @@ struct inspector> { } template - static void serialize(const type& val, It m) { + static void serialize(const type& val, const std::vector& dims, It m) { if (!val.empty()) { size_t subsize = inspector::getSizeVal(val[0]); + auto subdims = std::vector(dims.begin() + 1, dims.end()); for (auto&& e: val) { - inspector::serialize(e, m); + inspector::serialize(e, subdims, m); m += subsize; } } @@ -401,7 +402,7 @@ struct inspector> { throw DataSpaceException("A std::vector cannot be written directly."); } - static void serialize(const type& val, hdf5_type* m) { + static void serialize(const type& val, const std::vector& /* dims*/, hdf5_type* m) { for (size_t i = 0; i < val.size(); ++i) { m[i] = val[i] ? 
1 : 0; } @@ -468,10 +469,11 @@ struct inspector> { } template - static void serialize(const type& val, It m) { + static void serialize(const type& val, const std::vector& dims, It m) { size_t subsize = inspector::getSizeVal(val[0]); + auto subdims = std::vector(dims.begin() + 1, dims.end()); for (auto& e: val) { - inspector::serialize(e, m); + inspector::serialize(e, subdims, m); m += subsize; } } @@ -519,7 +521,9 @@ struct inspector { /* it works because there is only T[][][] currently we will fix it one day */ - static void serialize(const type& /* val */, hdf5_type* /* m */) { + static void serialize(const type& /* val */, + const std::vector& /* dims*/, + hdf5_type* /* m */) { throw DataSpaceException("Not possible to serialize a T*"); } }; @@ -575,10 +579,11 @@ struct inspector { /* it works because there is only T[][][] currently we will fix it one day */ - static void serialize(const type& val, hdf5_type* m) { + static void serialize(const type& val, const std::vector& dims, hdf5_type* m) { size_t subsize = inspector::getSizeVal(val[0]); + auto subdims = std::vector(dims.begin() + 1, dims.end()); for (size_t i = 0; i < N; ++i) { - inspector::serialize(val[i], m + i * subsize); + inspector::serialize(val[i], subdims, m + i * subsize); } } }; diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index 27c103ae2..2ae6640b0 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -242,6 +242,7 @@ template inline void SliceTraits::write(const T& buffer, const DataTransferProps& xfer_props) { const auto& slice = static_cast(*this); const DataSpace& mem_space = slice.getMemSpace(); + auto dims = mem_space.getDimensions(); auto file_datatype = slice.getDataType(); @@ -257,7 +258,7 @@ inline void SliceTraits::write(const T& buffer, const DataTransferProp << " into dataset with n = " << buffer_info.n_dimensions << " dimensions."; throw DataSpaceException(ss.str()); } - auto w = details::data_converter::serialize(buffer, file_datatype); + auto w = details::data_converter::serialize(buffer, dims, file_datatype); write_raw(w.getPointer(), buffer_info.data_type, xfer_props); } diff --git a/include/highfive/boost.hpp b/include/highfive/boost.hpp index a4364faf3..e9b89675d 100644 --- a/include/highfive/boost.hpp +++ b/include/highfive/boost.hpp @@ -68,11 +68,12 @@ struct inspector> { } template - static void serialize(const type& val, It m) { + static void serialize(const type& val, const std::vector& dims, It m) { size_t size = val.num_elements(); size_t subsize = inspector::getSizeVal(*val.origin()); + auto subdims = std::vector(dims.begin() + ndim, dims.end()); for (size_t i = 0; i < size; ++i) { - inspector::serialize(*(val.origin() + i), m + i * subsize); + inspector::serialize(*(val.origin() + i), subdims, m + i * subsize); } } @@ -133,11 +134,12 @@ struct inspector> { return inspector::data(val(0, 0)); } - static void serialize(const type& val, hdf5_type* m) { + static void serialize(const type& val, const std::vector& dims, hdf5_type* m) { size_t size = val.size1() * val.size2(); size_t subsize = inspector::getSizeVal(val(0, 0)); + auto subdims = std::vector(dims.begin() + ndim, dims.end()); for (size_t i = 0; i < size; ++i) { - inspector::serialize(*(&val(0, 0) + i), m + i * subsize); + inspector::serialize(*(&val(0, 0) + i), subdims, m + i * subsize); } } diff --git a/include/highfive/eigen.hpp b/include/highfive/eigen.hpp index f91dab24c..2aee101fc 100644 --- 
a/include/highfive/eigen.hpp +++ b/include/highfive/eigen.hpp @@ -67,7 +67,7 @@ struct inspector> { return inspector::data(*val.data()); } - static void serialize(const type& val, hdf5_type* m) { + static void serialize(const type& val, const std::vector& /* dims */, hdf5_type* m) { assert_not_buggy(val.rows(), val.cols()); std::memcpy(m, val.data(), static_cast(val.size()) * sizeof(hdf5_type)); } From aee2b9747827f4283f6144271a2f31145626d5e5 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 19 Feb 2024 09:14:16 +0100 Subject: [PATCH 60/97] Fix CMake to run tests for optional dependencies. (#965) --- .github/workflows/ci.yml | 9 ++-- cmake/HighFiveOptionalDependencies.cmake | 53 ++++++++++++++++------- tests/unit/CMakeLists.txt | 3 ++ tests/unit/data_generator.hpp | 4 +- tests/unit/supported_types.hpp | 6 +-- tests/unit/tests_high_five.hpp | 2 +- tests/unit/tests_high_five_base.cpp | 18 ++++---- tests/unit/tests_high_five_easy.cpp | 19 +++++--- tests/unit/tests_high_five_multi_dims.cpp | 4 +- 9 files changed, 75 insertions(+), 43 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 032cab8e0..7bb089a0e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,19 +43,18 @@ jobs: - config: os: ubuntu-20.04 pkgs: '' - flags: '-DHIGHFIVE_TEST_BOOST:Bool=OFF' - config: os: ubuntu-20.04 pkgs: 'libboost-all-dev libopencv-dev' - flags: '-DHIGHFIVE_TEST_OPENCV:Bool=ON -GNinja' + flags: '-DHIGHFIVE_TEST_BOOST:Bool=ON -DHIGHFIVE_TEST_OPENCV:Bool=ON -GNinja' - config: os: ubuntu-latest pkgs: 'libboost-all-dev libeigen3-dev libopencv-dev' - flags: '-DHIGHFIVE_TEST_EIGEN:Bool=ON -DHIGHFIVE_TEST_OPENCV:Bool=ON -GNinja' + flags: '-DHIGHFIVE_TEST_BOOST:Bool=ON -DHIGHFIVE_TEST_EIGEN:Bool=ON -DHIGHFIVE_TEST_OPENCV:Bool=ON -GNinja' - config: os: ubuntu-20.04 pkgs: 'libboost-all-dev' - flags: '-DCMAKE_CXX_STANDARD=17' + flags: '-DCMAKE_CXX_STANDARD=17 -DHIGHFIVE_TEST_BOOST:Bool=ON' - config: os: ubuntu-22.04 flags: '-DHIGHFIVE_TEST_BOOST=Off -DCMAKE_CXX_STANDARD=20' @@ -157,7 +156,7 @@ jobs: - name: Build env: ${{matrix.env}} run: | - CMAKE_OPTIONS=(-GNinja) + CMAKE_OPTIONS=(-DHIGHFIVE_TEST_BOOST=ON -GNinja) source $GITHUB_WORKSPACE/.github/build.sh - name: Test diff --git a/cmake/HighFiveOptionalDependencies.cmake b/cmake/HighFiveOptionalDependencies.cmake index 53d10ecba..1b27edd10 100644 --- a/cmake/HighFiveOptionalDependencies.cmake +++ b/cmake/HighFiveOptionalDependencies.cmake @@ -1,27 +1,48 @@ -if(HIGHFIVE_TEST_BOOST AND NOT TARGET HighFiveBoostDependency) +if(NOT TARGET HighFiveBoostDependency) add_library(HighFiveBoostDependency INTERFACE) - find_package(Boost REQUIRED) - target_link_libraries(HighFiveBoostDependency INTERFACE Boost::headers) - # TODO check if we need Boost::disable_autolinking to cause: - # -DBOOST_ALL_NO_LIB (does something on MSVC). + if(HIGHFIVE_TEST_BOOST) + find_package(Boost REQUIRED) + target_link_libraries(HighFiveBoostDependency INTERFACE Boost::headers) + # TODO check if we need Boost::disable_autolinking to cause: + # -DBOOST_ALL_NO_LIB (does something on MSVC). 
+ target_compile_definitions(HighFiveBoostDependency INTERFACE HIGHFIVE_TEST_BOOST=1) + endif() endif() -if(HIGHFIVE_TEST_EIGEN AND NOT TARGET HighFiveEigenDependency) +if(NOT TARGET HighFiveEigenDependency) add_library(HighFiveEigenDependency INTERFACE) - find_package(Eigen3 REQUIRED NO_MODULE) - target_link_libraries(HighFiveEigenDependency INTERFACE Eigen3::Eigen) + if(HIGHFIVE_TEST_EIGEN) + find_package(Eigen3 REQUIRED NO_MODULE) + target_link_libraries(HighFiveEigenDependency INTERFACE Eigen3::Eigen) + target_compile_definitions(HighFiveEigenDependency INTERFACE HIGHFIVE_TEST_EIGEN=1) + endif() endif() -if(HIGHFIVE_TEST_XTENSOR AND NOT TARGET HighFiveXTensorDependency) +if(NOT TARGET HighFiveXTensorDependency) add_library(HighFiveXTensorDependency INTERFACE) - find_package(xtensor REQUIRED) - target_link_libraries(HighFiveXTensorDependency INTERFACE xtensor) + if(HIGHFIVE_TEST_XTENSOR) + find_package(xtensor REQUIRED) + target_link_libraries(HighFiveXTensorDependency INTERFACE xtensor) + target_compile_definitions(HighFiveXTensorDependency INTERFACE HIGHFIVE_TEST_XTENSOR=1) + endif() endif() -if(HIGHFIVE_TEST_OPENCV AND NOT TARGET HighFiveOpenCVDependency) +if(NOT TARGET HighFiveOpenCVDependency) add_library(HighFiveOpenCVDependency INTERFACE) - find_package(OpenCV REQUIRED) - target_include_directories(HighFiveOpenCVDependency SYSTEM INTERFACE ${OpenCV_INCLUDE_DIRS}) - target_link_libraries(HighFiveOpenCVDependency INTERFACE ${OpenCV_LIBS}) - target_compile_definitions(HighFiveOpenCVDependency INTERFACE H5_USE_OPENCV) + if(HIGHFIVE_TEST_OPENCV) + find_package(OpenCV REQUIRED) + target_include_directories(HighFiveOpenCVDependency SYSTEM INTERFACE ${OpenCV_INCLUDE_DIRS}) + target_link_libraries(HighFiveOpenCVDependency INTERFACE ${OpenCV_LIBS}) + target_compile_definitions(HighFiveOpenCVDependency INTERFACE HIGHFIVE_TEST_OPENCV=1) + endif() +endif() + +if(NOT TARGET HighFiveOptionalDependencies) + add_library(HighFiveOptionalDependencies INTERFACE) + target_link_libraries(HighFiveOptionalDependencies INTERFACE + HighFiveBoostDependency + HighFiveEigenDependency + HighFiveXTensorDependency + HighFiveOpenCVDependency + ) endif() diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 93533ee91..048ccd9b6 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -9,6 +9,8 @@ endif() foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type test_legacy) add_executable(${test_name} "${test_name}.cpp") target_link_libraries(${test_name} HighFive HighFiveWarnings Catch2::Catch2WithMain) + target_link_libraries(${test_name} HighFiveOptionalDependencies) + catch_discover_tests(${test_name}) endforeach() @@ -18,6 +20,7 @@ if(HIGHFIVE_PARALLEL_HDF5) ## parallel MPI tests add_executable(tests_parallel_bin ${tests_parallel_src}) target_link_libraries(tests_parallel_bin HighFive HighFiveWarnings Catch2::Catch2) + target_link_libraries(tests_parallel_bin HighFiveOptionalDependencies) # We need to patch in a call to `mpirun` or equivalent when using # parallel tests. 
Somehow, this is not foreseen in Catch2, modify the diff --git a/tests/unit/data_generator.hpp b/tests/unit/data_generator.hpp index f5dc681c5..9c17dfe81 100644 --- a/tests/unit/data_generator.hpp +++ b/tests/unit/data_generator.hpp @@ -7,7 +7,7 @@ #include #include -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST #include #endif @@ -200,7 +200,7 @@ struct ContainerTraits>: public STLLikeContainerTraits struct ContainerTraits> { using container_type = typename boost::multi_array; diff --git a/tests/unit/supported_types.hpp b/tests/unit/supported_types.hpp index f708303b1..843814f91 100644 --- a/tests/unit/supported_types.hpp +++ b/tests/unit/supported_types.hpp @@ -6,7 +6,7 @@ #include #include -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST #include #endif @@ -30,7 +30,7 @@ struct STDArray { using type = std::array, n>; }; -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST template struct BoostMultiArray { template @@ -83,7 +83,7 @@ using scalar_types = typename ConcatenateTuples>::type; using supported_array_types = typename ConcatenateTuples< -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST typename ContainerProduct, scalar_types_boost>::type, typename ContainerProduct>, scalar_types_boost>::type, typename ContainerProduct>, scalar_types_boost>::type, diff --git a/tests/unit/tests_high_five.hpp b/tests/unit/tests_high_five.hpp index fa0cfd714..25839c69e 100644 --- a/tests/unit/tests_high_five.hpp +++ b/tests/unit/tests_high_five.hpp @@ -42,7 +42,7 @@ using base_test_types = std::tuple; -#ifdef H5_USE_HALF_FLOAT +#ifdef HIGHFIVE_TEST_HALF_FLOAT #include using float16_t = half_float::half; diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index bd00ce15c..0ef9715bd 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -27,11 +27,11 @@ #include #include "tests_high_five.hpp" -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST #include #endif -#ifdef H5_USE_EIGEN +#ifdef HIGHFIVE_TEST_EIGEN #include #endif @@ -1529,7 +1529,7 @@ struct CreateEmptyVector { } }; -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST template struct CreateEmptyBoostMultiArray { using container_type = boost::multi_array(n_dim)>; @@ -1546,7 +1546,7 @@ struct CreateEmptyBoostMultiArray { #endif -#ifdef H5_USE_EIGEN +#ifdef HIGHFIVE_TEST_EIGEN struct CreateEmptyEigenVector { using container_type = Eigen::VectorXi; @@ -1676,7 +1676,7 @@ void check_empty_everything(const std::vector& dims) { } } -#ifdef H5_USE_EIGEN +#ifdef HIGHFIVE_TEST_EIGEN template void check_empty_eigen(const std::vector&) {} @@ -1703,13 +1703,13 @@ void check_empty(const std::vector& dims) { check_empty_everything>(dims); } -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST SECTION("boost::multi_array") { check_empty_everything>(dims); } #endif -#ifdef H5_USE_EIGEN +#ifdef HIGHFIVE_TEST_EIGEN check_empty_eigen(dims); #endif } @@ -2559,7 +2559,7 @@ TEST_CASE("HighFiveDataTypeClass") { CHECK(((Float | String) & String) == String); } -#ifdef H5_USE_EIGEN +#ifdef HIGHFIVE_TEST_EIGEN template void test_eigen_vec(File& file, const std::string& test_flavor, const T& vec_input, T& vec_output) { @@ -2636,7 +2636,7 @@ TEST_CASE("HighFiveEigen") { CHECK_THROWS(test_eigen_vec(file, ds_name_flavor, vec_in, vec_out)); } -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST // boost::multi_array { ds_name_flavor = "BMultiEigenVector3f"; diff --git a/tests/unit/tests_high_five_easy.cpp b/tests/unit/tests_high_five_easy.cpp index e003c3234..aa30b4e96 100644 --- a/tests/unit/tests_high_five_easy.cpp +++ 
b/tests/unit/tests_high_five_easy.cpp @@ -20,13 +20,22 @@ #include -#include -#ifdef H5_USE_XTENSOR +#ifdef HIGHFIVE_TEST_XTENSOR #include #include #endif +#ifdef HIGHFIVE_TEST_EIGEN +#include +#endif + +#ifdef HIGHFIVE_TEST_OPENCV +#define H5_USE_OPENCV +#endif + +#include + #include TEST_CASE("H5Easy_Compression") { @@ -179,7 +188,7 @@ TEST_CASE("H5Easy_Attribute_scalar") { CHECK(c == c_r); } -#ifdef H5_USE_XTENSOR +#ifdef HIGHFIVE_TEST_XTENSOR TEST_CASE("H5Easy_extend1d") { H5Easy::File file("h5easy_extend1d.h5", H5Easy::File::Overwrite); @@ -304,7 +313,7 @@ TEST_CASE("H5Easy_Attribute_xtensor") { } #endif -#ifdef H5_USE_EIGEN +#ifdef HIGHFIVE_TEST_EIGEN TEST_CASE("H5Easy_Eigen_MatrixX") { H5Easy::File file("h5easy_eigen_MatrixX.h5", H5Easy::File::Overwrite); @@ -439,7 +448,7 @@ TEST_CASE("H5Easy_Attribute_Eigen_MatrixX") { } #endif -#ifdef H5_USE_OPENCV +#ifdef HIGHFIVE_TEST_OPENCV TEST_CASE("H5Easy_OpenCV_Mat_") { H5Easy::File file("h5easy_opencv_Mat_.h5", H5Easy::File::Overwrite); diff --git a/tests/unit/tests_high_five_multi_dims.cpp b/tests/unit/tests_high_five_multi_dims.cpp index 31757d6c5..60ec66cae 100644 --- a/tests/unit/tests_high_five_multi_dims.cpp +++ b/tests/unit/tests_high_five_multi_dims.cpp @@ -13,7 +13,7 @@ #include -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST #include #include #endif @@ -128,7 +128,7 @@ TEMPLATE_LIST_TEST_CASE("vector of array", "[template]", numerical_test_types) { } -#ifdef H5_USE_BOOST +#ifdef HIGHFIVE_TEST_BOOST template void MultiArray3DTest() { From 94ede4f756f2d1ba77296f0f25d22142e8567e0e Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 19 Feb 2024 11:34:20 +0100 Subject: [PATCH 61/97] Allow `-DHIGHFIVE_MAX_ERROR=3`. (#967) --- CMakeLists.txt | 1 + cmake/HighFiveWarnings.cmake | 21 ++++++++++++++------- doc/developer_guide.md | 9 +++++++-- 3 files changed, 22 insertions(+), 9 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index a5c5d113c..7060fe713 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -60,6 +60,7 @@ option(HIGHFIVE_TEST_HALF_FLOAT "Enable half-precision floats" OFF) # TODO remove entirely. option(HIGHFIVE_HAS_CONCEPTS "Print readable compiler errors w/ C++20 concepts" OFF) +set(HIGHFIVE_MAX_ERROR 0 "Maximum number of compiler errors.") option(HIGHFIVE_HAS_WERROR "Convert warnings to errors." OFF) option(HIGHFIVE_GLIBCXX_ASSERTIONS "Enable bounds check for STL." OFF) # TODO these some magic to get a drop down menu in ccmake diff --git a/cmake/HighFiveWarnings.cmake b/cmake/HighFiveWarnings.cmake index 16896b648..dfd43c7f1 100644 --- a/cmake/HighFiveWarnings.cmake +++ b/cmake/HighFiveWarnings.cmake @@ -31,13 +31,20 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" -Wcast-align -Wdouble-promotion ) - endif() - if(HIGHFIVE_HAS_WERROR) - target_compile_options(HighFiveWarnings - INTERFACE - -Werror - -Wno-error=deprecated-declarations - ) + if(HIGHFIVE_MAX_ERRORS) + target_compile_options(HighFiveWarnings + INTERFACE + -fmax-errors=${HIGHFIVE_MAX_ERRORS} + ) + endif() + + if(HIGHFIVE_HAS_WERROR) + target_compile_options(HighFiveWarnings + INTERFACE + -Werror + -Wno-error=deprecated-declarations + ) + endif() endif() endif() diff --git a/doc/developer_guide.md b/doc/developer_guide.md index f129ecb1d..a5ed2767d 100644 --- a/doc/developer_guide.md +++ b/doc/developer_guide.md @@ -23,8 +23,13 @@ cmake --build build --parallel ctest --test-dir build ``` -You might want to turn Boost `-DHIGHFIVE_TEST_BOOST=On` or optional -dependencies on. 
+You might want to add: +* `-DHIGHFIVE_TEST_BOOST=On` or other optional dependencies on, +* `-DHIGHFIVE_MAX_ERROR=3` to only show the first three errors. + +Generic CMake reminders: +* `-DCMAKE_INSTALL_PREFIX` defines where HighFive will be installed, +* `-DCMAKE_PREFIX_PATH` defines where `*Config.cmake` files are found. ## Contributing There's numerous HDF5 features that haven't been wrapped yet. HighFive is a From d5f3afef35fa110bbb39feb4994ebfc32a082c88 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 19 Feb 2024 14:57:03 +0100 Subject: [PATCH 62/97] Remove `inspector::getSize{,Val}`. (#959) The method getSizeVal(T& value) is problematic because some containers don't know their dimensions, yet they need to forward the input pointer when serializing/deserializing. After changing serialize to accept the dims the number of elements can be obtained from there. It's also tricky in the context of arrays of chars, these could be a counted as one string or as multiple characters. Inside the inspector we don't know which interpretation is correct. The method getSize(dims) seems to only have a single plausible implementation. Hence, it's being removed mostly for cosmetic reasons. --- include/highfive/bits/H5Converter_misc.hpp | 2 +- include/highfive/bits/H5Inspector_misc.hpp | 55 ++-------------------- include/highfive/boost.hpp | 20 +------- include/highfive/eigen.hpp | 8 ---- 4 files changed, 7 insertions(+), 78 deletions(-) diff --git a/include/highfive/bits/H5Converter_misc.hpp b/include/highfive/bits/H5Converter_misc.hpp index 2f84a7bec..d1ba132c4 100644 --- a/include/highfive/bits/H5Converter_misc.hpp +++ b/include/highfive/bits/H5Converter_misc.hpp @@ -69,7 +69,7 @@ struct DeepCopyBuffer { using hdf5_type = typename inspector::hdf5_type; explicit DeepCopyBuffer(const std::vector& _dims) - : buffer(inspector::getSize(_dims)) + : buffer(compute_total_size(_dims)) , dims(_dims) {} hdf5_type* getPointer() { diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index a1f1e0a3a..3f69276c4 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -129,14 +129,12 @@ inspector { static constexpr size_t recursive_ndim // Is the inner type trivially copyable for optimisation // If this value is true: data() is mandatory - // If this value is false: getSizeVal, getSize, serialize, unserialize are mandatory + // If this value is false: serialize, unserialize are mandatory static constexpr bool is_trivially_copyable // Reading: // Allocate the value following dims (should be recursive) static void prepare(type& val, const std::vector dims) - // Return the size of the vector pass to/from hdf5 from a vector of dims - static size_t getSize(const std::vector& dims) // Return a pointer of the first value of val (for reading) static hdf5_type* data(type& val) // Take a serialized vector 'in', some dims and copy value to val (for reading) @@ -144,8 +142,6 @@ inspector { // Writing: - // Return the size of the vector pass to/from hdf5 from a value - static size_t getSizeVal(const type& val) // Return a point of the first value of val static const hdf5_type* data(const type& val) // Take a val and serialize it inside 'out' @@ -171,14 +167,6 @@ struct type_helper { return {}; } - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return compute_total_size(dims); - } - static void prepare(type& /* val */, const 
std::vector& /* dims */) {} static hdf5_type* data(type& val) { @@ -314,14 +302,6 @@ struct inspector> { return sizes; } - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return compute_total_size(dims); - } - static void prepare(type& val, const std::vector& dims) { val.resize(dims[0]); std::vector next_dims(dims.begin() + 1, dims.end()); @@ -341,8 +321,8 @@ struct inspector> { template static void serialize(const type& val, const std::vector& dims, It m) { if (!val.empty()) { - size_t subsize = inspector::getSizeVal(val[0]); auto subdims = std::vector(dims.begin() + 1, dims.end()); + size_t subsize = compute_total_size(subdims); for (auto&& e: val) { inspector::serialize(e, subdims, m); m += subsize; @@ -376,17 +356,6 @@ struct inspector> { return sizes; } - static size_t getSizeVal(const type& val) { - return val.size(); - } - - static size_t getSize(const std::vector& dims) { - if (dims.size() > 1) { - throw DataSpaceException("std::vector is only 1 dimension."); - } - return dims[0]; - } - static void prepare(type& val, const std::vector& dims) { if (dims.size() > 1) { throw DataSpaceException("std::vector is only 1 dimension."); @@ -439,14 +408,6 @@ struct inspector> { return sizes; } - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return compute_total_size(dims); - } - static void prepare(type& val, const std::vector& dims) { if (dims[0] > N) { std::ostringstream os; @@ -470,8 +431,8 @@ struct inspector> { template static void serialize(const type& val, const std::vector& dims, It m) { - size_t subsize = inspector::getSizeVal(val[0]); auto subdims = std::vector(dims.begin() + 1, dims.end()); + size_t subsize = compute_total_size(subdims); for (auto& e: val) { inspector::serialize(e, subdims, m); m += subsize; @@ -507,10 +468,6 @@ struct inspector { static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_copyable; - static size_t getSizeVal(const type& /* val */) { - throw DataSpaceException("Not possible to have size of a T*"); - } - static std::vector getDimensions(const type& /* val */) { throw DataSpaceException("Not possible to have size of a T*"); } @@ -556,10 +513,6 @@ struct inspector { } } - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - static std::vector getDimensions(const type& val) { std::vector sizes{N}; if (N > 0) { @@ -580,8 +533,8 @@ struct inspector { /* it works because there is only T[][][] currently we will fix it one day */ static void serialize(const type& val, const std::vector& dims, hdf5_type* m) { - size_t subsize = inspector::getSizeVal(val[0]); auto subdims = std::vector(dims.begin() + 1, dims.end()); + size_t subsize = compute_total_size(subdims); for (size_t i = 0; i < N; ++i) { inspector::serialize(val[i], subdims, m + i * subsize); } diff --git a/include/highfive/boost.hpp b/include/highfive/boost.hpp index e9b89675d..fb8a709c5 100644 --- a/include/highfive/boost.hpp +++ b/include/highfive/boost.hpp @@ -31,14 +31,6 @@ struct inspector> { return sizes; } - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return compute_total_size(dims); - } - static void prepare(type& val, const std::vector& dims) { if (dims.size() < ndim) { std::ostringstream 
os; @@ -70,8 +62,8 @@ struct inspector> { template static void serialize(const type& val, const std::vector& dims, It m) { size_t size = val.num_elements(); - size_t subsize = inspector::getSizeVal(*val.origin()); auto subdims = std::vector(dims.begin() + ndim, dims.end()); + size_t subsize = compute_total_size(subdims); for (size_t i = 0; i < size; ++i) { inspector::serialize(*(val.origin() + i), subdims, m + i * subsize); } @@ -108,14 +100,6 @@ struct inspector> { return sizes; } - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return compute_total_size(dims); - } - static void prepare(type& val, const std::vector& dims) { if (dims.size() < ndim) { std::ostringstream os; @@ -136,8 +120,8 @@ struct inspector> { static void serialize(const type& val, const std::vector& dims, hdf5_type* m) { size_t size = val.size1() * val.size2(); - size_t subsize = inspector::getSizeVal(val(0, 0)); auto subdims = std::vector(dims.begin() + ndim, dims.end()); + size_t subsize = compute_total_size(subdims); for (size_t i = 0; i < size; ++i) { inspector::serialize(*(&val(0, 0) + i), subdims, m + i * subsize); } diff --git a/include/highfive/eigen.hpp b/include/highfive/eigen.hpp index 2aee101fc..69c01a7dc 100644 --- a/include/highfive/eigen.hpp +++ b/include/highfive/eigen.hpp @@ -39,14 +39,6 @@ struct inspector> { return sizes; } - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector& dims) { - return compute_total_size(dims); - } - static void prepare(type& val, const std::vector& dims) { if (dims[0] != static_cast(val.rows()) || dims[1] != static_cast(val.cols())) { From f39a112e87b602a99d85490c61eaec2197e3cb1e Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 20 Feb 2024 08:26:11 +0100 Subject: [PATCH 63/97] Add Zenodo badge and citation instructions. 
(#970) --- .zenodo.json | 38 ++++++++++++++++++++++++++++++++++++++ README.md | 5 +++++ doc/developer_guide.md | 1 + 3 files changed, 44 insertions(+) create mode 100644 .zenodo.json diff --git a/.zenodo.json b/.zenodo.json new file mode 100644 index 000000000..0e96eedbd --- /dev/null +++ b/.zenodo.json @@ -0,0 +1,38 @@ +{ + "creators": [ + { + "affiliation": "", + "name": "Devresse, Adrien" + }, + { + "affiliation": "", + "name": "Cornu, Nicolas" + }, + { + "affiliation": "", + "name": "Grosheintz-Laval, Luc" + }, + { + "affiliation": "", + "name": "Awile, Omar" + }, + { + "affiliation": "", + "name": "de Geus, Tom" + }, + { + "affiliation": "", + "name": "Pereira, Fernando" + }, + { + "affiliation": "", + "name": "Wolf, Matthias" + }, + { + "affiliation": "", + "name": "HighFive Contributors" + } + ], + + "title": "HighFive - Header-only C++ HDF5 interface" +} diff --git a/README.md b/README.md index ef35f990d..8e04eb13c 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,7 @@ https://github.com/BlueBrain/HighFive/issues/864 [![Doxygen -> gh-pages](https://github.com/BlueBrain/HighFive/workflows/gh-pages/badge.svg?branch=master)](https://BlueBrain.github.io/HighFive/actions/workflows/gh-pages.yml?query=branch%3Amaster) [![codecov](https://codecov.io/gh/BlueBrain/HighFive/branch/master/graph/badge.svg?token=UBKxHEn7RS)](https://codecov.io/gh/BlueBrain/HighFive) [![HighFive_Integration_tests](https://github.com/BlueBrain/HighFive-testing/actions/workflows/integration.yml/badge.svg)](https://github.com/BlueBrain/HighFive-testing/actions/workflows/integration.yml) +[![Zenodo](https://zenodo.org/badge/47755262.svg)](https://zenodo.org/doi/10.5281/zenodo.10679422) Documentation: https://bluebrain.github.io/HighFive/ @@ -249,6 +250,10 @@ For bugs and issues please use [Issues](https://github.com/BlueBrain/HighFive/is # Funding & Acknowledgment The development of this software was supported by funding to the Blue Brain Project, a research center of the École polytechnique fédérale de Lausanne (EPFL), from the Swiss government's ETH Board of the Swiss Federal Institutes of Technology. + +HighFive releases are uploaded to Zenodo. If you wish to cite HighFive in a +scientific publication you can use the DOIs for the +[Zenodo records](https://zenodo.org/doi/10.5281/zenodo.10679422). Copyright © 2015-2022 Blue Brain Project/EPFL diff --git a/doc/developer_guide.md b/doc/developer_guide.md index a5ed2767d..13e360fc3 100644 --- a/doc/developer_guide.md +++ b/doc/developer_guide.md @@ -95,6 +95,7 @@ release. Once this is done perform a final round of updates: * Download the archive (`*.tar.gz`) and compute its SHA256. * Update BlueBrain Spack recipe to use the archive and not the Git commit. * Update the upstream Spack recipe. +* A Zenodo record should be generated automatically, check if it's sensible. ## Writing Tests ### Generate Multi-Dimensional Test Data From 5244490467718a56e41161f7aced09bb98f3f5a6 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 20 Feb 2024 17:13:18 +0100 Subject: [PATCH 64/97] Re-implement Eigen support. (#968) This commit re-implements support for Eigen. Supported are Matrix, Array and Map, both row and column-major. Not supported are strided or padded objects. 
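As a complement to the examples added in this patch (`eigen_matrix.cpp`, `eigen_vector.cpp`, `eigen_map.cpp`), here is a hedged sketch of the analogous round trip for `Eigen::Array`, which the commit message states is supported like `Matrix`. The header names and the fixed-size, row-major read-back are assumed to mirror `eigen_matrix.cpp`; treat this as illustrative, not as part of the patch.

    // Sketch only: assumes Eigen support is enabled and the same headers as
    // the new examples.
    #include <highfive/highfive.hpp>
    #include <highfive/eigen.hpp>

    #include <Eigen/Dense>
    #include <iostream>

    int main() {
        HighFive::File file("eigen_array.h5", HighFive::File::Truncate);

        // A dynamically sized 2x3 array of doubles.
        Eigen::ArrayXXd a(2, 3);
        // clang-format off
        a << 1, 2, 3,
             4, 5, 6;
        // clang-format on

        // Write it to the file ...
        file.createDataSet("arr", a);

        // ... and read it back as a fixed-size, row-major array, analogous to
        // the fixed-size read-back in eigen_matrix.cpp.
        using Array23d = Eigen::Array<double, 2, 3, Eigen::RowMajor>;
        auto b = file.getDataSet("arr").read<Array23d>();

        std::cout << b << "\n";
        return 0;
    }

As with `Eigen::Matrix`, strided or padded expressions remain unsupported, so the read-back target should be a plain, densely stored Array.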
--- include/highfive/eigen.hpp | 125 +++++++++++++++---- src/examples/CMakeLists.txt | 12 ++ src/examples/eigen_map.cpp | 48 +++++++ src/examples/eigen_matrix.cpp | 33 +++++ src/examples/eigen_vector.cpp | 33 +++++ tests/unit/data_generator.hpp | 187 +++++++++++++++++++++++++++- tests/unit/supported_types.hpp | 83 ++++++++++-- tests/unit/test_all_types.cpp | 24 +++- tests/unit/tests_high_five_base.cpp | 8 +- 9 files changed, 505 insertions(+), 48 deletions(-) create mode 100644 src/examples/eigen_map.cpp create mode 100644 src/examples/eigen_matrix.cpp create mode 100644 src/examples/eigen_vector.cpp diff --git a/include/highfive/eigen.hpp b/include/highfive/eigen.hpp index 69c01a7dc..aaad280ef 100644 --- a/include/highfive/eigen.hpp +++ b/include/highfive/eigen.hpp @@ -3,36 +3,36 @@ #include "bits/H5Inspector_decl.hpp" #include "H5Exception.hpp" -#include - +#include +#include namespace HighFive { namespace details { -template -struct inspector> { - using type = Eigen::Matrix; - using value_type = T; +template +struct eigen_inspector { + using type = EigenType; + using value_type = typename EigenType::Scalar; using base_type = typename inspector::base_type; using hdf5_type = base_type; + static_assert(int(EigenType::ColsAtCompileTime) == int(EigenType::MaxColsAtCompileTime), + "Padding isn't supported."); + static_assert(int(EigenType::RowsAtCompileTime) == int(EigenType::MaxRowsAtCompileTime), + "Padding isn't supported."); + + static constexpr bool is_row_major() { + return EigenType::ColsAtCompileTime == 1 || EigenType::RowsAtCompileTime == 1 || + EigenType::IsRowMajor; + } + static constexpr size_t ndim = 2; static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; - static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && + static constexpr bool is_trivially_copyable = is_row_major() && + std::is_trivially_copyable::value && inspector::is_trivially_copyable; - - static void assert_not_buggy(Eigen::Index nrows, Eigen::Index ncols) { - if (nrows > 1 && ncols > 1) { - throw std::runtime_error( - "HighFive has been broken for Eigen::Matrix. 
Please check " - "https://github.com/BlueBrain/HighFive/issues/532."); - } - } - static std::vector getDimensions(const type& val) { - assert_not_buggy(val.rows(), val.cols()); - std::vector sizes{static_cast(val.rows()), static_cast(val.cols())}; auto s = inspector::getDimensions(val.data()[0]); sizes.insert(sizes.end(), s.begin(), s.end()); @@ -45,38 +45,109 @@ struct inspector> { val.resize(static_cast(dims[0]), static_cast(dims[1])); } - - assert_not_buggy(val.rows(), val.cols()); } static hdf5_type* data(type& val) { - assert_not_buggy(val.rows(), val.cols()); + if (!is_trivially_copyable) { + throw DataSetException("Invalid used of `inspector>::data`."); + } + return inspector::data(*val.data()); } static const hdf5_type* data(const type& val) { - assert_not_buggy(val.rows(), val.cols()); + if (!is_trivially_copyable) { + throw DataSetException("Invalid used of `inspector>::data`."); + } + return inspector::data(*val.data()); } - static void serialize(const type& val, const std::vector& /* dims */, hdf5_type* m) { - assert_not_buggy(val.rows(), val.cols()); - std::memcpy(m, val.data(), static_cast(val.size()) * sizeof(hdf5_type)); + static void serialize(const type& val, const std::vector& dims, hdf5_type* m) { + Eigen::Index n_rows = val.rows(); + Eigen::Index n_cols = val.cols(); + + auto subdims = std::vector(dims.begin() + ndim, dims.end()); + auto subsize = compute_total_size(subdims); + for (Eigen::Index i = 0; i < n_rows; ++i) { + for (Eigen::Index j = 0; j < n_cols; ++j) { + inspector::serialize(val(i, j), dims, m); + m += subsize; + } + } } static void unserialize(const hdf5_type* vec_align, const std::vector& dims, type& val) { - assert_not_buggy(val.rows(), val.cols()); if (dims.size() < 2) { std::ostringstream os; os << "Impossible to pair DataSet with " << dims.size() << " dimensions into an eigen-matrix."; throw DataSpaceException(os.str()); } - std::memcpy(val.data(), vec_align, compute_total_size(dims) * sizeof(hdf5_type)); + + auto n_rows = static_cast(dims[0]); + auto n_cols = static_cast(dims[1]); + + auto subdims = std::vector(dims.begin() + ndim, dims.end()); + auto subsize = compute_total_size(subdims); + for (Eigen::Index i = 0; i < n_rows; ++i) { + for (Eigen::Index j = 0; j < n_cols; ++j) { + inspector::unserialize(vec_align, subdims, val(i, j)); + vec_align += subsize; + } + } + } +}; + +template +struct inspector> + : public eigen_inspector> { + private: + using super = eigen_inspector>; + + public: + using type = typename super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; +}; + +template +struct inspector> + : public eigen_inspector> { + private: + using super = eigen_inspector>; + + public: + using type = typename super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; +}; + + +template +struct inspector> + : public eigen_inspector> { + private: + using super = eigen_inspector>; + + public: + using type = typename super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; + + static void prepare(type& val, const std::vector& dims) { + if (dims[0] != static_cast(val.rows()) || + dims[1] != static_cast(val.cols())) { + throw DataSetException("Eigen::Map has invalid shape and can't be resized."); + } } }; + } // namespace details } // namespace HighFive diff 
--git a/src/examples/CMakeLists.txt b/src/examples/CMakeLists.txt index ab5700c6a..47c43e00a 100644 --- a/src/examples/CMakeLists.txt +++ b/src/examples/CMakeLists.txt @@ -30,6 +30,12 @@ set(boost_examples ${CMAKE_CURRENT_SOURCE_DIR}/boost_ublas_double.cpp ) +set(eigen_examples + ${CMAKE_CURRENT_SOURCE_DIR}/eigen_matrix.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/eigen_vector.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/eigen_map.cpp +) + set(hl_hdf5_examples ${CMAKE_CURRENT_SOURCE_DIR}/hl_hdf5_inmemory_files.cpp ) @@ -70,6 +76,12 @@ if(HIGHFIVE_TEST_BOOST) endforeach() endif() +if(HIGHFIVE_TEST_EIGEN) + foreach(example_source ${eigen_examples}) + compile_example(${example_source} HighFiveEigenDependency) + endforeach() +endif() + if(HDF5_IS_PARALLEL) foreach(example_source ${parallel_hdf5_examples}) compile_example(${example_source}) diff --git a/src/examples/eigen_map.cpp b/src/examples/eigen_map.cpp new file mode 100644 index 000000000..7cdd15cd4 --- /dev/null +++ b/src/examples/eigen_map.cpp @@ -0,0 +1,48 @@ +#include +#include + +// Example showing reading and writing of `Eigen::Map`. Using +// `Map` as an example, but `Map` works +// analogously. +// +// Both `Eigen::Matrix` and `Eigen::Vector` have their own examples. + +int main() { + HighFive::File file("eigen_map.h5", HighFive::File::Truncate); + + // Somehow allocate some memory: + double* p1 = (double*) malloc(4 * 3 * sizeof(double)); + + Eigen::Map A(p1, 4, 3); + + // clang-format off + A << 1, 2, 3, + 4, 5, 6, + 7, 8, 9, + 10, 11, 12; + // clang-format on + std::cout << "A = \n" << A << "\n\n"; + + // Write it to the file: + file.createDataSet("mat", A); + + // ... and read it back as fixed-size and row-major: + using Matrix43d = Eigen::Matrix; + + // Again, memory was obtain somehow, and we create an `Eigen::Map` + // from it: + double* p2 = (double*) malloc(4 * 3 * sizeof(double)); + Eigen::Map B(p2, 4, 3); + + // Since, we've pre-allocated the memory, we use the overload of `read` + // accepts `B` and an argument. Note, this will throw if `B` needs to be + // resized, because a map shouldn't resize the underlying memory: + file.getDataSet("mat").read(B); + + std::cout << "B = \n" << B << "\n"; + + free(p1); + free(p2); + + return 0; +} diff --git a/src/examples/eigen_matrix.cpp b/src/examples/eigen_matrix.cpp new file mode 100644 index 000000000..7a40445d7 --- /dev/null +++ b/src/examples/eigen_matrix.cpp @@ -0,0 +1,33 @@ +#include +#include + +// Example showing reading and writing of `Eigen::Matrix`. Using +// `Eigen::Matrix` as an example, but `Eigen::Array` works analogously. +// +// Both `Eigen::Vector` and `Eigen::Map` have their own examples. + +int main() { + HighFive::File file("eigen_matrix.h5", HighFive::File::Truncate); + + // Create a matrix. + Eigen::MatrixXd A(4, 3); + // clang-format off + A << 1, 2, 3, + 4, 5, 6, + 7, 8, 9, + 10, 11, 12; + // clang-format on + // + std::cout << "A = \n" << A << "\n\n"; + + // Write it to the file: + file.createDataSet("mat", A); + + // ... and read it back as fixed-size and row-major: + using Matrix43d = Eigen::Matrix; + auto B = file.getDataSet("mat").read(); + + std::cout << "B = \n" << B << "\n"; + + return 0; +} diff --git a/src/examples/eigen_vector.cpp b/src/examples/eigen_vector.cpp new file mode 100644 index 000000000..aa4772ce7 --- /dev/null +++ b/src/examples/eigen_vector.cpp @@ -0,0 +1,33 @@ +#include +#include + +// Example showing reading and writing of `Eigen::Matrix`. Using +// `Eigen::Matrix` as an example, but `Eigen::Array` works analogously. 
+// +// Both `Eigen::Vector` and `Eigen::Map` have their own examples. + +int main() { + HighFive::File file("eigen_vector.h5", HighFive::File::Truncate); + + // Create a matrix. + Eigen::VectorXd v(3); + v << 1, 2, 3; + std::cout << "v = \n" << v << "\n\n"; + + // Write it to the file: + file.createDataSet("col_vec", v); + + // The twist is that Eigen typedefs: + // using VectorXd = Matrix; + // + // Therefore, for HighFive it's indistinguishable from a Nx1 matrix. Since, + // Eigen distinguishes row and column vectors, the HighFive chooses to + // respect the distinction and deduces the shape of vector as Nx1. + + // ... and read it back as fixed-size: + auto w = file.getDataSet("col_vec").read(); + + std::cout << "w = \n" << w << "\n"; + + return 0; +} diff --git a/tests/unit/data_generator.hpp b/tests/unit/data_generator.hpp index 9c17dfe81..2964bf9fd 100644 --- a/tests/unit/data_generator.hpp +++ b/tests/unit/data_generator.hpp @@ -7,11 +7,16 @@ #include #include +#include + #ifdef HIGHFIVE_TEST_BOOST #include #endif -#include +#ifdef HIGHFIVE_TEST_EIGEN +#include +#endif + namespace HighFive { namespace testing { @@ -69,6 +74,8 @@ struct ScalarContainerTraits { using container_type = T; using base_type = T; + static constexpr bool is_view = false; + static void set(container_type& array, std::vector /* indices */, base_type value) { array = value; } @@ -85,6 +92,8 @@ struct ScalarContainerTraits { return container_type{}; } + static void deallocate(container_type& /* array */, const std::vector& /* dims */) {} + static void sanitize_dims(std::vector& /* dims */, size_t /* axis */) {} }; @@ -106,6 +115,8 @@ struct ContainerTraits> { using value_type = bool; using base_type = bool; + static constexpr bool is_view = false; + static void set(container_type& array, const std::vector& indices, const base_type& value) { @@ -125,6 +136,8 @@ struct ContainerTraits> { return array; } + static void deallocate(container_type& /* array */, const std::vector& /* dims */) {} + static void sanitize_dims(std::vector& dims, size_t axis) { ContainerTraits::sanitize_dims(dims, axis + 1); } @@ -136,6 +149,8 @@ struct STLLikeContainerTraits { using value_type = ValueType; using base_type = typename ContainerTraits::base_type; + static constexpr bool is_view = ContainerTraits::is_view; + static void set(container_type& array, const std::vector& indices, const base_type& value) { @@ -151,15 +166,22 @@ struct STLLikeContainerTraits { } static container_type allocate(const std::vector& dims) { - container_type array(dims[0]); + container_type array; + array.reserve(dims[0]); for (size_t i = 0; i < dims[0]; ++i) { auto value = ContainerTraits::allocate(lstrip(dims, 1)); - ContainerTraits::assign(array[i], value); + array.push_back(value); } return array; } + static void deallocate(container_type& array, const std::vector& dims) { + for (size_t i = 0; i < dims[0]; ++i) { + ContainerTraits::deallocate(array[i], lstrip(dims, 1)); + } + } + static void sanitize_dims(std::vector& dims, size_t axis) { ContainerTraits::sanitize_dims(dims, axis + 1); } @@ -207,6 +229,8 @@ struct ContainerTraits> { using value_type = T; using base_type = typename ContainerTraits::base_type; + static constexpr bool is_view = ContainerTraits::is_view; + static void set(container_type& array, const std::vector& indices, const base_type& value) { @@ -238,6 +262,14 @@ struct ContainerTraits> { return array; } + static void deallocate(container_type& array, const std::vector& dims) { + auto local_dims = std::vector(dims.begin(), dims.begin() 
+ n); + size_t n_elements = flat_size(local_dims); + for (size_t i = 0; i < n_elements; ++i) { + ContainerTraits::deallocate(array(unravel(i, local_dims)), lstrip(dims, n)); + } + } + static void sanitize_dims(std::vector& dims, size_t axis) { ContainerTraits::sanitize_dims(dims, axis + n); } @@ -249,6 +281,8 @@ struct ContainerTraits> { using value_type = T; using base_type = typename ContainerTraits::base_type; + static constexpr bool is_view = ContainerTraits::is_view; + static void set(container_type& array, const std::vector& indices, const base_type& value) { @@ -282,11 +316,158 @@ struct ContainerTraits> { return array; } + static void deallocate(container_type& array, const std::vector& dims) { + auto local_dims = std::vector(dims.begin(), dims.begin() + 2); + size_t n_elements = flat_size(local_dims); + for (size_t i = 0; i < n_elements; ++i) { + auto indices = unravel(i, local_dims); + ContainerTraits::deallocate(array(indices[0], indices[1]), lstrip(dims, 2)); + } + } + + static void sanitize_dims(std::vector& dims, size_t axis) { + ContainerTraits::sanitize_dims(dims, axis + 2); + } +}; + +#endif + +#if HIGHFIVE_TEST_EIGEN + +template +struct EigenContainerTraits { + using container_type = EigenType; + using value_type = typename EigenType::Scalar; + using base_type = typename ContainerTraits::base_type; + + static constexpr bool is_view = ContainerTraits::is_view; + + static void set(container_type& array, + const std::vector& indices, + const base_type& value) { + auto i = static_cast(indices[0]); + auto j = static_cast(indices[1]); + return ContainerTraits::set(array(i, j), lstrip(indices, 2), value); + } + + static base_type get(const container_type& array, const std::vector& indices) { + auto i = static_cast(indices[0]); + auto j = static_cast(indices[1]); + return ContainerTraits::get(array(i, j), lstrip(indices, 2)); + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector& dims) { + auto local_dims = std::vector(dims.begin(), dims.begin() + 2); + auto n_rows = static_cast(local_dims[0]); + auto n_cols = static_cast(local_dims[1]); + container_type array = container_type::Zero(n_rows, n_cols); + + size_t n_elements = flat_size(local_dims); + for (size_t i = 0; i < n_elements; ++i) { + auto element = ContainerTraits::allocate(lstrip(dims, 2)); + set(array, unravel(i, local_dims), element); + } + + return array; + } + + static void deallocate(container_type& array, const std::vector& dims) { + auto local_dims = std::vector(dims.begin(), dims.begin() + 2); + size_t n_elements = flat_size(local_dims); + for (size_t i_flat = 0; i_flat < n_elements; ++i_flat) { + auto indices = unravel(i_flat, local_dims); + auto i = static_cast(indices[0]); + auto j = static_cast(indices[1]); + ContainerTraits::deallocate(array(i, j), lstrip(dims, 2)); + } + } + static void sanitize_dims(std::vector& dims, size_t axis) { + if (EigenType::RowsAtCompileTime != Eigen::Dynamic) { + dims[axis + 0] = static_cast(EigenType::RowsAtCompileTime); + } + + if (EigenType::ColsAtCompileTime != Eigen::Dynamic) { + dims[axis + 1] = static_cast(EigenType::ColsAtCompileTime); + } ContainerTraits::sanitize_dims(dims, axis + 2); } }; +template +struct ContainerTraits> + : public EigenContainerTraits> { + private: + using super = EigenContainerTraits>; + + public: + using container_type = typename super::container_type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; +}; + 
+template +struct ContainerTraits> + : public EigenContainerTraits> { + private: + using super = EigenContainerTraits>; + + public: + using container_type = typename super::container_type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; +}; + +template +struct ContainerTraits> + : public EigenContainerTraits> { + private: + using super = EigenContainerTraits>; + + public: + using container_type = typename super::container_type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + + static constexpr bool is_view = true; + + static container_type allocate(const std::vector& dims) { + auto local_dims = std::vector(dims.begin(), dims.begin() + 2); + auto n_rows = static_cast(local_dims[0]); + auto n_cols = static_cast(local_dims[1]); + + size_t n_elements = flat_size(local_dims); + value_type* ptr = new value_type[n_elements]; + + container_type array = container_type(ptr, n_rows, n_cols); + + for (size_t i = 0; i < n_elements; ++i) { + auto element = ContainerTraits::allocate(lstrip(dims, 2)); + ContainerTraits::set(array, unravel(i, local_dims), element); + } + + return array; + } + + static void deallocate(container_type& array, const std::vector& dims) { + auto local_dims = std::vector(dims.begin(), dims.begin() + 2); + size_t n_elements = flat_size(local_dims); + for (size_t i_flat = 0; i_flat < n_elements; ++i_flat) { + auto indices = unravel(i_flat, local_dims); + auto i = static_cast(indices[0]); + auto j = static_cast(indices[1]); + ContainerTraits::deallocate(array(i, j), lstrip(dims, 2)); + } + + delete[] array.data(); + } +}; + + #endif template diff --git a/tests/unit/supported_types.hpp b/tests/unit/supported_types.hpp index 843814f91..75e442c60 100644 --- a/tests/unit/supported_types.hpp +++ b/tests/unit/supported_types.hpp @@ -1,4 +1,3 @@ - #pragma once #include @@ -10,6 +9,11 @@ #include #endif +#ifdef HIGHFIVE_TEST_EIGEN +#include +#include +#endif + namespace HighFive { namespace testing { @@ -44,6 +48,32 @@ struct BoostUblasMatrix { }; #endif +#ifdef HIGHFIVE_TEST_EIGEN +template +struct EigenMatrix { + template + using type = Eigen::Matrix, n, m, Option>; +}; + +template +struct EigenArray { + template + using type = Eigen::Array, n, m, Option>; +}; + +template +struct EigenMapArray { + template + using type = Eigen::Map, n, m, Option>>; +}; + +template +struct EigenMapMatrix { + template + using type = Eigen::Map, n, m, Option>>; +}; +#endif + template struct ContainerProduct; @@ -66,7 +96,7 @@ struct ConcatenateTuples> { }; // clang-format off -using numeric_scalar_types = std::tuple< +using all_numeric_scalar_types = std::tuple< int, unsigned int, long, @@ -79,8 +109,17 @@ using numeric_scalar_types = std::tuple< unsigned long long >; -using scalar_types = typename ConcatenateTuples>::type; -using scalar_types_boost = typename ConcatenateTuples>::type; + +// To reduce the explosion of combinations, we don't always need +// to test against every numeric scalar type. These three should +// suffice. 
+using some_numeric_scalar_types = std::tuple; + +using all_scalar_types = typename ConcatenateTuples>::type; +using some_scalar_types = typename ConcatenateTuples>::type; + +using scalar_types_boost = some_numeric_scalar_types; +using scalar_types_eigen = some_numeric_scalar_types; using supported_array_types = typename ConcatenateTuples< #ifdef HIGHFIVE_TEST_BOOST @@ -92,14 +131,34 @@ using supported_array_types = typename ConcatenateTuples< typename ContainerProduct>, scalar_types_boost>::type, typename ContainerProduct>, scalar_types_boost>::type, #endif - typename ContainerProduct, scalar_types>::type, - typename ContainerProduct>, scalar_types>::type, - typename ContainerProduct>>, scalar_types>::type, - typename ContainerProduct>>>, scalar_types>::type, - typename ContainerProduct, scalar_types>::type, - typename ContainerProduct>, scalar_types>::type, - typename ContainerProduct>, scalar_types>::type, - typename ContainerProduct>, scalar_types>::type +#ifdef HIGHFIVE_TEST_EIGEN + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, + std::tuple, + typename ContainerProduct, scalar_types_eigen>::type, + + typename ContainerProduct>, scalar_types_eigen>::type, + typename ContainerProduct>, scalar_types_eigen>::type, + std::tuple, std::vector>, + + typename ContainerProduct>, scalar_types_eigen>::type, + typename ContainerProduct>, scalar_types_eigen>::type, + std::tuple>, +#endif + typename ContainerProduct, all_scalar_types>::type, + typename ContainerProduct>, some_scalar_types>::type, + typename ContainerProduct>>, some_scalar_types>::type, + typename ContainerProduct>>>, some_scalar_types>::type, + typename ContainerProduct, some_scalar_types>::type, + typename ContainerProduct>, some_scalar_types>::type, + typename ContainerProduct>, some_scalar_types>::type, + typename ContainerProduct>, some_scalar_types>::type >::type; // clang-format on diff --git a/tests/unit/test_all_types.cpp b/tests/unit/test_all_types.cpp index e772fd1d7..cddc73312 100644 --- a/tests/unit/test_all_types.cpp +++ b/tests/unit/test_all_types.cpp @@ -12,6 +12,8 @@ #include #include + + #include #include "tests_high_five.hpp" #include "data_generator.hpp" @@ -303,10 +305,15 @@ void compare_arrays(const Actual& actual, } template -void check_read_auto(const Expected& expected, const std::vector& dims, const Obj& obj) { +auto check_read_auto(const Expected& expected, const std::vector& dims, const Obj& obj) -> + typename std::enable_if::is_view>::type { compare_arrays(obj.template read(), expected, dims); } +template +auto check_read_auto(const Expected&, const std::vector&, const Obj&) -> + typename std::enable_if::is_view>::type {} + template void check_read_preallocated(const Expected& expected, const std::vector& dims, @@ -315,6 +322,8 @@ void check_read_preallocated(const Expected& expected, obj.read(actual); compare_arrays(actual, expected, dims); + + testing::ContainerTraits::deallocate(actual, dims); } template @@ -349,6 +358,8 @@ void check_read_regular(const std::string& file_name, const std::vector& SECTION("attr.read(values)") { check_read_preallocated(expected, dims, attr); } + + 
testing::ContainerTraits::deallocate(expected, dims); } template @@ -378,10 +389,15 @@ void check_writing(const std::vector& dims, Write write) { obj.read(actual); compare_arrays(actual, expected, dims); + + testing::ContainerTraits::deallocate(actual, dims); + testing::ContainerTraits::deallocate(values, dims); + testing::ContainerTraits::deallocate(expected, dims); } template -void check_write_auto(File& file, const std::string& name, const std::vector& dims) { +auto check_write_auto(File& file, const std::string& name, const std::vector& dims) -> + typename std::enable_if::is_view>::type { auto write_auto = [&](const Container& values) { return CreateTraits::create(file, "auto_" + name, values); }; @@ -389,6 +405,10 @@ void check_write_auto(File& file, const std::string& name, const std::vector(dims, write_auto); } +template +auto check_write_auto(File&, const std::string&, const std::vector&) -> + typename std::enable_if::is_view>::type {} + template void check_write_deduce_type(File& file, const std::string& name, const std::vector& dims) { auto write_two_phase_auto = [&](const Container& values) { diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 0ef9715bd..dbb6e7fc0 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -2610,7 +2610,7 @@ TEST_CASE("HighFiveEigen") { vec_in << 1, 2, 3, 4, 5, 6, 7, 8, 9; Eigen::Matrix vec_out; - CHECK_THROWS(test_eigen_vec(file, ds_name_flavor, vec_in, vec_out)); + test_eigen_vec(file, ds_name_flavor, vec_in, vec_out); } // Eigen MatrixXd @@ -2619,7 +2619,7 @@ TEST_CASE("HighFiveEigen") { Eigen::MatrixXd vec_in = 100. * Eigen::MatrixXd::Random(20, 5); Eigen::MatrixXd vec_out(20, 5); - CHECK_THROWS(test_eigen_vec(file, ds_name_flavor, vec_in, vec_out)); + test_eigen_vec(file, ds_name_flavor, vec_in, vec_out); } // std::vector @@ -2633,7 +2633,7 @@ TEST_CASE("HighFiveEigen") { vec_in.push_back(m2); std::vector vec_out(2, Eigen::MatrixXd::Zero(20, 5)); - CHECK_THROWS(test_eigen_vec(file, ds_name_flavor, vec_in, vec_out)); + test_eigen_vec(file, ds_name_flavor, vec_in, vec_out); } #ifdef HIGHFIVE_TEST_BOOST @@ -2675,7 +2675,7 @@ TEST_CASE("HighFiveEigen") { } } - CHECK_THROWS(test_eigen_vec(file, ds_name_flavor, vec_in, vec_out)); + test_eigen_vec(file, ds_name_flavor, vec_in, vec_out); } #endif From e54d176be806c60aa9d61e333912934cc6dfa3cc Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Wed, 21 Feb 2024 10:24:58 +0100 Subject: [PATCH 65/97] Windows CI should be red on test failure. (#971) The issue is that `bash -l {0}` seems to turn off the `set -e` behaviour we see in the other pipelines. Simply asking for `bash`, leads to every ctest invocation failing with an exit code (even tests that are known to succeed). Therefore, we put one test per stage of the pipeline, this ensures its exit code is relevant for success/failure of the pipeline. --- .github/run_examples.sh | 2 +- .github/workflows/ci.yml | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/.github/run_examples.sh b/.github/run_examples.sh index 77861f388..9af9a6ef1 100755 --- a/.github/run_examples.sh +++ b/.github/run_examples.sh @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -set -e +set -eu if [[ $# -eq 0 ]] then diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7bb089a0e..edfbd6b57 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -83,6 +83,10 @@ jobs: working-directory: ${{github.workspace}}/build run: | ctest -j2 --output-on-failure -C $BUILD_TYPE + + - name: Test No HDF5 Diagnositics + working-directory: ${{github.workspace}}/build + run: | ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG @@ -123,6 +127,10 @@ jobs: working-directory: ${{github.workspace}}/build run: | ctest -j2 --output-on-failure -C $BUILD_TYPE + + - name: Test No HDF5 Diagnositics + working-directory: ${{github.workspace}}/build + run: | ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG @@ -163,6 +171,10 @@ jobs: working-directory: ${{github.workspace}}/build run: | ctest -j2 --output-on-failure -C $BUILD_TYPE + + - name: Test No HDF5 Diagnositics + working-directory: ${{github.workspace}}/build + run: | ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples @@ -208,6 +220,10 @@ jobs: working-directory: ${{github.workspace}}/build run: | ctest -j2 --output-on-failure -C $BUILD_TYPE + + - name: Test No HDF5 Diagnositics + working-directory: ${{github.workspace}}/build + run: | ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples @@ -306,6 +322,10 @@ jobs: working-directory: ${{github.workspace}}/build run: | ctest -j2 --output-on-failure -C $BUILD_TYPE + + - name: Test No HDF5 Diagnositics + working-directory: ${{github.workspace}}/build + run: | ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples @@ -361,4 +381,9 @@ jobs: shell: bash -l {0} run: | ctest -j2 --output-on-failure -C $BUILD_TYPE + + - name: Test No HDF5 Diagnositics + working-directory: ${{github.workspace}}/build + shell: bash -l {0} + run: | ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG From f169f384040b8c69397d2f514d53aa47e0786eea Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 08:46:09 +0100 Subject: [PATCH 66/97] Update doxygen-awesome to v2.3.2 (#973) Co-authored-by: github-actions --- doc/doxygen-awesome-css/doxygen-awesome.css | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/doc/doxygen-awesome-css/doxygen-awesome.css b/doc/doxygen-awesome-css/doxygen-awesome.css index ac7f0608e..a44945b36 100644 --- a/doc/doxygen-awesome-css/doxygen-awesome.css +++ b/doc/doxygen-awesome-css/doxygen-awesome.css @@ -313,7 +313,7 @@ body { body, table, div, p, dl, #nav-tree .label, .title, .sm-dox a, .sm-dox a:hover, .sm-dox a:focus, #projectname, .SelectItem, #MSearchField, .navpath li.navelem a, -.navpath li.navelem a:hover, p.reference, p.definition { +.navpath li.navelem a:hover, p.reference, p.definition, div.toc li, div.toc h3 { font-family: var(--font-family); } @@ -334,6 +334,7 @@ p.reference, p.definition { a:link, a:visited, a:hover, a:focus, a:active { color: var(--primary-color) !important; font-weight: 500; + background: none; } a.anchor { @@ -806,6 +807,10 @@ html.dark-mode iframe#MSearchResults { line-height: var(--tree-item-height); } +#nav-tree .item > a:focus { + outline: none; +} + #nav-sync { bottom: 12px; right: 12px; @@ -843,6 +848,7 @@ html.dark-mode iframe#MSearchResults { #nav-tree .arrow { opacity: var(--side-nav-arrow-opacity); + background: none; } .arrow { @@ -2460,17 +2466,17 @@ h2:hover a.anchorlink, h1:hover a.anchorlink, h3:hover a.anchorlink, h4:hover a. 
Optional tab feature */ -.tabbed ul { +.tabbed > ul { padding-inline-start: 0px; margin: 0; padding: var(--spacing-small) 0; } -.tabbed li { +.tabbed > ul > li { display: none; } -.tabbed li.selected { +.tabbed > ul > li.selected { display: block; } From 25f6481f1c1970ca8b1014565cbc970e2a6e1668 Mon Sep 17 00:00:00 2001 From: Nicolas Cornu Date: Fri, 5 Apr 2024 18:36:39 +0200 Subject: [PATCH 67/97] Remove deprecated default ctor for DataSet (#947) * Remove deprecated default ctor for DataSet * Allow default ctors. * Test that default constructed objects throw. (#975) The only valid operations of default constructed DataSets and Groups is to assign a valid object to them. Therefore, calling any other methods should raise an exception. --------- Co-authored-by: Luc Grosheintz --- include/highfive/H5DataSet.hpp | 2 - include/highfive/bits/H5Attribute_misc.hpp | 4 + include/highfive/bits/H5DataSet_misc.hpp | 4 + include/highfive/bits/H5Path_traits.hpp | 3 +- include/highfive/bits/H5Path_traits_misc.hpp | 6 +- tests/unit/tests_high_five_base.cpp | 164 +++++++++++++++++-- 6 files changed, 160 insertions(+), 23 deletions(-) diff --git a/include/highfive/H5DataSet.hpp b/include/highfive/H5DataSet.hpp index 0236f06c2..566eb17ff 100644 --- a/include/highfive/H5DataSet.hpp +++ b/include/highfive/H5DataSet.hpp @@ -98,8 +98,6 @@ class DataSet: public Object, return details::get_plist(*this, H5Dget_access_plist); } - /// \deprecated Default constructor creates unsafe uninitialized objects - H5_DEPRECATED("Default constructor creates unsafe uninitialized objects") DataSet() = default; protected: diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 042c63014..19eceb49f 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -31,6 +31,10 @@ inline std::string Attribute::getName() const { } inline size_t Attribute::getStorageSize() const { + if (!this->isValid()) { + throw AttributeException("Invalid call to `DataSet::getFile` for invalid object"); + } + return static_cast(detail::h5a_get_storage_size(_hid)); } diff --git a/include/highfive/bits/H5DataSet_misc.hpp b/include/highfive/bits/H5DataSet_misc.hpp index 4817fe001..45e530efc 100644 --- a/include/highfive/bits/H5DataSet_misc.hpp +++ b/include/highfive/bits/H5DataSet_misc.hpp @@ -22,6 +22,10 @@ namespace HighFive { inline uint64_t DataSet::getStorageSize() const { + if (!this->isValid()) { + throw DataSetException("Invalid call to `DataSet::getStorageSize` for invalid object"); + } + return detail::h5d_get_storage_size(_hid); } diff --git a/include/highfive/bits/H5Path_traits.hpp b/include/highfive/bits/H5Path_traits.hpp index 46a038c4f..a58f96187 100644 --- a/include/highfive/bits/H5Path_traits.hpp +++ b/include/highfive/bits/H5Path_traits.hpp @@ -25,8 +25,7 @@ class PathTraits { /// /// \brief Return a reference to the File object this object belongs /// \return the File object ref - File& getFile() const noexcept; - + File& getFile() const; protected: std::shared_ptr _file_obj; // keep a ref to file so we keep its ref count > 0 diff --git a/include/highfive/bits/H5Path_traits_misc.hpp b/include/highfive/bits/H5Path_traits_misc.hpp index acde06d1e..0893599c6 100644 --- a/include/highfive/bits/H5Path_traits_misc.hpp +++ b/include/highfive/bits/H5Path_traits_misc.hpp @@ -35,7 +35,11 @@ inline std::string PathTraits::getPath() const { } template -inline File& PathTraits::getFile() const noexcept { +inline File& PathTraits::getFile() const { + const 
auto& obj = static_cast(*this); + if (!obj.isValid()) { + throw ObjectException("Invalid call to `PathTraits::getFile` for invalid object"); + } return *_file_obj; } diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index dbb6e7fc0..bc33d5dad 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -6,6 +6,7 @@ * http://www.boost.org/LICENSE_1_0.txt) * */ +#include #include #include #include @@ -321,27 +322,154 @@ TEST_CASE("Test allocation time") { CHECK(alloc_size == data.size() * sizeof(decltype(data)::value_type)); } -/* - * Test to ensure legacy support: DataSet used to have a default constructor. - * However, it is not useful to have a DataSet object that does not actually - * refer to a dataset in a file. Hence, the the default constructor was - * deprecated. - * This test is to ensure that the constructor is not accidentally removed and - * thereby break users' code. - */ -TEST_CASE("Test default constructors") { - const std::string file_name("h5_default_ctors.h5"); - const std::string dataset_name("dset"); - File file(file_name, File::Truncate); - auto ds = file.createDataSet(dataset_name, std::vector{1, 2, 3, 4, 5}); +template +void check_invalid_hid_Object(T& obj) { + auto silence = SilenceHDF5(); + + CHECK(!obj.isValid()); + CHECK(obj.getId() == H5I_INVALID_HID); + + CHECK_THROWS(obj.getInfo()); + CHECK_THROWS(obj.getType()); +} - DataSet d2; // expect deprecation warning, as it constructs unsafe object - // d2.getFile(); // runtime error - CHECK(!d2.isValid()); - d2 = ds; // copy - CHECK(d2.isValid()); +template +void check_invalid_hid_NodeTraits(T& obj, const U& linkable) { + auto silence = SilenceHDF5(); + + auto data_space = DataSpace{2, 3}; + auto data_type = HighFive::create_datatype(); + auto data = std::vector{1.0, 2.0, 3.0}; + auto gcpl = GroupCreateProps(); + + CHECK_THROWS(obj.createDataSet("foo", data_space, data_type)); + CHECK_THROWS(obj.template createDataSet("foo", data_space)); + CHECK_THROWS(obj.createDataSet("foo", data)); + + CHECK_THROWS(obj.getDataSet("foo")); + CHECK_THROWS(obj.createGroup("foo")); + CHECK_THROWS(obj.createGroup("foo", gcpl)); + CHECK_THROWS(obj.getGroup("foo")); + CHECK_THROWS(obj.getDataType("foo")); + CHECK_THROWS(obj.getNumberObjects()); + CHECK_THROWS(obj.getObjectName(0)); + CHECK_THROWS(obj.rename("foo", "bar")); + CHECK_THROWS(obj.listObjectNames()); + CHECK_THROWS(obj.exist("foo")); + CHECK_THROWS(obj.unlink("foo")); + CHECK_THROWS(obj.getLinkType("foo")); + CHECK_THROWS(obj.getObjectType("foo")); + CHECK_THROWS(obj.createSoftLink("foo", linkable)); + CHECK_THROWS(obj.createSoftLink("foo", "bar")); + CHECK_THROWS(obj.createExternalLink("foo", "bar", "baz")); + CHECK_THROWS(obj.createHardLink("foo", linkable)); } +template +void check_invalid_hid_DataSet(T& obj) { + auto silence = SilenceHDF5(); + + CHECK_THROWS(obj.getStorageSize()); + CHECK_THROWS(obj.getOffset()); + CHECK_THROWS(obj.getMemSpace()); + CHECK_THROWS(obj.resize({1, 2, 3})); + CHECK_THROWS(obj.getDimensions()); + CHECK_THROWS(obj.getElementCount()); + CHECK_THROWS(obj.getCreatePropertyList()); + CHECK_THROWS(obj.getAccessPropertyList()); +} + +template +void check_invalid_hid_SliceTraits(T& obj) { + auto silence = SilenceHDF5(); + + auto slab = HighFive::HyperSlab(RegularHyperSlab({0})); + auto space = DataSpace{3}; + auto set = ElementSet{0, 1, 3}; + auto data = std::vector{1.0, 2.0, 3.0}; + auto type = create_datatype(); + auto cols = std::vector{0, 2, 3}; + + CHECK_THROWS(obj.select(slab)); 
+ CHECK_THROWS(obj.select(slab, space)); + CHECK_THROWS(obj.select({0}, {3})); + CHECK_THROWS(obj.select(cols)); + CHECK_THROWS(obj.select(set)); + + CHECK_THROWS(obj.template read()); + CHECK_THROWS(obj.read(data)); + CHECK_THROWS(obj.read_raw(data.data(), type)); + CHECK_THROWS(obj.template read_raw(data.data())); + + CHECK_THROWS(obj.write(data)); + CHECK_THROWS(obj.write_raw(data.data(), type)); + CHECK_THROWS(obj.template write_raw(data.data())); +} + +template +void check_invalid_hid_PathTraits(T& obj) { + auto silence = SilenceHDF5(); + + CHECK_THROWS(obj.getPath()); + CHECK_THROWS(obj.getFile()); +} + +template +void check_invalid_hid_AnnotateTraits(T& obj) { + auto silence = SilenceHDF5(); + + auto space = DataSpace{3}; + auto data = std::vector{1.0, 2.0, 3.0}; + auto type = create_datatype(); + + CHECK_THROWS(obj.createAttribute("foo", space, type)); + CHECK_THROWS(obj.template createAttribute("foo", space)); + CHECK_THROWS(obj.createAttribute("foo", data)); + + CHECK_THROWS(obj.deleteAttribute("foo")); + CHECK_THROWS(obj.getAttribute("foo")); + CHECK_THROWS(obj.getNumberAttributes()); + CHECK_THROWS(obj.listAttributeNames()); + CHECK_THROWS(obj.hasAttribute("foo")); +} + +template +void check_invalid_hid_Group(T& obj) { + auto silence = SilenceHDF5(); + + CHECK_THROWS(obj.getEstimatedLinkInfo()); + CHECK_THROWS(obj.getCreatePropertyList()); +} + +TEST_CASE("Test default DataSet constructor") { + DataSet ds; + check_invalid_hid_Object(ds); + check_invalid_hid_DataSet(ds); + check_invalid_hid_SliceTraits(ds); + check_invalid_hid_AnnotateTraits(ds); + check_invalid_hid_PathTraits(ds); + + File file("h5_default_dset_ctor.h5", File::Truncate); + ds = file.createDataSet("dset", std::vector{1, 2, 3, 4, 5}); + CHECK(ds.isValid()); +} + +TEST_CASE("Test default Group constructor") { + File file("h5_default_group_ctor.h5", File::Truncate); + Group linkable = file.createGroup("bar"); + + Group grp; + check_invalid_hid_Object(grp); + check_invalid_hid_NodeTraits(grp, linkable); + check_invalid_hid_AnnotateTraits(grp); + check_invalid_hid_PathTraits(grp); + + grp = file.createGroup("grp"); + + CHECK(grp.isValid()); +} + + TEST_CASE("Test groups and datasets") { const std::string file_name("h5_group_test.h5"); const std::string dataset_name("dset"); From 9a2850001c1876dd35e40ba4aad1d7c542fc659c Mon Sep 17 00:00:00 2001 From: Nicolas Cornu Date: Fri, 5 Apr 2024 18:37:01 +0200 Subject: [PATCH 68/97] Remove deprecated default ctor for Group (#948) * Remove default ctor for Group * Allow default ctors. 
--------- Co-authored-by: Luc Grosheintz --- include/highfive/H5Group.hpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/include/highfive/H5Group.hpp b/include/highfive/H5Group.hpp index 0a6a4cdae..eebfcbb3e 100644 --- a/include/highfive/H5Group.hpp +++ b/include/highfive/H5Group.hpp @@ -47,8 +47,6 @@ class Group: public Object, public: const static ObjectType type = ObjectType::Group; - /// \deprecated Default constructor creates unsafe uninitialized objects - H5_DEPRECATED("Default constructor creates unsafe uninitialized objects") Group() = default; std::pair getEstimatedLinkInfo() const; From 5ab4752f21986977e728f8cc6708b17224aa3086 Mon Sep 17 00:00:00 2001 From: Nicolas Cornu Date: Mon, 15 Apr 2024 11:00:22 +0200 Subject: [PATCH 69/97] Bump CI version to 1.14.4.1 (#979) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index edfbd6b57..e400ba18d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -96,7 +96,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - hdf5_version : [ hdf5-1_8_23, hdf5-1_10_11, hdf5-1_12_3, hdf5-1_14_3 ] + hdf5_version : [ hdf5-1_8_23, hdf5-1_10_11, hdf5-1_12_3, hdf5_1.14.4.1 ] steps: - uses: actions/checkout@v3 From c1b3cde78c4a1bd4768e9247f61f9ebe2d8afc6a Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 19 Apr 2024 15:33:03 +0200 Subject: [PATCH 70/97] Improve information content of GCC error messages. (#985) Frequently, GCC prints error messages that are, ironically, too short: they include the innermost context, somewhere deep inside the STL or some other external library, then skip all the HighFive context and continue printing the outermost layers, which typically happen to be in Catch2.
--- cmake/HighFiveWarnings.cmake | 8 +++++++- src/examples/CMakeLists.txt | 2 +- tests/unit/CMakeLists.txt | 6 +++--- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/cmake/HighFiveWarnings.cmake b/cmake/HighFiveWarnings.cmake index dfd43c7f1..3f569d5d5 100644 --- a/cmake/HighFiveWarnings.cmake +++ b/cmake/HighFiveWarnings.cmake @@ -6,6 +6,7 @@ if(TARGET HighFiveWarnings) endif() add_library(HighFiveWarnings INTERFACE) +add_library(HighFiveFlags INTERFACE) if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU" @@ -32,8 +33,13 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" -Wdouble-promotion ) + target_compile_options(HighFiveWarnings + INTERFACE + -ftemplate-backtrace-limit=0 + ) + if(HIGHFIVE_MAX_ERRORS) - target_compile_options(HighFiveWarnings + target_compile_options(HighFiveFlags INTERFACE -fmax-errors=${HIGHFIVE_MAX_ERRORS} ) diff --git a/src/examples/CMakeLists.txt b/src/examples/CMakeLists.txt index 47c43e00a..5a1384557 100644 --- a/src/examples/CMakeLists.txt +++ b/src/examples/CMakeLists.txt @@ -55,7 +55,7 @@ function(compile_example example_source) message("example_name: ${example_name}") add_executable(${example_name} ${example_source}) - target_link_libraries(${example_name} PUBLIC HighFive HighFiveWarnings) + target_link_libraries(${example_name} PUBLIC HighFive HighFiveWarnings HighFiveFlags) if(${ARGC} EQUAL 2) target_link_libraries(${example_name} PUBLIC ${ARGV1}) endif() diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 048ccd9b6..c5a07e8e8 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -8,7 +8,7 @@ endif() ## Base tests foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type test_legacy) add_executable(${test_name} "${test_name}.cpp") - target_link_libraries(${test_name} HighFive HighFiveWarnings Catch2::Catch2WithMain) + target_link_libraries(${test_name} HighFive HighFiveWarnings HighFiveFlags Catch2::Catch2WithMain) target_link_libraries(${test_name} HighFiveOptionalDependencies) catch_discover_tests(${test_name}) @@ -19,7 +19,7 @@ if(HIGHFIVE_PARALLEL_HDF5) ## parallel MPI tests add_executable(tests_parallel_bin ${tests_parallel_src}) - target_link_libraries(tests_parallel_bin HighFive HighFiveWarnings Catch2::Catch2) + target_link_libraries(tests_parallel_bin HighFive HighFiveWarnings HighFiveFlags Catch2::Catch2) target_link_libraries(tests_parallel_bin HighFiveOptionalDependencies) # We need to patch in a call to `mpirun` or equivalent when using @@ -62,6 +62,6 @@ if(HIGHFIVE_TEST_SINGLE_INCLUDES) get_filename_component(CLASS_NAME ${PUBLIC_HEADER} NAME_WE) configure_file(tests_import_public_headers.cpp "tests_${CLASS_NAME}.cpp" @ONLY) add_executable("tests_include_${CLASS_NAME}" "${CMAKE_CURRENT_BINARY_DIR}/tests_${CLASS_NAME}.cpp") - target_link_libraries("tests_include_${CLASS_NAME}" HighFive HighFiveWarnings) + target_link_libraries("tests_include_${CLASS_NAME}" HighFive HighFiveWarnings HighFiveFlags) endforeach() endif() From 5e0204e272e4f71e3dd768287e400758f785527d Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 19 Apr 2024 17:05:38 +0200 Subject: [PATCH 71/97] Also check macos-13. 
(#988) --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e400ba18d..49f415e82 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -296,6 +296,10 @@ jobs: os: [ "macOS-12" ] cxxstd: ["14", "17", "20"] + include: + - os: "macOS-13" + cxxstd: "20" + steps: - uses: actions/checkout@v3 with: From d51d6cc0c87737e5e2d68adfe297576161eae1a2 Mon Sep 17 00:00:00 2001 From: Nicolas Cornu Date: Mon, 6 May 2024 09:05:00 +0200 Subject: [PATCH 72/97] Disable boost::multi_array with fortran_storage_order (#964) Prevent serializing `boost::multi_array`s that are internally stored as column major, because this would cause them to be incorrectly written/read as if they were row-major. --------- Co-authored-by: Luc Grosheintz --- include/highfive/boost.hpp | 10 ++++++++++ tests/unit/tests_high_five_multi_dims.cpp | 9 +++++++++ 2 files changed, 19 insertions(+) diff --git a/include/highfive/boost.hpp b/include/highfive/boost.hpp index fb8a709c5..49e5d7acf 100644 --- a/include/highfive/boost.hpp +++ b/include/highfive/boost.hpp @@ -51,16 +51,25 @@ struct inspector> { } } + static void assert_c_order(const type& val) { + if (!(val.storage_order() == boost::c_storage_order())) { + throw DataTypeException("Only C storage order is supported for 'boost::multi_array'."); + } + } + static hdf5_type* data(type& val) { + assert_c_order(val); return inspector::data(*val.data()); } static const hdf5_type* data(const type& val) { + assert_c_order(val); return inspector::data(*val.data()); } template static void serialize(const type& val, const std::vector& dims, It m) { + assert_c_order(val); size_t size = val.num_elements(); auto subdims = std::vector(dims.begin() + ndim, dims.end()); size_t subsize = compute_total_size(subdims); @@ -71,6 +80,7 @@ struct inspector> { template static void unserialize(It vec_align, const std::vector& dims, type& val) { + assert_c_order(val); std::vector next_dims(dims.begin() + ndim, dims.end()); size_t subsize = compute_total_size(next_dims); for (size_t i = 0; i < val.num_elements(); ++i) { diff --git a/tests/unit/tests_high_five_multi_dims.cpp b/tests/unit/tests_high_five_multi_dims.cpp index 60ec66cae..a261360e0 100644 --- a/tests/unit/tests_high_five_multi_dims.cpp +++ b/tests/unit/tests_high_five_multi_dims.cpp @@ -169,6 +169,15 @@ TEMPLATE_LIST_TEST_CASE("MultiArray3D", "[template]", numerical_test_types) { MultiArray3DTest(); } +TEST_CASE("Test boost::multi_array with fortran_storage_order") { + const std::string file_name("h5_multi_array_fortran.h5"); + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + boost::multi_array ma(boost::extents[2][2], boost::fortran_storage_order()); + auto dset = file.createDataSet("main_dset", DataSpace::From(ma)); + CHECK_THROWS_AS(dset.write(ma), DataTypeException); +} + template void ublas_matrix_Test() { using Matrix = boost::numeric::ublas::matrix; From cabc50fb5c466f5ff57c87d0052c27217931b671 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 6 May 2024 14:10:52 +0200 Subject: [PATCH 73/97] Assert that XTensor objects are row-major. (#989) H5Easy serializes XTensor objects by obtaining a pointer to the first element, and then using `write_raw`. The same applies to reading via `read_raw`. Therefore, it only supports (a subset of) row-major arrays. This commit adds a runtime check.
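
To illustrate the behaviour this check enforces, here is a minimal sketch (not part of the patch; it assumes the same `H5Easy`/`xtensor` usage as the tests below and a hypothetical file name):

    #include <highfive/H5Easy.hpp>

    #include <xtensor/xrandom.hpp>
    #include <xtensor/xtensor.hpp>

    int main() {
        H5Easy::File file("xtensor_row_major_sketch.h5", H5Easy::File::Overwrite);

        // Row-major xtensor objects are contiguous in the order HDF5 expects,
        // so `dump`/`load` can pass the raw buffer through `write_raw`/`read_raw`.
        xt::xtensor<double, 2> A = 100. * xt::random::randn<double>({20, 5});
        H5Easy::dump(file, "/path/to/A", A);
        auto A_r = H5Easy::load<xt::xtensor<double, 2>>(file, "/path/to/A");
        (void) A_r;

        // Column-major objects now throw instead of being written with a
        // silently transposed element order.
        using column_major_t = xt::xtensor<double, 2, xt::layout_type::column_major>;
        column_major_t B = A;
        try {
            H5Easy::dump(file, "/path/to/B", B);
        } catch (const HighFive::Exception&) {
            // Expected: only row-major XTensor objects are supported.
        }

        return 0;
    }

Throwing here is preferable to silently writing or reading the elements in the wrong order.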
--- .../highfive/h5easy_bits/H5Easy_xtensor.hpp | 10 +++++ tests/unit/tests_high_five_easy.cpp | 38 +++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/include/highfive/h5easy_bits/H5Easy_xtensor.hpp b/include/highfive/h5easy_bits/H5Easy_xtensor.hpp index 9b737f03b..ba27bc84a 100644 --- a/include/highfive/h5easy_bits/H5Easy_xtensor.hpp +++ b/include/highfive/h5easy_bits/H5Easy_xtensor.hpp @@ -20,6 +20,12 @@ namespace detail { template struct io_impl::value>::type> { + inline static void assert_row_major(const File& file, const std::string& path, const T& data) { + if (data.layout() != xt::layout_type::row_major) { + throw detail::error(file, path, "Only row-major XTensor object are supported."); + } + } + inline static std::vector shape(const T& data) { return std::vector(data.shape().cbegin(), data.shape().cend()); } @@ -28,6 +34,7 @@ struct io_impl::value>::type> { const std::string& path, const T& data, const DumpOptions& options) { + assert_row_major(file, path, data); using value_type = typename std::decay_t::value_type; DataSet dataset = initDataset(file, path, shape(data), options); dataset.write_raw(data.data()); @@ -44,6 +51,7 @@ struct io_impl::value>::type> { DataSet dataset = file.getDataSet(path); std::vector dims = dataset.getDimensions(); T data = T::from_shape(dims); + assert_row_major(file, path, data); dataset.read_raw(data.data()); return data; } @@ -53,6 +61,7 @@ struct io_impl::value>::type> { const std::string& key, const T& data, const DumpOptions& options) { + assert_row_major(file, path, data); using value_type = typename std::decay_t::value_type; Attribute attribute = initAttribute(file, path, key, shape(data), options); attribute.write_raw(data.data()); @@ -73,6 +82,7 @@ struct io_impl::value>::type> { DataSpace dataspace = attribute.getSpace(); std::vector dims = dataspace.getDimensions(); T data = T::from_shape(dims); + assert_row_major(file, path, data); attribute.read_raw(data.data()); return data; } diff --git a/tests/unit/tests_high_five_easy.cpp b/tests/unit/tests_high_five_easy.cpp index aa30b4e96..d10ef941b 100644 --- a/tests/unit/tests_high_five_easy.cpp +++ b/tests/unit/tests_high_five_easy.cpp @@ -243,6 +243,44 @@ TEST_CASE("H5Easy_xtensor") { CHECK(xt::all(xt::equal(B, B_r))); } +TEST_CASE("H5Easy_xtensor_column_major") { + H5Easy::File file("h5easy_xtensor_colum_major.h5", H5Easy::File::Overwrite); + + using column_major_t = xt::xtensor; + + xt::xtensor A = 100. * xt::random::randn({20, 5}); + + H5Easy::dump(file, "/path/to/A", A); + + SECTION("Write column major") { + column_major_t B = A; + REQUIRE_THROWS(H5Easy::dump(file, "path/to/B", B)); + } + + SECTION("Read column major") { + REQUIRE_THROWS(H5Easy::load(file, "/path/to/A")); + } +} + +TEST_CASE("H5Easy_xarray_column_major") { + H5Easy::File file("h5easy_xarray_colum_major.h5", H5Easy::File::Overwrite); + + using column_major_t = xt::xarray; + + xt::xarray A = 100. * xt::random::randn({20, 5}); + + H5Easy::dump(file, "/path/to/A", A); + + SECTION("Write column major") { + column_major_t B = A; + REQUIRE_THROWS(H5Easy::dump(file, "path/to/B", B)); + } + + SECTION("Read column major") { + REQUIRE_THROWS(H5Easy::load(file, "/path/to/A")); + } +} + TEST_CASE("H5Easy_xarray") { H5Easy::File file("h5easy_xarray.h5", H5Easy::File::Overwrite); From b77955e51098a8af92b402b1d2d3f4b23e2ac49e Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 6 May 2024 16:23:46 +0200 Subject: [PATCH 74/97] Add `inspector::is_trivially_nestable`. 
(#986) --- include/highfive/bits/H5Inspector_misc.hpp | 22 +++++++++++++++++----- include/highfive/boost.hpp | 5 ++++- include/highfive/eigen.hpp | 3 ++- 3 files changed, 23 insertions(+), 7 deletions(-) diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index 3f69276c4..b69888043 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -132,6 +132,10 @@ inspector { // If this value is false: serialize, unserialize are mandatory static constexpr bool is_trivially_copyable + // Is this type trivially nestable, i.e. is type[n] a contiguous + // array of `base_type[N]`? + static constexpr bool is_trivially_nestable + // Reading: // Allocate the value following dims (should be recursive) static void prepare(type& val, const std::vector dims) @@ -162,6 +166,7 @@ struct type_helper { static constexpr size_t ndim = 0; static constexpr size_t recursive_ndim = ndim; static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value; + static constexpr bool is_trivially_nestable = is_trivially_copyable; static std::vector getDimensions(const type& /* val */) { return {}; @@ -206,6 +211,7 @@ struct inspector: type_helper { using hdf5_type = int8_t; static constexpr bool is_trivially_copyable = false; + static constexpr bool is_trivially_nestable = false; static hdf5_type* data(type& /* val */) { throw DataSpaceException("A boolean cannot be read directly."); @@ -255,6 +261,7 @@ struct inspector: type_helper { using hdf5_type = hobj_ref_t; static constexpr bool is_trivially_copyable = false; + static constexpr bool is_trivially_nestable = false; static hdf5_type* data(type& /* val */) { throw DataSpaceException("A Reference cannot be read directly."); @@ -287,7 +294,8 @@ struct inspector> { static constexpr size_t ndim = 1; static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && - inspector::is_trivially_copyable; + inspector::is_trivially_nestable; + static constexpr bool is_trivially_nestable = false; static std::vector getDimensions(const type& val) { std::vector sizes(recursive_ndim, 1ul); @@ -350,6 +358,7 @@ struct inspector> { static constexpr size_t ndim = 1; static constexpr size_t recursive_ndim = ndim; static constexpr bool is_trivially_copyable = false; + static constexpr bool is_trivially_nestable = false; static std::vector getDimensions(const type& val) { std::vector sizes{val.size()}; @@ -396,8 +405,9 @@ struct inspector> { static constexpr size_t ndim = 1; static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && - sizeof(type) == N * sizeof(T) && - inspector::is_trivially_copyable; + inspector::is_trivially_nestable; + static constexpr bool is_trivially_nestable = (sizeof(type) == N * sizeof(T)) && + is_trivially_copyable; static std::vector getDimensions(const type& val) { std::vector sizes{N}; @@ -466,7 +476,8 @@ struct inspector { static constexpr size_t ndim = 1; static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && - inspector::is_trivially_copyable; + inspector::is_trivially_nestable; + static constexpr bool is_trivially_nestable = false; static std::vector getDimensions(const type& /* val */) { throw DataSpaceException("Not possible to have size of a T*"); @@ -496,7 
+507,8 @@ struct inspector { static constexpr size_t ndim = 1; static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && - inspector::is_trivially_copyable; + inspector::is_trivially_nestable; + static constexpr bool is_trivially_nestable = is_trivially_copyable; static void prepare(type& val, const std::vector& dims) { if (dims.size() < 1) { diff --git a/include/highfive/boost.hpp b/include/highfive/boost.hpp index 49e5d7acf..3e42a5b60 100644 --- a/include/highfive/boost.hpp +++ b/include/highfive/boost.hpp @@ -19,7 +19,9 @@ struct inspector> { static constexpr size_t ndim = Dims; static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && - inspector::is_trivially_copyable; + inspector::is_trivially_nestable; + static constexpr bool is_trivially_nestable = false; + static std::vector getDimensions(const type& val) { std::vector sizes; @@ -102,6 +104,7 @@ struct inspector> { static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_copyable; + static constexpr bool is_trivially_nestable = false; static std::vector getDimensions(const type& val) { std::vector sizes{val.size1(), val.size2()}; diff --git a/include/highfive/eigen.hpp b/include/highfive/eigen.hpp index aaad280ef..4a0b293fd 100644 --- a/include/highfive/eigen.hpp +++ b/include/highfive/eigen.hpp @@ -30,7 +30,8 @@ struct eigen_inspector { static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; static constexpr bool is_trivially_copyable = is_row_major() && std::is_trivially_copyable::value && - inspector::is_trivially_copyable; + inspector::is_trivially_nestable; + static constexpr bool is_trivially_nestable = false; static std::vector getDimensions(const type& val) { std::vector sizes{static_cast(val.rows()), static_cast(val.cols())}; From 070badf6935e17bb74c5a9d0e08969f640b4a87b Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 6 May 2024 17:01:12 +0200 Subject: [PATCH 75/97] Add CI for `macos-14`. (#993) --- .github/workflows/ci.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 49f415e82..cb9d92ea0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -293,10 +293,13 @@ jobs: runs-on: ${{matrix.os}} strategy: matrix: - os: [ "macOS-12" ] + os: [ "macOS-14" ] cxxstd: ["14", "17", "20"] include: + - os: "macOS-12" + cxxstd: "14" + - os: "macOS-13" cxxstd: "20" From d3f82e250ee08196ca49a26e0d0490c15c143739 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 7 May 2024 07:20:27 +0200 Subject: [PATCH 76/97] Remove duplicates of unifying test API. (#990) * Remove `ReadWrite{Attribute,DataSet}` helper. There's three copies of unifying API for DataSet and Attribute. This removes one copy. * Remove `ForwardTo{Attribute,DataSet}`. This removes a second copy of the unifying API used for testing `Attribute`s and `DataSet`s. 
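
The unified helper now lives in `tests/unit/create_traits.hpp`, which is not shown in this patch. As a rough, hypothetical sketch of the idea — one trait per flavour, each exposing a static `create` that forwards to the corresponding `createDataSet`/`createAttribute` overloads — it could look like this (names and details are illustrative only and may differ from the real header):

    #include <string>
    #include <utility>
    #include <vector>

    #include <highfive/highfive.hpp>

    // Hypothetical sketch; the real `create_traits.hpp` may differ.
    struct DataSetCreateTraits {
        template <class Parent, class... Args>
        static HighFive::DataSet create(Parent& parent, const std::string& name, Args&&... args) {
            // Forward to the DataSet flavour of the API.
            return parent.createDataSet(name, std::forward<Args>(args)...);
        }
    };

    struct AttributeCreateTraits {
        template <class Parent, class... Args>
        static HighFive::Attribute create(Parent& parent, const std::string& name, Args&&... args) {
            // Forward to the Attribute flavour of the API.
            return parent.createAttribute(name, std::forward<Args>(args)...);
        }
    };

    int main() {
        HighFive::File file("create_traits_sketch.h5", HighFive::File::Truncate);

        // The same templated test code can target either flavour:
        auto dset = DataSetCreateTraits::create(file, "x", std::vector<int>{1, 2, 3});
        auto attr = AttributeCreateTraits::create(file, "y", 42);
        (void) dset;
        (void) attr;

        return 0;
    }

Either trait can then be passed as the `CreateTraits` template parameter used by `check_single_string` and friends in the diff below.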
--- tests/unit/tests_high_five_base.cpp | 163 +++++++++------------------- 1 file changed, 54 insertions(+), 109 deletions(-) diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index bc33d5dad..fde643e04 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -27,6 +27,7 @@ #include #include "tests_high_five.hpp" +#include "create_traits.hpp" #ifdef HIGHFIVE_TEST_BOOST #include @@ -1717,28 +1718,6 @@ void check_empty_dimensions(const std::vector& dims) { check_empty_dimensions(input_data, dims); } -struct ReadWriteAttribute { - template - static void create(HighFive::File& file, const std::string& name, const Container& container) { - file.createAttribute(name, container); - } - - static HighFive::Attribute get(HighFive::File& file, const std::string& name) { - return file.getAttribute(name); - } -}; - -struct ReadWriteDataSet { - template - static void create(HighFive::File& file, const std::string& name, const Container& container) { - file.createDataSet(name, container); - } - - static HighFive::DataSet get(HighFive::File& file, const std::string& name) { - return file.getDataSet(name); - } -}; - template void check_empty_read_write_cycle(const std::vector& dims) { using container_type = typename CreateContainer::container_type; @@ -1781,12 +1760,12 @@ void check_empty_read_write_cycle(const std::vector& dims) { template void check_empty_dataset(const std::vector& dims) { - check_empty_read_write_cycle(dims); + check_empty_read_write_cycle(dims); } template void check_empty_attribute(const std::vector& dims) { - check_empty_read_write_cycle(dims); + check_empty_read_write_cycle(dims); } template @@ -2284,56 +2263,8 @@ TEST_CASE("DirectWriteBool") { } -class ForwardToAttribute { - public: - ForwardToAttribute(const HighFive::File& file) - : _file(file) {} - - template - HighFive::Attribute create(const std::string& name, const T& value) { - return _file.createAttribute(name, value); - } - - HighFive::Attribute create(const std::string& name, - const HighFive::DataSpace filespace, - const HighFive::DataType& datatype) { - return _file.createAttribute(name, filespace, datatype); - } - - HighFive::Attribute get(const std::string& name) { - return _file.getAttribute(name); - } - - private: - HighFive::File _file; -}; - -class ForwardToDataSet { - public: - ForwardToDataSet(const HighFive::File& file) - : _file(file) {} - - template - HighFive::DataSet create(const std::string& name, const T& value) { - return _file.createDataSet(name, value); - } - - HighFive::DataSet create(const std::string& name, - const HighFive::DataSpace filespace, - const HighFive::DataType& datatype) { - return _file.createDataSet(name, filespace, datatype); - } - - HighFive::DataSet get(const std::string& name) { - return _file.getDataSet(name); - } - - private: - HighFive::File _file; -}; - -template -void check_single_string(Proxy proxy, size_t string_length) { +template +void check_single_string(File file, size_t string_length) { auto value = std::string(string_length, 'o'); auto dataspace = DataSpace::From(value); @@ -2348,42 +2279,49 @@ void check_single_string(Proxy proxy, size_t string_length) { auto variable_length = VariableLengthStringType(); SECTION("automatic") { - proxy.create("auto", value); - REQUIRE(proxy.get("auto").template read() == value); + auto obj = CreateTraits::create(file, "auto", value); + REQUIRE(obj.template read() == value); } SECTION("fixed length") { - proxy.create("fixed", dataspace, fixed_length).write(value); 
- REQUIRE(proxy.get("fixed").template read() == value); + auto obj = CreateTraits::create(file, "fixed", dataspace, fixed_length); + obj.write(value); + REQUIRE(obj.template read() == value); } SECTION("overlength null-terminated") { - proxy.create("overlength_nullterm", dataspace, overlength_nullterm).write(value); - REQUIRE(proxy.get("overlength_nullterm").template read() == value); + auto obj = + CreateTraits::create(file, "overlength_nullterm", dataspace, overlength_nullterm); + obj.write(value); + REQUIRE(obj.template read() == value); } SECTION("overlength null-padded") { - proxy.create("overlength_nullpad", dataspace, overlength_nullpad).write(value); + auto obj = CreateTraits::create(file, "overlength_nullpad", dataspace, overlength_nullpad); + obj.write(value); auto expected = std::string(n_chars_overlength, '\0'); expected.replace(0, value.size(), value.data()); - REQUIRE(proxy.get("overlength_nullpad").template read() == expected); + REQUIRE(obj.template read() == expected); } SECTION("overlength space-padded") { - proxy.create("overlength_spacepad", dataspace, overlength_spacepad).write(value); + auto obj = + CreateTraits::create(file, "overlength_spacepad", dataspace, overlength_spacepad); + obj.write(value); auto expected = std::string(n_chars_overlength, ' '); expected.replace(0, value.size(), value.data()); - REQUIRE(proxy.get("overlength_spacepad").template read() == expected); + REQUIRE(obj.template read() == expected); } SECTION("variable length") { - proxy.create("variable", dataspace, variable_length).write(value); - REQUIRE(proxy.get("variable").template read() == value); + auto obj = CreateTraits::create(file, "variable", dataspace, variable_length); + obj.write(value); + REQUIRE(obj.template read() == value); } } -template -void check_multiple_string(Proxy proxy, size_t string_length) { +template +void check_multiple_string(File file, size_t string_length) { using value_t = std::vector; auto value = value_t{std::string(string_length, 'o'), std::string(string_length, 'x')}; @@ -2407,13 +2345,14 @@ void check_multiple_string(Proxy proxy, size_t string_length) { }; SECTION("automatic") { - proxy.create("auto", value); - check(proxy.get("auto").template read(), value); + auto obj = CreateTraits::create(file, "auto", value); + check(obj.template read(), value); } SECTION("variable length") { - proxy.create("variable", dataspace, variable_length).write(value); - check(proxy.get("variable").template read(), value); + auto obj = CreateTraits::create(file, "variable", dataspace, variable_length); + obj.write(value); + check(obj.template read(), value); } auto make_padded_reference = [&](char pad, size_t n) { @@ -2428,22 +2367,25 @@ void check_multiple_string(Proxy proxy, size_t string_length) { auto check_fixed_length = [&](const std::string& label, size_t length) { SECTION(label + " null-terminated") { auto datatype = FixedLengthStringType(length + 1, StringPadding::NullTerminated); - proxy.create(label + "_nullterm", dataspace, datatype).write(value); - check(proxy.get(label + "_nullterm").template read(), value); + auto obj = CreateTraits::create(file, label + "_nullterm", dataspace, datatype); + obj.write(value); + check(obj.template read(), value); } SECTION(label + " null-padded") { auto datatype = FixedLengthStringType(length, StringPadding::NullPadded); - proxy.create(label + "_nullpad", dataspace, datatype).write(value); + auto obj = CreateTraits::create(file, label + "_nullpad", dataspace, datatype); + obj.write(value); auto expected = make_padded_reference('\0', 
length); - check(proxy.get(label + "_nullpad").template read(), expected); + check(obj.template read(), expected); } SECTION(label + " space-padded") { auto datatype = FixedLengthStringType(length, StringPadding::SpacePadded); - proxy.create(label + "_spacepad", dataspace, datatype).write(value); + auto obj = CreateTraits::create(file, label + "_spacepad", dataspace, datatype); + obj.write(value); auto expected = make_padded_reference(' ', length); - check(proxy.get(label + "_spacepad").template read(), expected); + check(obj.template read(), expected); } }; @@ -2453,58 +2395,61 @@ void check_multiple_string(Proxy proxy, size_t string_length) { SECTION("underlength null-terminated") { auto datatype = FixedLengthStringType(string_length, StringPadding::NullTerminated); - REQUIRE_THROWS(proxy.create("underlength_nullterm", dataspace, datatype).write(value)); + auto obj = CreateTraits::create(file, "underlength_nullterm", dataspace, datatype); + REQUIRE_THROWS(obj.write(value)); } SECTION("underlength nullpad") { auto datatype = FixedLengthStringType(string_length - 1, StringPadding::NullPadded); - REQUIRE_THROWS(proxy.create("underlength_nullpad", dataspace, datatype).write(value)); + auto obj = CreateTraits::create(file, "underlength_nullpad", dataspace, datatype); + REQUIRE_THROWS(obj.write(value)); } SECTION("underlength spacepad") { auto datatype = FixedLengthStringType(string_length - 1, StringPadding::NullTerminated); - REQUIRE_THROWS(proxy.create("underlength_spacepad", dataspace, datatype).write(value)); + auto obj = CreateTraits::create(file, "underlength_spacepad", dataspace, datatype); + REQUIRE_THROWS(obj.write(value)); } } TEST_CASE("HighFiveSTDString (dataset, single, short)") { File file("std_string_dataset_single_short.h5", File::Truncate); - check_single_string(ForwardToDataSet(file), 3); + check_single_string(file, 3); } TEST_CASE("HighFiveSTDString (attribute, single, short)") { File file("std_string_attribute_single_short.h5", File::Truncate); - check_single_string(ForwardToAttribute(file), 3); + check_single_string(file, 3); } TEST_CASE("HighFiveSTDString (dataset, single, long)") { File file("std_string_dataset_single_long.h5", File::Truncate); - check_single_string(ForwardToDataSet(file), 256); + check_single_string(file, 256); } TEST_CASE("HighFiveSTDString (attribute, single, long)") { File file("std_string_attribute_single_long.h5", File::Truncate); - check_single_string(ForwardToAttribute(file), 256); + check_single_string(file, 256); } TEST_CASE("HighFiveSTDString (dataset, multiple, short)") { File file("std_string_dataset_multiple_short.h5", File::Truncate); - check_multiple_string(ForwardToDataSet(file), 3); + check_multiple_string(file, 3); } TEST_CASE("HighFiveSTDString (attribute, multiple, short)") { File file("std_string_attribute_multiple_short.h5", File::Truncate); - check_multiple_string(ForwardToAttribute(file), 3); + check_multiple_string(file, 3); } TEST_CASE("HighFiveSTDString (dataset, multiple, long)") { File file("std_string_dataset_multiple_long.h5", File::Truncate); - check_multiple_string(ForwardToDataSet(file), 256); + check_multiple_string(file, 256); } TEST_CASE("HighFiveSTDString (attribute, multiple, long)") { File file("std_string_attribute_multiple_long.h5", File::Truncate); - check_multiple_string(ForwardToAttribute(file), 256); + check_multiple_string(file, 256); } TEST_CASE("HighFiveFixedString") { From 0a921cbc3d170100f2c3a81f5e22913f5b0c1034 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 7 May 2024 16:12:10 +0200 Subject: 
[PATCH 77/97] Support `std::span`. (#987) --- CMakeLists.txt | 8 ++ cmake/HighFiveOptionalDependencies.cmake | 8 ++ include/highfive/bits/H5Inspector_misc.hpp | 1 + include/highfive/span.hpp | 90 ++++++++++++++++++++++ src/examples/CMakeLists.txt | 10 +++ src/examples/read_write_std_span.cpp | 56 ++++++++++++++ tests/unit/data_generator.hpp | 51 ++++++++++++ tests/unit/supported_types.hpp | 15 ++++ tests/unit/tests_high_five_base.cpp | 3 + 9 files changed, 242 insertions(+) create mode 100644 include/highfive/span.hpp create mode 100644 src/examples/read_write_std_span.cpp diff --git a/CMakeLists.txt b/CMakeLists.txt index 7060fe713..a1c5a120c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -47,10 +47,18 @@ else() set(HIGHFIVE_EXTRAS_DEFAULT OFF) endif() +if (CMAKE_CXX_STANDARD GREATER_EQUAL 20) + include(CheckIncludeFileCXX) + CHECK_INCLUDE_FILE_CXX(span HIGHFIVE_TEST_SPAN_DEFAULT) +else() + set(HIGHFIVE_TEST_SPAN_DEFAULT Off) +endif() + option(HIGHFIVE_UNIT_TESTS "Compile unit-tests" ${HIGHFIVE_EXTRAS_DEFAULT}) option(HIGHFIVE_EXAMPLES "Compile examples" ${HIGHFIVE_EXTRAS_DEFAULT}) option(HIGHFIVE_BUILD_DOCS "Build documentation" ${HIGHFIVE_EXTRAS_DEFAULT}) +option(HIGHFIVE_TEST_SPAN "Enable std::span testing, requires C++20" ${HIGHFIVE_TEST_SPAN_DEFAULT}) option(HIGHFIVE_TEST_BOOST "Enable Boost testing" OFF) option(HIGHFIVE_TEST_EIGEN "Enable Eigen testing" OFF) option(HIGHFIVE_TEST_OPENCV "Enable OpenCV testing" OFF) diff --git a/cmake/HighFiveOptionalDependencies.cmake b/cmake/HighFiveOptionalDependencies.cmake index 1b27edd10..861b80641 100644 --- a/cmake/HighFiveOptionalDependencies.cmake +++ b/cmake/HighFiveOptionalDependencies.cmake @@ -37,6 +37,13 @@ if(NOT TARGET HighFiveOpenCVDependency) endif() endif() +if(NOT TARGET HighFiveSpanDependency) + add_library(HighFiveSpanDependency INTERFACE) + if(HIGHFIVE_TEST_SPAN) + target_compile_definitions(HighFiveSpanDependency INTERFACE HIGHFIVE_TEST_SPAN=1) + endif() +endif() + if(NOT TARGET HighFiveOptionalDependencies) add_library(HighFiveOptionalDependencies INTERFACE) target_link_libraries(HighFiveOptionalDependencies INTERFACE @@ -44,5 +51,6 @@ if(NOT TARGET HighFiveOptionalDependencies) HighFiveEigenDependency HighFiveXTensorDependency HighFiveOpenCVDependency + HighFiveSpanDependency ) endif() diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index b69888043..6f52ff821 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -465,6 +465,7 @@ struct inspector> { } }; + // Cannot be use for reading template struct inspector { diff --git a/include/highfive/span.hpp b/include/highfive/span.hpp new file mode 100644 index 000000000..a70cbe081 --- /dev/null +++ b/include/highfive/span.hpp @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2024 Blue Brain Project + * + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ + +#pragma once + +#include "bits/H5Inspector_decl.hpp" + +#include + +namespace HighFive { +namespace details { + +template +struct inspector> { + using type = std::span; + using value_type = unqualified_t; + using base_type = typename inspector::base_type; + using hdf5_type = typename inspector::hdf5_type; + + static constexpr size_t ndim = 1; + static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && + inspector::is_trivially_nestable; + + static constexpr bool is_trivially_nestable = false; + + static std::vector getDimensions(const type& val) { + std::vector sizes(recursive_ndim, 1ul); + sizes[0] = val.size(); + if (!val.empty()) { + auto s = inspector::getDimensions(val[0]); + assert(s.size() + ndim == sizes.size()); + for (size_t i = 0; i < s.size(); ++i) { + sizes[i + ndim] = s[i]; + } + } + return sizes; + } + + static void prepare(type& val, const std::vector& expected_dims) { + auto actual_dims = getDimensions(val); + if (actual_dims.size() != expected_dims.size()) { + throw DataSpaceException("Mismatching rank."); + } + + for (size_t i = 0; i < actual_dims.size(); ++i) { + if (actual_dims[i] != expected_dims[i]) { + throw DataSpaceException("Mismatching dimensions."); + } + } + } + + static hdf5_type* data(type& val) { + return val.empty() ? nullptr : inspector::data(val[0]); + } + + static const hdf5_type* data(const type& val) { + return val.empty() ? nullptr : inspector::data(val[0]); + } + + template + static void serialize(const type& val, const std::vector& dims, It m) { + if (!val.empty()) { + auto subdims = std::vector(dims.begin() + ndim, dims.end()); + size_t subsize = compute_total_size(subdims); + for (const auto& e: val) { + inspector::serialize(e, subdims, m); + m += subsize; + } + } + } + + template + static void unserialize(const It& vec_align, const std::vector& dims, type& val) { + std::vector subdims(dims.begin() + ndim, dims.end()); + size_t subsize = compute_total_size(subdims); + for (size_t i = 0; i < dims[0]; ++i) { + inspector::unserialize(vec_align + i * subsize, subdims, val[i]); + } + } +}; + +} // namespace details +} // namespace HighFive diff --git a/src/examples/CMakeLists.txt b/src/examples/CMakeLists.txt index 5a1384557..ccc662185 100644 --- a/src/examples/CMakeLists.txt +++ b/src/examples/CMakeLists.txt @@ -18,6 +18,10 @@ set(core_examples ${CMAKE_CURRENT_SOURCE_DIR}/select_partial_dataset_cpp11.cpp ) +set(span_examples + ${CMAKE_CURRENT_SOURCE_DIR}/read_write_std_span.cpp +) + set(easy_examples ${CMAKE_CURRENT_SOURCE_DIR}/easy_attribute.cpp ${CMAKE_CURRENT_SOURCE_DIR}/easy_dumpoptions.cpp @@ -70,6 +74,12 @@ foreach(example_source ${easy_examples}) compile_example(${example_source}) endforeach() +if(HIGHFIVE_TEST_SPAN) + foreach(example_source ${span_examples}) + compile_example(${example_source}) + endforeach() +endif() + if(HIGHFIVE_TEST_BOOST) foreach(example_source ${boost_examples}) compile_example(${example_source} HighFiveBoostDependency) diff --git a/src/examples/read_write_std_span.cpp b/src/examples/read_write_std_span.cpp new file mode 100644 index 000000000..72465c46d --- /dev/null +++ b/src/examples/read_write_std_span.cpp @@ -0,0 +1,56 @@ +/* + * Copyright (c), 2024, Blue Brain Project + * + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ + +// This example demonstrates using `std::span`. An `std::span` is a pointer +// with a size. + +#include +#include + +#include + +#include + +int main(void) { + using namespace HighFive; + + std::string file_name = "read_write_span.h5"; + std::string dataset_name = "array"; + + File file(file_name, File::Truncate); + + // Let's write to file. + { + // Assume we have one-dimensional data in some unsupported format (we + // use `std::vector` for simplicity). Further, assume that the data is + // stored contiguously. Then one can create an `std::span`. + std::vector values{1.0, 2.0, 3.0}; + auto view = std::span(values.data(), values.size()); + + // Given the span, HighFive can deduce the shape of the dataset. Hence, + // spans are fully supported when writing. For example: + auto dataset = file.createDataSet(dataset_name, view); + } + + // Let's read from file. + { + auto dataset = file.getDataSet(dataset_name); + + // Since spans are views, HighFive can't (or wont) allocate memory. + // Instead one must preallocate memory and then create a span for that + // memory: + auto values = std::vector(dataset.getElementCount()); + auto view = std::span(values.data(), values.size()); + + // ... now we can read into the preallocated memory: + dataset.read(view); + } + + return 0; +} diff --git a/tests/unit/data_generator.hpp b/tests/unit/data_generator.hpp index 2964bf9fd..5b1f095ca 100644 --- a/tests/unit/data_generator.hpp +++ b/tests/unit/data_generator.hpp @@ -17,6 +17,10 @@ #include #endif +#ifdef HIGHFIVE_TEST_SPAN +#include +#endif + namespace HighFive { namespace testing { @@ -221,6 +225,53 @@ struct ContainerTraits>: public STLLikeContainerTraits +struct ContainerTraits>: public STLLikeContainerTraits> { + private: + using super = STLLikeContainerTraits>; + + public: + using container_type = typename super::container_type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + + static constexpr bool is_view = true; + + static container_type allocate(const std::vector& dims) { + size_t n_elements = dims[0]; + value_type* ptr = new value_type[n_elements]; + + container_type array = container_type(ptr, n_elements); + + for (size_t i = 0; i < n_elements; ++i) { + auto element = ContainerTraits::allocate(lstrip(dims, 1)); + ContainerTraits::assign(array[i], element); + } + + return array; + } + + static void deallocate(container_type& array, const std::vector& dims) { + size_t n_elements = dims[0]; + for (size_t i = 0; i < n_elements; ++i) { + ContainerTraits::deallocate(array[i], lstrip(dims, 1)); + } + + delete[] array.data(); + } + + static void sanitize_dims(std::vector& dims, size_t axis) { + if (Extent != std::dynamic_extent) { + dims[axis] = Extent; + ContainerTraits::sanitize_dims(dims, axis + 1); + } + } +}; +#endif + + // -- Boost ------------------------------------------------------------------- #ifdef HIGHFIVE_TEST_BOOST template diff --git a/tests/unit/supported_types.hpp b/tests/unit/supported_types.hpp index 75e442c60..4d703949d 100644 --- a/tests/unit/supported_types.hpp +++ b/tests/unit/supported_types.hpp @@ -34,6 +34,14 @@ struct STDArray { using type = std::array, n>; }; +#ifdef HIGHFIVE_TEST_SPAN +template +struct STDSpan { + template + using type = std::span>; +}; +#endif + #ifdef HIGHFIVE_TEST_BOOST template struct BoostMultiArray { @@ -150,6 +158,13 @@ using supported_array_types = typename ConcatenateTuples< typename 
ContainerProduct>, scalar_types_eigen>::type, typename ContainerProduct>, scalar_types_eigen>::type, std::tuple>, +#endif +#ifdef HIGHFIVE_TEST_SPAN + typename ContainerProduct, all_scalar_types>::type, + typename ContainerProduct>, some_scalar_types>::type, + typename ContainerProduct>, some_scalar_types>::type, + typename ContainerProduct>, some_scalar_types>::type, + typename ContainerProduct>, some_scalar_types>::type, #endif typename ContainerProduct, all_scalar_types>::type, typename ContainerProduct>, some_scalar_types>::type, diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index fde643e04..965047430 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -37,6 +37,9 @@ #include #endif +#ifdef HIGHFIVE_TEST_SPAN +#include +#endif using namespace HighFive; using Catch::Matchers::Equals; From 18b46feac5e31f5f15201146461ff172f124c4ee Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 7 May 2024 17:54:50 +0200 Subject: [PATCH 78/97] Fix placement of `CONFIGURE_DEPENDS`. (#994) * Fix placement of `CONFIGURE_DEPENDS`. The location of `CONFIGURE_DEPENDS` is incorrect and leads to CMake silently creating an empty list. * Fix minor breakage due to not running these tests. --- include/highfive/span.hpp | 1 + tests/unit/CMakeLists.txt | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/include/highfive/span.hpp b/include/highfive/span.hpp index a70cbe081..1eca4a51b 100644 --- a/include/highfive/span.hpp +++ b/include/highfive/span.hpp @@ -10,6 +10,7 @@ #pragma once #include "bits/H5Inspector_decl.hpp" +#include "H5Exception.hpp" #include diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index c5a07e8e8..ab7c65749 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -45,8 +45,12 @@ endif() option(HIGHFIVE_TEST_SINGLE_INCLUDES "Enable testing single includes" FALSE) if(HIGHFIVE_TEST_SINGLE_INCLUDES) - file(GLOB CONFIGURE_DEPENDS public_headers LIST_DIRECTORIES false RELATIVE ${PROJECT_SOURCE_DIR}/include ${PROJECT_SOURCE_DIR}/include/highfive/*.hpp) + file(GLOB public_headers LIST_DIRECTORIES false RELATIVE ${PROJECT_SOURCE_DIR}/include CONFIGURE_DEPENDS ${PROJECT_SOURCE_DIR}/include/highfive/*.hpp) foreach(PUBLIC_HEADER ${public_headers}) + if(PUBLIC_HEADER STREQUAL "highfive/span.hpp" AND NOT HIGHFIVE_TEST_SPAN) + continue() + endif() + if(PUBLIC_HEADER STREQUAL "highfive/boost.hpp" AND NOT HIGHFIVE_TEST_BOOST) continue() endif() @@ -62,6 +66,12 @@ if(HIGHFIVE_TEST_SINGLE_INCLUDES) get_filename_component(CLASS_NAME ${PUBLIC_HEADER} NAME_WE) configure_file(tests_import_public_headers.cpp "tests_${CLASS_NAME}.cpp" @ONLY) add_executable("tests_include_${CLASS_NAME}" "${CMAKE_CURRENT_BINARY_DIR}/tests_${CLASS_NAME}.cpp") - target_link_libraries("tests_include_${CLASS_NAME}" HighFive HighFiveWarnings HighFiveFlags) + target_link_libraries( + "tests_include_${CLASS_NAME}" PUBLIC + HighFive + HighFiveWarnings + HighFiveFlags + HighFiveOptionalDependencies + ) endforeach() endif() From d79b5acc758cc9c5e1733afc88d3b3ca66316150 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 7 May 2024 18:03:27 +0200 Subject: [PATCH 79/97] Implement `squeeze` and `reshape`. (#991) These methods allow reshaping the memory space. This can be used as an alternative to broadcasting. Example: Let `dset` be a dataset with shape `[3, 1, 1]`. 
Then, dset.reshapeMemSpace({3}).read>(); dset.squeezeMemSpace({1, 2}).read>(); can be used to read into a one-dimensional dataset. * Move `detail::compute_total_size` to separate file. --- include/highfive/H5Attribute.hpp | 33 +++++++++++- include/highfive/bits/H5Attribute_misc.hpp | 27 +++++++++- include/highfive/bits/H5Inspector_decl.hpp | 10 +--- include/highfive/bits/H5Slice_traits.hpp | 22 ++++++++ include/highfive/bits/H5Slice_traits_misc.hpp | 38 +++++++++++++ include/highfive/bits/compute_total_size.hpp | 14 +++++ include/highfive/bits/squeeze.hpp | 54 +++++++++++++++++++ src/examples/CMakeLists.txt | 1 + src/examples/broadcasting_arrays.cpp | 51 ++++++++++++++++++ tests/unit/tests_high_five_base.cpp | 50 +++++++++++++++-- 10 files changed, 283 insertions(+), 17 deletions(-) create mode 100644 include/highfive/bits/compute_total_size.hpp create mode 100644 include/highfive/bits/squeeze.hpp create mode 100644 src/examples/broadcasting_arrays.cpp diff --git a/include/highfive/H5Attribute.hpp b/include/highfive/H5Attribute.hpp index c34f9e49f..eb7893ce8 100644 --- a/include/highfive/H5Attribute.hpp +++ b/include/highfive/H5Attribute.hpp @@ -13,6 +13,7 @@ #include #include "H5DataType.hpp" +#include "H5DataSpace.hpp" #include "H5Object.hpp" #include "bits/H5Friends.hpp" #include "bits/H5Path_traits.hpp" @@ -78,8 +79,12 @@ class Attribute: public Object, public PathTraits { /// \since 1.0 DataSpace getSpace() const; - /// \brief Get the DataSpace of the current Attribute. - /// \note This is an alias of getSpace(). + /// \brief Get the memory DataSpace of the current Attribute. + /// + /// HDF5 attributes don't support selections. Therefore, there's no need + /// for a memory dataspace. However, HighFive supports allocating arrays + /// and checking dimensions, this requires the dimensions of the memspace. + /// /// \since 1.0 DataSpace getMemSpace() const; @@ -245,10 +250,34 @@ class Attribute: public Object, public PathTraits { // No empty attributes Attribute() = delete; + /// + /// \brief Return an `Attribute` with `axes` squeezed from the memspace. + /// + /// Returns an `Attribute` in which the memspace has been modified + /// to not include the axes listed in `axes`. + /// + /// Throws if any axis to be squeezes has a dimension other than `1`. + /// + /// \since 3.0 + Attribute squeezeMemSpace(const std::vector& axes) const; + + /// + /// \brief Return a `Attribute` with a simple memspace with `dims`. + /// + /// Returns a `Attribute` in which the memspace has been modified + /// to be a simple dataspace with dimensions `dims`. + /// + /// Throws if the number of elements changes. + /// + /// \since 3.0 + Attribute reshapeMemSpace(const std::vector& dims) const; + protected: using Object::Object; private: + DataSpace _mem_space; + #if HIGHFIVE_HAS_FRIEND_DECLARATIONS template friend class ::HighFive::AnnotateTraits; diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 19eceb49f..e626d823d 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -8,6 +8,7 @@ */ #pragma once +#include #include #include #include @@ -22,6 +23,7 @@ #include "H5Utils.hpp" #include "h5a_wrapper.hpp" #include "h5d_wrapper.hpp" +#include "squeeze.hpp" namespace HighFive { @@ -51,7 +53,7 @@ inline DataSpace Attribute::getSpace() const { } inline DataSpace Attribute::getMemSpace() const { - return getSpace(); + return _mem_space.getId() == H5I_INVALID_HID ? 
getSpace() : _mem_space; } template @@ -160,4 +162,27 @@ inline void Attribute::write_raw(const T* buffer) { write_raw(buffer, mem_datatype); } +inline Attribute Attribute::squeezeMemSpace(const std::vector& axes) const { + auto mem_dims = this->getMemSpace().getDimensions(); + auto squeezed_dims = detail::squeeze(mem_dims, axes); + + auto attr = *this; + attr._mem_space = DataSpace(mem_dims); + return attr; +} + +inline Attribute Attribute::reshapeMemSpace(const std::vector& new_dims) const { + auto n_elements_old = this->getMemSpace().getElementCount(); + auto n_elements_new = compute_total_size(new_dims); + if (n_elements_old != n_elements_new) { + throw Exception("Invalid parameter `new_dims` number of elements differ: " + + std::to_string(n_elements_old) + " (old) vs. " + + std::to_string(n_elements_new) + " (new)"); + } + + auto attr = *this; + attr._mem_space = DataSpace(new_dims); + return attr; +} + } // namespace HighFive diff --git a/include/highfive/bits/H5Inspector_decl.hpp b/include/highfive/bits/H5Inspector_decl.hpp index 434545a60..24b547e21 100644 --- a/include/highfive/bits/H5Inspector_decl.hpp +++ b/include/highfive/bits/H5Inspector_decl.hpp @@ -1,20 +1,12 @@ #pragma once -#include -#include -#include -#include +#include "compute_total_size.hpp" namespace HighFive { -inline size_t compute_total_size(const std::vector& dims) { - return std::accumulate(dims.begin(), dims.end(), size_t{1u}, std::multiplies()); -} - template using unqualified_t = typename std::remove_const::type>::type; - namespace details { template diff --git a/include/highfive/bits/H5Slice_traits.hpp b/include/highfive/bits/H5Slice_traits.hpp index fd8c31d27..4d40b7797 100644 --- a/include/highfive/bits/H5Slice_traits.hpp +++ b/include/highfive/bits/H5Slice_traits.hpp @@ -368,6 +368,28 @@ class SliceTraits { /// template void write_raw(const T* buffer, const DataTransferProps& xfer_props = DataTransferProps()); + + /// + /// \brief Return a `Selection` with `axes` squeezed from the memspace. + /// + /// Returns a selection in which the memspace has been modified + /// to not include the axes listed in `axes`. + /// + /// Throws if any axis to be squeezes has a dimension other than `1`. + /// + /// \since 3.0 + Selection squeezeMemSpace(const std::vector& axes) const; + + /// + /// \brief Return a `Selection` with a simple memspace with `dims`. + /// + /// Returns a selection in which the memspace has been modified + /// to be a simple dataspace with dimensions `dims`. + /// + /// Throws if the number of elements changes. 
+ /// + /// \since 3.0 + Selection reshapeMemSpace(const std::vector& dims) const; }; } // namespace HighFive diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index 2ae6640b0..9983238d2 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -20,6 +20,8 @@ #include "H5ReadWrite_misc.hpp" #include "H5Converter_misc.hpp" +#include "squeeze.hpp" +#include "compute_total_size.hpp" namespace HighFive { @@ -288,5 +290,41 @@ inline void SliceTraits::write_raw(const T* buffer, const DataTransfer write_raw(buffer, mem_datatype, xfer_props); } +namespace detail { +inline const DataSet& getDataSet(const Selection& selection) { + return selection.getDataset(); +} + +inline const DataSet& getDataSet(const DataSet& dataset) { + return dataset; +} + +} // namespace detail + +template +inline Selection SliceTraits::squeezeMemSpace(const std::vector& axes) const { + auto slice = static_cast(*this); + auto mem_dims = slice.getMemSpace().getDimensions(); + auto squeezed_dims = detail::squeeze(mem_dims, axes); + + return detail::make_selection(DataSpace(squeezed_dims), + slice.getSpace(), + detail::getDataSet(slice)); +} + +template +inline Selection SliceTraits::reshapeMemSpace(const std::vector& new_dims) const { + auto slice = static_cast(*this); + + auto n_elements_old = slice.getMemSpace().getElementCount(); + auto n_elements_new = compute_total_size(new_dims); + if (n_elements_old != n_elements_new) { + throw Exception("Invalid parameter `new_dims` number of elements differ: " + + std::to_string(n_elements_old) + " (old) vs. " + + std::to_string(n_elements_new) + " (new)"); + } + + return detail::make_selection(DataSpace(new_dims), slice.getSpace(), detail::getDataSet(slice)); +} } // namespace HighFive diff --git a/include/highfive/bits/compute_total_size.hpp b/include/highfive/bits/compute_total_size.hpp new file mode 100644 index 000000000..5be8a5999 --- /dev/null +++ b/include/highfive/bits/compute_total_size.hpp @@ -0,0 +1,14 @@ +#pragma once + +#include +#include +#include +#include + +namespace HighFive { + +inline size_t compute_total_size(const std::vector& dims) { + return std::accumulate(dims.begin(), dims.end(), size_t{1u}, std::multiplies()); +} + +} // namespace HighFive diff --git a/include/highfive/bits/squeeze.hpp b/include/highfive/bits/squeeze.hpp new file mode 100644 index 000000000..4be610e34 --- /dev/null +++ b/include/highfive/bits/squeeze.hpp @@ -0,0 +1,54 @@ +/* + * Copyright (c), 2024, BlueBrain Project, EPFL + * + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ +#pragma once + +#include +#include "../H5Exception.hpp" + +namespace HighFive { +namespace detail { + +/// \brief Squeeze `axes` from `dims`. +/// +/// An axis can only be squeezed if it's dimension is `1`. The elements of +/// `axes` must be in the range `0, ..., dims.size()` (exclusive) and don't +/// have to be sorted. 
+/// +/// Example: +/// squeeze({1, 3, 2, 1}, {0, 3}) == {3, 2} +inline std::vector squeeze(const std::vector& dims, + const std::vector& axes) { + auto n_dims = dims.size(); + auto mask = std::vector(n_dims, false); + for (size_t i = 0; i < axes.size(); ++i) { + if (axes[i] >= n_dims) { + throw Exception("Out of range: axes[" + std::to_string(i) + + "] == " + std::to_string(axes[i]) + " >= " + std::to_string(n_dims)); + } + + mask[axes[i]] = true; + } + + auto squeezed_dims = std::vector{}; + for (size_t i = 0; i < n_dims; ++i) { + if (!mask[i]) { + squeezed_dims.push_back(dims[i]); + } else { + if (dims[i] != 1) { + throw Exception("Squeezing non-unity axis: axes[" + std::to_string(i) + + "] = " + std::to_string(axes[i])); + } + } + } + + return squeezed_dims; +} + +} // namespace detail +} // namespace HighFive diff --git a/src/examples/CMakeLists.txt b/src/examples/CMakeLists.txt index ccc662185..778aac91f 100644 --- a/src/examples/CMakeLists.txt +++ b/src/examples/CMakeLists.txt @@ -1,5 +1,6 @@ set(core_examples ${CMAKE_CURRENT_SOURCE_DIR}/compound_types.cpp + ${CMAKE_CURRENT_SOURCE_DIR}/broadcasting_arrays.cpp ${CMAKE_CURRENT_SOURCE_DIR}/create_attribute_string_integer.cpp ${CMAKE_CURRENT_SOURCE_DIR}/create_dataset_double.cpp ${CMAKE_CURRENT_SOURCE_DIR}/create_datatype.cpp diff --git a/src/examples/broadcasting_arrays.cpp b/src/examples/broadcasting_arrays.cpp new file mode 100644 index 000000000..9c1cf5a24 --- /dev/null +++ b/src/examples/broadcasting_arrays.cpp @@ -0,0 +1,51 @@ +#include + +// This example explains how to read a dataset with some shape into an array of +// some other shape. Naturally, this only makes sense if the number of elements +// doesn't change. +// +// Note that due to how HDF5 works, writing from one shape into some other +// shape is expected to work automatically. +// +// Same is true for reading. However, HighFive also allocates memory, the array +// into which the data is read is forced to have the same shape as the +// memspace. When performing selections it can often happen that one selects a +// one-dimensional slice from a higher dimensional array. In this case we want +// to be able to read into a one dimensional array, e.g. `std::vector`. +// +// Broadcasting is a common technique for hiding benign differences in +// dimensionality. In HighFive we suggest to either "squeeze" or "reshape" the +// memspace, rather than broadcasting. This example demonstrates the required +// syntax. +// +// Note: These techniques can also be used for general hyperslabs which the +// user knows are in fact hypercubes, i.e. regular. +// +// Note: HighFive v2 has support for broadcasting; but because it's quirky, +// less powerful than the demonstrated technique, relied on a compile-time +// constant rank and is quite complex to maintain, the functionality was +// removed from v3. + +using namespace HighFive; + +int main(void) { + File file("broadcasting_arrays.h5", File::Truncate); + + std::vector dims{3, 1}; + std::vector values{1.0, 2.0, 3.0}; + + auto dset = file.createDataSet("dset", DataSpace(dims), create_datatype()); + + // Note that even though `values` is one-dimensional, we can still write it + // to an array of dimensions `[3, 1]`. Only the number of elements needs to + // match. + dset.write(values); + + // When reading, (re-)allocation might occur. The shape to be allocated is + // the dimensions of the memspace. 
Therefore, one might want to either remove + // an axis: + dset.squeezeMemSpace({1}).read(values); + + // or reshape the memspace: + dset.reshapeMemSpace({3}).read(values); +} diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 965047430..8d2f021aa 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -1638,6 +1638,45 @@ TEST_CASE("ReadInBroadcastDims") { } } +TEST_CASE("squeeze") { + CHECK(detail::squeeze({}, {}) == std::vector{}); + CHECK(detail::squeeze({3, 1, 1}, {}) == std::vector{3, 1, 1}); + CHECK(detail::squeeze({3, 1, 1}, {2, 1}) == std::vector{3}); + CHECK(detail::squeeze({1, 3, 1, 2}, {2, 0}) == std::vector{3, 2}); + + CHECK_THROWS(detail::squeeze({3, 1, 1}, {3})); + CHECK_THROWS(detail::squeeze({3, 1, 1}, {0})); + CHECK_THROWS(detail::squeeze({}, {0})); +} + +TEST_CASE("SqueezeMemSpace") { + const std::string file_name("h5_squeeze_memspace.h5"); + const std::string dataset_name("dset"); + + File file(file_name, File::Truncate); + + auto expected_values = std::vector{1.0, 2.0, 3.0}; + auto values = std::vector>{expected_values}; + + auto dset = file.createDataSet(dataset_name, values); + SECTION("squeeze") { + auto actual_values = dset.squeezeMemSpace({0}).read>(); + + REQUIRE(actual_values.size() == expected_values.size()); + for (size_t i = 0; i < actual_values.size(); ++i) { + REQUIRE(actual_values[i] == expected_values[i]); + } + } + + SECTION("reshape") { + auto actual_values = dset.reshapeMemSpace({3}).read>(); + + REQUIRE(actual_values.size() == expected_values.size()); + for (size_t i = 0; i < actual_values.size(); ++i) { + REQUIRE(actual_values[i] == expected_values[i]); + } + } +} template struct CreateEmptyVector; @@ -1735,13 +1774,13 @@ void check_empty_read_write_cycle(const std::vector& dims) { SECTION("read; one-dimensional vector (empty)") { auto output_data = CreateEmptyVector<1>::create({0ul}); - ReadWriteInterface::get(file, dataset_name).read(output_data); + ReadWriteInterface::get(file, dataset_name).reshapeMemSpace({0ul}).read(output_data); check_empty_dimensions(output_data, {0ul}); } SECTION("read; pre-allocated (empty)") { auto output_data = CreateContainer::create(dims); - ReadWriteInterface::get(file, dataset_name).read(output_data); + ReadWriteInterface::get(file, dataset_name).reshapeMemSpace(dims).read(output_data); check_empty_dimensions(output_data, dims); } @@ -1749,14 +1788,15 @@ void check_empty_read_write_cycle(const std::vector& dims) { SECTION("read; pre-allocated (oversized)") { auto oversize_dims = std::vector(dims.size(), 2ul); auto output_data = CreateContainer::create(oversize_dims); - ReadWriteInterface::get(file, dataset_name).read(output_data); + ReadWriteInterface::get(file, dataset_name).reshapeMemSpace(dims).read(output_data); check_empty_dimensions(output_data, dims); } SECTION("read; auto-allocated") { - auto output_data = - ReadWriteInterface::get(file, dataset_name).template read(); + auto output_data = ReadWriteInterface::get(file, dataset_name) + .reshapeMemSpace(dims) + .template read(); check_empty_dimensions(output_data, dims); } } From 5afd83426605da51c25c80e34ec3fb3067ece32c Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Wed, 8 May 2024 11:44:19 +0200 Subject: [PATCH 80/97] Fix bug in Attribute::squeezeMemSpace. 
(#995) --- include/highfive/bits/H5Attribute_misc.hpp | 2 +- tests/unit/tests_high_five_base.cpp | 25 +++++++++++++++------- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index e626d823d..98b16ec1b 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -167,7 +167,7 @@ inline Attribute Attribute::squeezeMemSpace(const std::vector& axes) con auto squeezed_dims = detail::squeeze(mem_dims, axes); auto attr = *this; - attr._mem_space = DataSpace(mem_dims); + attr._mem_space = DataSpace(squeezed_dims); return attr; } diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 8d2f021aa..16da4bf80 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -1649,18 +1649,16 @@ TEST_CASE("squeeze") { CHECK_THROWS(detail::squeeze({}, {0})); } -TEST_CASE("SqueezeMemSpace") { - const std::string file_name("h5_squeeze_memspace.h5"); - const std::string dataset_name("dset"); - - File file(file_name, File::Truncate); +template +void check_modify_mem_space(File& file) { + const std::string name = "dset"; auto expected_values = std::vector{1.0, 2.0, 3.0}; auto values = std::vector>{expected_values}; - auto dset = file.createDataSet(dataset_name, values); + auto obj = CreateTraits::create(file, name, values); SECTION("squeeze") { - auto actual_values = dset.squeezeMemSpace({0}).read>(); + auto actual_values = obj.squeezeMemSpace({0}).template read>(); REQUIRE(actual_values.size() == expected_values.size()); for (size_t i = 0; i < actual_values.size(); ++i) { @@ -1669,7 +1667,7 @@ TEST_CASE("SqueezeMemSpace") { } SECTION("reshape") { - auto actual_values = dset.reshapeMemSpace({3}).read>(); + auto actual_values = obj.reshapeMemSpace({3}).template read>(); REQUIRE(actual_values.size() == expected_values.size()); for (size_t i = 0; i < actual_values.size(); ++i) { @@ -1678,6 +1676,17 @@ TEST_CASE("SqueezeMemSpace") { } } +TEST_CASE("Modify Mem Space, attr") { + File file("h5_modify_memspace_dset.h5", File::Truncate); + check_modify_mem_space(file); +} + +TEST_CASE("Modify Mem Space, dset") { + File file("h5_modify_memspace_attr.h5", File::Truncate); + check_modify_mem_space(file); +} + + template struct CreateEmptyVector; From 259e1ba7cbf3550a2a8c3f5fc37525508d4969f6 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 10 May 2024 08:47:21 +0200 Subject: [PATCH 81/97] Document change for `T**` in Migration Guide. (#996) --- doc/migration_guide.md | 46 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 41 insertions(+), 5 deletions(-) diff --git a/doc/migration_guide.md b/doc/migration_guide.md index 2ffe9e257..0972c7e05 100644 --- a/doc/migration_guide.md +++ b/doc/migration_guide.md @@ -15,7 +15,6 @@ replaced with an `std::vector` (for example). If desired one can silence warnings by replacing `FixedLenStringArray` with `deprecated::FixedLenStringArray`. - ## Deprecation of `read(T*, ...)`. A "raw read" is when the user allocates sufficient bytes and provides HighFive with the pointer to the first byte. "Regular reads" take a detour via the @@ -40,7 +39,7 @@ dset.read(x); which is fine because is a contiguous sequence of doubles. 
It's equivalent to following `v3` code: ``` -double x[2][3]; +double x[n][m]; dset.read_raw((double*) x); ``` @@ -48,11 +47,11 @@ dset.read_raw((double*) x); We consider the example above to be accidentally using a raw read, when it could be performing a regular read. We suggest to not change the above, i.e. ``` -double x[2][3]; +double x[n][m]; dset.read(x); ``` continues to be correct in `v3` and can check that the dimensions match. The -inspector recognizes `double[2][3]` as a contiguous array of doubles. +inspector recognizes `double[n][m]` as a contiguous array of doubles. Therefore, it'll use the shallow-copy buffer and avoid the any additional allocations or copies. @@ -61,11 +60,48 @@ When genuinely performing a "raw read", one must replace `read` with `read_raw`. For example: ``` -double* x = malloc(2*3 * sizeof(double)); +double* x = malloc(n*m * sizeof(double)); dset.read_raw(x); ``` is correct in `v3`. +## Change for `T**`, `T***`, etc. +*The immediately preceding section is likely relevant.* + +In `v2` raw pointers could be used to indicate dimensionality. For example: +``` +double* x = malloc(n*m * sizeof(double)); +auto dset = file.createDataSet("foo", DataSpace({n, m}), ...); + +dset.write((double**) x); +dset.read((double**) x); +``` +was valid and would write the flat array `x` into the two-dimensional dataset +`"foo"`. This must be modernized as follows: +``` +double* x = malloc(n*m * sizeof(double)); +auto dset = file.createDataSet("foo", DataSpace({n, m}), ...); + +dset.write_raw(x); +dset.read_raw(x); +``` + +In `v3` the type `T**` will refer a pointer to a pointer (as usual). The +following: +``` +size_t n = 2, m = 3; +double** x = malloc(n * sizeof(double*)); +for(size_t i = 0; i < n; ++i) { + x[i] = malloc(m * sizeof(double)); +} + +auto dset = file.createDataSet("foo", DataSpace({n, m}), ...); +dset.write(x); +dset.read(x); +``` +is correct in `v3` but would probably segfault in `v2`. + + ## Reworked CMake In `v3` we completely rewrote the CMake code of HighFive. Since HighFive is a header only library, it needs to perform two tasks: From 12064079d9533c0636310f25606571b3dbb977cf Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 10 May 2024 09:29:40 +0200 Subject: [PATCH 82/97] Improve `noexcept` usage. (#997) * Make `File::getName()` not noexcept. It's likely not `noexcept`. Therefore, it would terminate when it encounters an issue. * Make `Selection::get*` not `noexcept`. 
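An illustrative sketch of the effect of this change (an editor's addition, not taken from the patch): since `File::getName()` and the `Selection` getters can now surface HDF5 problems as a `HighFive::Exception` instead of terminating, callers that must not abort can guard them like any other HighFive call. The file name `data.h5` and dataset name `dset` below are placeholders.

```
// Minimal sketch; assumes "data.h5" exists and contains a 1D dataset "dset".
#include <highfive/highfive.hpp>

#include <iostream>

int main() {
    try {
        HighFive::File file("data.h5", HighFive::File::ReadOnly);
        std::cout << "file name: " << file.getName() << "\n";  // no longer noexcept

        auto dset = file.getDataSet("dset");
        auto selection = dset.select({0}, {1});  // select the first element
        std::cout << "memspace rank: " << selection.getMemSpace().getNumberDimensions() << "\n";
    } catch (const HighFive::Exception& err) {
        // An HDF5 failure now propagates as an exception rather than terminating.
        std::cerr << "HighFive error: " << err.what() << "\n";
        return 1;
    }
    return 0;
}
```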
--- include/highfive/H5File.hpp | 2 +- include/highfive/H5Selection.hpp | 8 ++++---- include/highfive/bits/H5File_misc.hpp | 2 +- include/highfive/bits/H5Selection_misc.hpp | 8 ++++---- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/include/highfive/H5File.hpp b/include/highfive/H5File.hpp index a8db5f2a1..b134aaa49 100644 --- a/include/highfive/H5File.hpp +++ b/include/highfive/H5File.hpp @@ -70,7 +70,7 @@ class File: public Object, public NodeTraits, public AnnotateTraits /// /// \brief Return the name of the file /// - const std::string& getName() const noexcept; + const std::string& getName() const; /// \brief Object path of a File is always "/" diff --git a/include/highfive/H5Selection.hpp b/include/highfive/H5Selection.hpp index c00c66d52..27681fe7a 100644 --- a/include/highfive/H5Selection.hpp +++ b/include/highfive/H5Selection.hpp @@ -30,21 +30,21 @@ class Selection: public SliceTraits { /// \brief getSpace /// \return Dataspace associated with this selection /// - DataSpace getSpace() const noexcept; + DataSpace getSpace() const; /// /// \brief getMemSpace /// \return Dataspace associated with the memory representation of this /// selection /// - DataSpace getMemSpace() const noexcept; + DataSpace getMemSpace() const; /// /// \brief getDataSet /// \return parent dataset of this selection /// - DataSet& getDataset() noexcept; - const DataSet& getDataset() const noexcept; + DataSet& getDataset(); + const DataSet& getDataset() const; /// /// \brief return the datatype of the selection diff --git a/include/highfive/bits/H5File_misc.hpp b/include/highfive/bits/H5File_misc.hpp index 52ae59516..6013953b1 100644 --- a/include/highfive/bits/H5File_misc.hpp +++ b/include/highfive/bits/H5File_misc.hpp @@ -82,7 +82,7 @@ inline File::File(const std::string& filename, _hid = detail::h5f_create(filename.c_str(), createMode, fcpl, fapl); } -inline const std::string& File::getName() const noexcept { +inline const std::string& File::getName() const { if (_filename.empty()) { _filename = details::get_name([this](char* buffer, size_t length) { return detail::h5f_get_name(getId(), buffer, length); diff --git a/include/highfive/bits/H5Selection_misc.hpp b/include/highfive/bits/H5Selection_misc.hpp index c35b7bbf3..d1c14e930 100644 --- a/include/highfive/bits/H5Selection_misc.hpp +++ b/include/highfive/bits/H5Selection_misc.hpp @@ -17,19 +17,19 @@ inline Selection::Selection(const DataSpace& memspace, , _file_space(file_space) , _set(set) {} -inline DataSpace Selection::getSpace() const noexcept { +inline DataSpace Selection::getSpace() const { return _file_space; } -inline DataSpace Selection::getMemSpace() const noexcept { +inline DataSpace Selection::getMemSpace() const { return _mem_space; } -inline DataSet& Selection::getDataset() noexcept { +inline DataSet& Selection::getDataset() { return _set; } -inline const DataSet& Selection::getDataset() const noexcept { +inline const DataSet& Selection::getDataset() const { return _set; } From abf4c697bec4537bb23c92010acb73d572c5286e Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Mon, 13 May 2024 13:25:51 +0200 Subject: [PATCH 83/97] Move test code: strings and empty arrays. 
(#999) --- doc/developer_guide.md | 5 +- tests/unit/CMakeLists.txt | 2 +- tests/unit/test_empty_arrays.cpp | 248 +++++++++++++ tests/unit/test_string.cpp | 351 ++++++++++++++++++ tests/unit/tests_high_five_base.cpp | 546 ---------------------------- 5 files changed, 604 insertions(+), 548 deletions(-) create mode 100644 tests/unit/test_empty_arrays.cpp create mode 100644 tests/unit/test_string.cpp diff --git a/doc/developer_guide.md b/doc/developer_guide.md index 13e360fc3..90867ca12 100644 --- a/doc/developer_guide.md +++ b/doc/developer_guide.md @@ -195,6 +195,9 @@ Write-read cycles for scalar values should be implemented in Unit-tests related to checking that `DataType` API, go in `tests/unit/tests_high_data_type.cpp`. +#### Empty Arrays +Check related to empty arrays to in `tests/unit/test_empty_arrays.cpp`. + #### Selections Anything selection related goes in `tests/unit/test_high_five_selection.cpp`. This includes things like `ElementSet` and `HyperSlab`. @@ -204,7 +207,7 @@ Regular write-read cycles for strings are performed along with the other types, see above. This should cover compatibility of `std::string` with all containers. However, additional testing is required, e.g. character set, padding, fixed vs. variable length. These all go in -`tests/unit/test_high_five_string.cpp`. +`tests/unit/test_string.cpp`. #### Specific Tests For Optional Containers If containers, e.g. `Eigen::Matrix` require special checks those go in files diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index ab7c65749..980fe077a 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -6,7 +6,7 @@ if(MSVC) endif() ## Base tests -foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type test_legacy) +foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type test_empty_arrays test_legacy test_string) add_executable(${test_name} "${test_name}.cpp") target_link_libraries(${test_name} HighFive HighFiveWarnings HighFiveFlags Catch2::Catch2WithMain) target_link_libraries(${test_name} HighFiveOptionalDependencies) diff --git a/tests/unit/test_empty_arrays.cpp b/tests/unit/test_empty_arrays.cpp new file mode 100644 index 000000000..ea447ffaf --- /dev/null +++ b/tests/unit/test_empty_arrays.cpp @@ -0,0 +1,248 @@ +/* + * Copyright (c), 2017-2024, Blue Brain Project - EPFL + * + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ +#include +#include + +#include +#include +#include + +#include +#include "tests_high_five.hpp" +#include "create_traits.hpp" + +#ifdef HIGHFIVE_TEST_BOOST +#include +#endif + +#ifdef HIGHFIVE_TEST_EIGEN +#include +#endif + +#ifdef HIGHFIVE_TEST_SPAN +#include +#endif + +using namespace HighFive; +using Catch::Matchers::Equals; + + +template +struct CreateEmptyVector; + +template <> +struct CreateEmptyVector<1> { + using container_type = std::vector; + + static container_type create(const std::vector& dims) { + return container_type(dims[0], 2); + } +}; + +template +struct CreateEmptyVector { + using container_type = std::vector::container_type>; + + static container_type create(const std::vector& dims) { + auto subdims = std::vector(dims.begin() + 1, dims.end()); + return container_type(dims[0], CreateEmptyVector::create(subdims)); + } +}; + +#ifdef HIGHFIVE_TEST_BOOST +template +struct CreateEmptyBoostMultiArray { + using container_type = boost::multi_array(n_dim)>; + + static container_type create(const std::vector& dims) { + auto container = container_type(dims); + + auto raw_data = std::vector(compute_total_size(dims)); + container.assign(raw_data.begin(), raw_data.end()); + + return container; + } +}; +#endif + + +#ifdef HIGHFIVE_TEST_EIGEN +struct CreateEmptyEigenVector { + using container_type = Eigen::VectorXi; + + static container_type create(const std::vector& dims) { + return container_type::Constant(int(dims[0]), 2); + } +}; + +struct CreateEmptyEigenMatrix { + using container_type = Eigen::MatrixXi; + + static container_type create(const std::vector& dims) { + return container_type::Constant(int(dims[0]), int(dims[1]), 2); + } +}; +#endif + +template +void check_empty_dimensions(const Container& container, const std::vector& expected_dims) { + auto deduced_dims = details::inspector::getDimensions(container); + + REQUIRE(expected_dims.size() == deduced_dims.size()); + + // The dims after hitting the first `0` are finicky. We allow those to be deduced as either `1` + // or what the original dims said. The `1` allows broadcasting, the "same as original" enables + // statically sized objects, which conceptually have dims, even if there's no object. 
+ bool allow_one = false; + for (size_t i = 0; i < expected_dims.size(); ++i) { + REQUIRE(((expected_dims[i] == deduced_dims[i]) || (allow_one && (deduced_dims[i] == 1ul)))); + + if (expected_dims[i] == 0) { + allow_one = true; + } + } +} + +template +void check_empty_dimensions(const std::vector& dims) { + auto input_data = CreateContainer::create(dims); + check_empty_dimensions(input_data, dims); +} + +template +void check_empty_read_write_cycle(const std::vector& dims) { + using container_type = typename CreateContainer::container_type; + + const std::string file_name("h5_empty_attr.h5"); + const std::string dataset_name("dset"); + File file(file_name, File::Truncate); + + auto input_data = CreateContainer::create(dims); + ReadWriteInterface::create(file, dataset_name, input_data); + + SECTION("read; one-dimensional vector (empty)") { + auto output_data = CreateEmptyVector<1>::create({0ul}); + + ReadWriteInterface::get(file, dataset_name).reshapeMemSpace({0ul}).read(output_data); + check_empty_dimensions(output_data, {0ul}); + } + + SECTION("read; pre-allocated (empty)") { + auto output_data = CreateContainer::create(dims); + ReadWriteInterface::get(file, dataset_name).reshapeMemSpace(dims).read(output_data); + + check_empty_dimensions(output_data, dims); + } + + SECTION("read; pre-allocated (oversized)") { + auto oversize_dims = std::vector(dims.size(), 2ul); + auto output_data = CreateContainer::create(oversize_dims); + ReadWriteInterface::get(file, dataset_name).reshapeMemSpace(dims).read(output_data); + + check_empty_dimensions(output_data, dims); + } + + SECTION("read; auto-allocated") { + auto output_data = ReadWriteInterface::get(file, dataset_name) + .reshapeMemSpace(dims) + .template read(); + check_empty_dimensions(output_data, dims); + } +} + +template +void check_empty_dataset(const std::vector& dims) { + check_empty_read_write_cycle(dims); +} + +template +void check_empty_attribute(const std::vector& dims) { + check_empty_read_write_cycle(dims); +} + +template +void check_empty_everything(const std::vector& dims) { + SECTION("Empty dimensions") { + check_empty_dimensions(dims); + } + + SECTION("Empty datasets") { + check_empty_dataset(dims); + } + + SECTION("Empty attribute") { + check_empty_attribute(dims); + } +} + +#ifdef HIGHFIVE_TEST_EIGEN +template +void check_empty_eigen(const std::vector&) {} + +template <> +void check_empty_eigen<1>(const std::vector& dims) { + SECTION("Eigen::Vector") { + check_empty_everything({dims[0], 1ul}); + } +} + +template <> +void check_empty_eigen<2>(const std::vector& dims) { + SECTION("Eigen::Matrix") { + check_empty_everything(dims); + } +} +#endif + +template +void check_empty(const std::vector& dims) { + REQUIRE(dims.size() == ndim); + + SECTION("std::vector") { + check_empty_everything>(dims); + } + +#ifdef HIGHFIVE_TEST_BOOST + SECTION("boost::multi_array") { + check_empty_everything>(dims); + } +#endif + +#ifdef HIGHFIVE_TEST_EIGEN + check_empty_eigen(dims); +#endif +} + +TEST_CASE("Empty arrays") { + SECTION("one-dimensional") { + check_empty<1>({0ul}); + } + + SECTION("two-dimensional") { + std::vector> testcases{{0ul, 1ul}, {1ul, 0ul}}; + + for (const auto& dims: testcases) { + SECTION(details::format_vector(dims)) { + check_empty<2>(dims); + } + } + } + + SECTION("three-dimensional") { + std::vector> testcases{{0ul, 1ul, 1ul}, + {1ul, 1ul, 0ul}, + {1ul, 0ul, 1ul}}; + + for (const auto& dims: testcases) { + SECTION(details::format_vector(dims)) { + check_empty<3>(dims); + } + } + } +} diff --git a/tests/unit/test_string.cpp 
b/tests/unit/test_string.cpp new file mode 100644 index 000000000..ed6a4a5bf --- /dev/null +++ b/tests/unit/test_string.cpp @@ -0,0 +1,351 @@ +/* + * Copyright (c), 2017-2024, Blue Brain Project - EPFL + * + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ + +#include +#include +#include + +#include +#include "tests_high_five.hpp" +#include "create_traits.hpp" + + +using namespace HighFive; +using Catch::Matchers::Equals; + +TEST_CASE("StringType") { + SECTION("enshrine-defaults") { + auto fixed_length = FixedLengthStringType(32, StringPadding::SpacePadded); + auto variable_length = VariableLengthStringType(); + + REQUIRE(fixed_length.getCharacterSet() == CharacterSet::Ascii); + REQUIRE(variable_length.getCharacterSet() == CharacterSet::Ascii); + } + + SECTION("fixed-length") { + auto fixed_length = + FixedLengthStringType(32, StringPadding::SpacePadded, CharacterSet::Utf8); + auto string_type = fixed_length.asStringType(); + + REQUIRE(string_type.getId() == fixed_length.getId()); + REQUIRE(string_type.getCharacterSet() == CharacterSet::Utf8); + REQUIRE(string_type.getPadding() == StringPadding::SpacePadded); + REQUIRE(string_type.getSize() == 32); + REQUIRE(!string_type.isVariableStr()); + REQUIRE(string_type.isFixedLenStr()); + } + + SECTION("variable-length") { + auto variable_length = VariableLengthStringType(CharacterSet::Utf8); + auto string_type = variable_length.asStringType(); + + REQUIRE(string_type.getId() == variable_length.getId()); + REQUIRE(string_type.getCharacterSet() == CharacterSet::Utf8); + REQUIRE(string_type.isVariableStr()); + REQUIRE(!string_type.isFixedLenStr()); + } + + SECTION("atomic") { + auto atomic = AtomicType(); + REQUIRE_THROWS(atomic.asStringType()); + } +} + +template +void check_single_string(File file, size_t string_length) { + auto value = std::string(string_length, 'o'); + auto dataspace = DataSpace::From(value); + + auto n_chars = value.size() + 1; + auto n_chars_overlength = n_chars + 10; + auto fixed_length = FixedLengthStringType(n_chars, StringPadding::NullTerminated); + auto overlength_nullterm = FixedLengthStringType(n_chars_overlength, + StringPadding::NullTerminated); + auto overlength_nullpad = FixedLengthStringType(n_chars_overlength, StringPadding::NullPadded); + auto overlength_spacepad = FixedLengthStringType(n_chars_overlength, + StringPadding::SpacePadded); + auto variable_length = VariableLengthStringType(); + + SECTION("automatic") { + auto obj = CreateTraits::create(file, "auto", value); + REQUIRE(obj.template read() == value); + } + + SECTION("fixed length") { + auto obj = CreateTraits::create(file, "fixed", dataspace, fixed_length); + obj.write(value); + REQUIRE(obj.template read() == value); + } + + SECTION("overlength null-terminated") { + auto obj = + CreateTraits::create(file, "overlength_nullterm", dataspace, overlength_nullterm); + obj.write(value); + REQUIRE(obj.template read() == value); + } + + SECTION("overlength null-padded") { + auto obj = CreateTraits::create(file, "overlength_nullpad", dataspace, overlength_nullpad); + obj.write(value); + auto expected = std::string(n_chars_overlength, '\0'); + expected.replace(0, value.size(), value.data()); + REQUIRE(obj.template read() == expected); + } + + SECTION("overlength space-padded") { + auto obj = + CreateTraits::create(file, "overlength_spacepad", dataspace, overlength_spacepad); + obj.write(value); + auto expected = std::string(n_chars_overlength, ' '); 
+ expected.replace(0, value.size(), value.data()); + REQUIRE(obj.template read() == expected); + } + + SECTION("variable length") { + auto obj = CreateTraits::create(file, "variable", dataspace, variable_length); + obj.write(value); + REQUIRE(obj.template read() == value); + } +} + +template +void check_multiple_string(File file, size_t string_length) { + using value_t = std::vector; + auto value = value_t{std::string(string_length, 'o'), std::string(string_length, 'x')}; + + auto dataspace = DataSpace::From(value); + + auto string_overlength = string_length + 10; + auto onpoint_nullpad = FixedLengthStringType(string_length, StringPadding::NullPadded); + auto onpoint_spacepad = FixedLengthStringType(string_length, StringPadding::SpacePadded); + + auto overlength_nullterm = FixedLengthStringType(string_overlength, + StringPadding::NullTerminated); + auto overlength_nullpad = FixedLengthStringType(string_overlength, StringPadding::NullPadded); + auto overlength_spacepad = FixedLengthStringType(string_overlength, StringPadding::SpacePadded); + auto variable_length = VariableLengthStringType(); + + auto check = [](const value_t actual, const value_t& expected) { + REQUIRE(actual.size() == expected.size()); + for (size_t i = 0; i < actual.size(); ++i) { + REQUIRE(actual[i] == expected[i]); + } + }; + + SECTION("automatic") { + auto obj = CreateTraits::create(file, "auto", value); + check(obj.template read(), value); + } + + SECTION("variable length") { + auto obj = CreateTraits::create(file, "variable", dataspace, variable_length); + obj.write(value); + check(obj.template read(), value); + } + + auto make_padded_reference = [&](char pad, size_t n) { + auto expected = std::vector(value.size(), std::string(n, pad)); + for (size_t i = 0; i < value.size(); ++i) { + expected[i].replace(0, value[i].size(), value[i].data()); + } + + return expected; + }; + + auto check_fixed_length = [&](const std::string& label, size_t length) { + SECTION(label + " null-terminated") { + auto datatype = FixedLengthStringType(length + 1, StringPadding::NullTerminated); + auto obj = CreateTraits::create(file, label + "_nullterm", dataspace, datatype); + obj.write(value); + check(obj.template read(), value); + } + + SECTION(label + " null-padded") { + auto datatype = FixedLengthStringType(length, StringPadding::NullPadded); + auto obj = CreateTraits::create(file, label + "_nullpad", dataspace, datatype); + obj.write(value); + auto expected = make_padded_reference('\0', length); + check(obj.template read(), expected); + } + + SECTION(label + " space-padded") { + auto datatype = FixedLengthStringType(length, StringPadding::SpacePadded); + auto obj = CreateTraits::create(file, label + "_spacepad", dataspace, datatype); + obj.write(value); + auto expected = make_padded_reference(' ', length); + check(obj.template read(), expected); + } + }; + + check_fixed_length("onpoint", string_length); + check_fixed_length("overlength", string_length + 5); + + + SECTION("underlength null-terminated") { + auto datatype = FixedLengthStringType(string_length, StringPadding::NullTerminated); + auto obj = CreateTraits::create(file, "underlength_nullterm", dataspace, datatype); + REQUIRE_THROWS(obj.write(value)); + } + + SECTION("underlength nullpad") { + auto datatype = FixedLengthStringType(string_length - 1, StringPadding::NullPadded); + auto obj = CreateTraits::create(file, "underlength_nullpad", dataspace, datatype); + REQUIRE_THROWS(obj.write(value)); + } + + SECTION("underlength spacepad") { + auto datatype = 
FixedLengthStringType(string_length - 1, StringPadding::NullTerminated); + auto obj = CreateTraits::create(file, "underlength_spacepad", dataspace, datatype); + REQUIRE_THROWS(obj.write(value)); + } +} + +TEST_CASE("HighFiveSTDString (dataset, single, short)") { + File file("std_string_dataset_single_short.h5", File::Truncate); + check_single_string(file, 3); +} + +TEST_CASE("HighFiveSTDString (attribute, single, short)") { + File file("std_string_attribute_single_short.h5", File::Truncate); + check_single_string(file, 3); +} + +TEST_CASE("HighFiveSTDString (dataset, single, long)") { + File file("std_string_dataset_single_long.h5", File::Truncate); + check_single_string(file, 256); +} + +TEST_CASE("HighFiveSTDString (attribute, single, long)") { + File file("std_string_attribute_single_long.h5", File::Truncate); + check_single_string(file, 256); +} + +TEST_CASE("HighFiveSTDString (dataset, multiple, short)") { + File file("std_string_dataset_multiple_short.h5", File::Truncate); + check_multiple_string(file, 3); +} + +TEST_CASE("HighFiveSTDString (attribute, multiple, short)") { + File file("std_string_attribute_multiple_short.h5", File::Truncate); + check_multiple_string(file, 3); +} + +TEST_CASE("HighFiveSTDString (dataset, multiple, long)") { + File file("std_string_dataset_multiple_long.h5", File::Truncate); + check_multiple_string(file, 256); +} + +TEST_CASE("HighFiveSTDString (attribute, multiple, long)") { + File file("std_string_attribute_multiple_long.h5", File::Truncate); + check_multiple_string(file, 256); +} + +TEST_CASE("HighFiveFixedString") { + const std::string file_name("array_atomic_types.h5"); + const std::string group_1("group1"); + + // Create a new file using the default property lists. + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + char raw_strings[][10] = {"abcd", "1234"}; + + /// This will not compile - only char arrays - hits static_assert with a nice + /// error + // file.createDataSet(ds_name, DataSpace(2))); + + { // But char should be fine + auto ds = file.createDataSet("ds1", DataSpace(2)); + CHECK(ds.getDataType().getClass() == DataTypeClass::String); + ds.write(raw_strings); + } + + { // char[] is, by default, int8 + auto ds2 = file.createDataSet("ds2", raw_strings); + CHECK(ds2.getDataType().getClass() == DataTypeClass::Integer); + } + + { // String Truncate happens low-level if well setup + auto ds3 = file.createDataSet("ds3", DataSpace::FromCharArrayStrings(raw_strings)); + ds3.write(raw_strings); + } + + { // Write as raw elements from pointer (with const) + const char(*strings_fixed)[10] = raw_strings; + // With a pointer we dont know how many strings -> manual DataSpace + file.createDataSet("ds4", DataSpace(2)).write(strings_fixed); + } + + + { // Cant convert flex-length to fixed-length + const char* buffer[] = {"abcd", "1234"}; + SilenceHDF5 silencer; + CHECK_THROWS_AS(file.createDataSet("ds5", DataSpace(2)).write(buffer), + HighFive::DataSetException); + } + + { // scalar char strings + const char buffer[] = "abcd"; + file.createDataSet("ds6", DataSpace(1)).write(buffer); + } + + { + // Direct way of writing `std::string` as a fixed length + // HDF5 string. 
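+        //
+        // The datatype is sized `value.size() + 1` so the '\0' required by
+        // NullTerminated padding fits; `write_raw` then hands the raw `char`
+        // buffer to HDF5 directly, bypassing HighFive's automatic conversion.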
+ + std::string value = "foo"; + auto n_chars = value.size() + 1; + + auto datatype = FixedLengthStringType(n_chars, StringPadding::NullTerminated); + auto dataspace = DataSpace(1); + + auto ds = file.createDataSet("ds8", dataspace, datatype); + ds.write_raw(value.data(), datatype); + + { + // Due to missing non-const overload of `data()` until C++17 we'll + // read into something else instead (don't forget the '\0'). + auto expected = std::vector(n_chars, '!'); + ds.read_raw(expected.data(), datatype); + + CHECK(expected.size() == value.size() + 1); + for (size_t i = 0; i < value.size(); ++i) { + REQUIRE(expected[i] == value[i]); + } + } + +#if HIGHFIVE_CXX_STD >= 17 + { + auto expected = std::string(value.size(), '-'); + ds.read_raw(expected.data(), datatype); + + REQUIRE(expected == value); + } +#endif + } + + { + size_t n_chars = 4; + size_t n_strings = 2; + + std::vector value(n_chars * n_strings, '!'); + + auto datatype = FixedLengthStringType(n_chars, StringPadding::NullTerminated); + auto dataspace = DataSpace(n_strings); + + auto ds = file.createDataSet("ds9", dataspace, datatype); + ds.write_raw(value.data(), datatype); + + auto expected = std::vector(value.size(), '-'); + ds.read_raw(expected.data(), datatype); + + CHECK(expected.size() == value.size()); + for (size_t i = 0; i < value.size(); ++i) { + REQUIRE(expected[i] == value[i]); + } + } +} diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 16da4bf80..609c6a7f8 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -819,45 +819,6 @@ TEST_CASE("Test simple listings") { } } -TEST_CASE("StringType") { - SECTION("enshrine-defaults") { - auto fixed_length = FixedLengthStringType(32, StringPadding::SpacePadded); - auto variable_length = VariableLengthStringType(); - - REQUIRE(fixed_length.getCharacterSet() == CharacterSet::Ascii); - REQUIRE(variable_length.getCharacterSet() == CharacterSet::Ascii); - } - - SECTION("fixed-length") { - auto fixed_length = - FixedLengthStringType(32, StringPadding::SpacePadded, CharacterSet::Utf8); - auto string_type = fixed_length.asStringType(); - - REQUIRE(string_type.getId() == fixed_length.getId()); - REQUIRE(string_type.getCharacterSet() == CharacterSet::Utf8); - REQUIRE(string_type.getPadding() == StringPadding::SpacePadded); - REQUIRE(string_type.getSize() == 32); - REQUIRE(!string_type.isVariableStr()); - REQUIRE(string_type.isFixedLenStr()); - } - - SECTION("variable-length") { - auto variable_length = VariableLengthStringType(CharacterSet::Utf8); - auto string_type = variable_length.asStringType(); - - REQUIRE(string_type.getId() == variable_length.getId()); - REQUIRE(string_type.getCharacterSet() == CharacterSet::Utf8); - REQUIRE(string_type.isVariableStr()); - REQUIRE(!string_type.isFixedLenStr()); - } - - SECTION("atomic") { - auto atomic = AtomicType(); - REQUIRE_THROWS(atomic.asStringType()); - } -} - - TEST_CASE("DataTypeEqualTakeBack") { const std::string file_name("h5tutr_dset.h5"); const std::string dataset_name("dset"); @@ -1687,220 +1648,6 @@ TEST_CASE("Modify Mem Space, dset") { } -template -struct CreateEmptyVector; - -template <> -struct CreateEmptyVector<1> { - using container_type = std::vector; - - static container_type create(const std::vector& dims) { - return container_type(dims[0], 2); - } -}; - -template -struct CreateEmptyVector { - using container_type = std::vector::container_type>; - - static container_type create(const std::vector& dims) { - auto subdims = std::vector(dims.begin() + 
1, dims.end()); - return container_type(dims[0], CreateEmptyVector::create(subdims)); - } -}; - -#ifdef HIGHFIVE_TEST_BOOST -template -struct CreateEmptyBoostMultiArray { - using container_type = boost::multi_array(n_dim)>; - - static container_type create(const std::vector& dims) { - auto container = container_type(dims); - - auto raw_data = std::vector(compute_total_size(dims)); - container.assign(raw_data.begin(), raw_data.end()); - - return container; - } -}; -#endif - - -#ifdef HIGHFIVE_TEST_EIGEN -struct CreateEmptyEigenVector { - using container_type = Eigen::VectorXi; - - static container_type create(const std::vector& dims) { - return container_type::Constant(int(dims[0]), 2); - } -}; - -struct CreateEmptyEigenMatrix { - using container_type = Eigen::MatrixXi; - - static container_type create(const std::vector& dims) { - return container_type::Constant(int(dims[0]), int(dims[1]), 2); - } -}; -#endif - -template -void check_empty_dimensions(const Container& container, const std::vector& expected_dims) { - auto deduced_dims = details::inspector::getDimensions(container); - - REQUIRE(expected_dims.size() == deduced_dims.size()); - - // The dims after hitting the first `0` are finicky. We allow those to be deduced as either `1` - // or what the original dims said. The `1` allows broadcasting, the "same as original" enables - // statically sized objects, which conceptually have dims, even if there's no object. - bool allow_one = false; - for (size_t i = 0; i < expected_dims.size(); ++i) { - REQUIRE(((expected_dims[i] == deduced_dims[i]) || (allow_one && (deduced_dims[i] == 1ul)))); - - if (expected_dims[i] == 0) { - allow_one = true; - } - } -} - -template -void check_empty_dimensions(const std::vector& dims) { - auto input_data = CreateContainer::create(dims); - check_empty_dimensions(input_data, dims); -} - -template -void check_empty_read_write_cycle(const std::vector& dims) { - using container_type = typename CreateContainer::container_type; - - const std::string file_name("h5_empty_attr.h5"); - const std::string dataset_name("dset"); - File file(file_name, File::Truncate); - - auto input_data = CreateContainer::create(dims); - ReadWriteInterface::create(file, dataset_name, input_data); - - SECTION("read; one-dimensional vector (empty)") { - auto output_data = CreateEmptyVector<1>::create({0ul}); - - ReadWriteInterface::get(file, dataset_name).reshapeMemSpace({0ul}).read(output_data); - check_empty_dimensions(output_data, {0ul}); - } - - SECTION("read; pre-allocated (empty)") { - auto output_data = CreateContainer::create(dims); - ReadWriteInterface::get(file, dataset_name).reshapeMemSpace(dims).read(output_data); - - check_empty_dimensions(output_data, dims); - } - - SECTION("read; pre-allocated (oversized)") { - auto oversize_dims = std::vector(dims.size(), 2ul); - auto output_data = CreateContainer::create(oversize_dims); - ReadWriteInterface::get(file, dataset_name).reshapeMemSpace(dims).read(output_data); - - check_empty_dimensions(output_data, dims); - } - - SECTION("read; auto-allocated") { - auto output_data = ReadWriteInterface::get(file, dataset_name) - .reshapeMemSpace(dims) - .template read(); - check_empty_dimensions(output_data, dims); - } -} - -template -void check_empty_dataset(const std::vector& dims) { - check_empty_read_write_cycle(dims); -} - -template -void check_empty_attribute(const std::vector& dims) { - check_empty_read_write_cycle(dims); -} - -template -void check_empty_everything(const std::vector& dims) { - SECTION("Empty dimensions") { - 
check_empty_dimensions(dims); - } - - SECTION("Empty datasets") { - check_empty_dataset(dims); - } - - SECTION("Empty attribute") { - check_empty_attribute(dims); - } -} - -#ifdef HIGHFIVE_TEST_EIGEN -template -void check_empty_eigen(const std::vector&) {} - -template <> -void check_empty_eigen<1>(const std::vector& dims) { - SECTION("Eigen::Vector") { - check_empty_everything({dims[0], 1ul}); - } -} - -template <> -void check_empty_eigen<2>(const std::vector& dims) { - SECTION("Eigen::Matrix") { - check_empty_everything(dims); - } -} -#endif - -template -void check_empty(const std::vector& dims) { - REQUIRE(dims.size() == ndim); - - SECTION("std::vector") { - check_empty_everything>(dims); - } - -#ifdef HIGHFIVE_TEST_BOOST - SECTION("boost::multi_array") { - check_empty_everything>(dims); - } -#endif - -#ifdef HIGHFIVE_TEST_EIGEN - check_empty_eigen(dims); -#endif -} - -TEST_CASE("Empty arrays") { - SECTION("one-dimensional") { - check_empty<1>({0ul}); - } - - SECTION("two-dimensional") { - std::vector> testcases{{0ul, 1ul}, {1ul, 0ul}}; - - for (const auto& dims: testcases) { - SECTION(details::format_vector(dims)) { - check_empty<2>(dims); - } - } - } - - SECTION("three-dimensional") { - std::vector> testcases{{0ul, 1ul, 1ul}, - {1ul, 1ul, 0ul}, - {1ul, 0ul, 1ul}}; - - for (const auto& dims: testcases) { - SECTION(details::format_vector(dims)) { - check_empty<3>(dims); - } - } - } -} - TEST_CASE("HighFiveRecursiveGroups") { const std::string file_name("h5_ds_exist.h5"); const std::string group_1("group1"); @@ -2315,299 +2062,6 @@ TEST_CASE("DirectWriteBool") { } -template -void check_single_string(File file, size_t string_length) { - auto value = std::string(string_length, 'o'); - auto dataspace = DataSpace::From(value); - - auto n_chars = value.size() + 1; - auto n_chars_overlength = n_chars + 10; - auto fixed_length = FixedLengthStringType(n_chars, StringPadding::NullTerminated); - auto overlength_nullterm = FixedLengthStringType(n_chars_overlength, - StringPadding::NullTerminated); - auto overlength_nullpad = FixedLengthStringType(n_chars_overlength, StringPadding::NullPadded); - auto overlength_spacepad = FixedLengthStringType(n_chars_overlength, - StringPadding::SpacePadded); - auto variable_length = VariableLengthStringType(); - - SECTION("automatic") { - auto obj = CreateTraits::create(file, "auto", value); - REQUIRE(obj.template read() == value); - } - - SECTION("fixed length") { - auto obj = CreateTraits::create(file, "fixed", dataspace, fixed_length); - obj.write(value); - REQUIRE(obj.template read() == value); - } - - SECTION("overlength null-terminated") { - auto obj = - CreateTraits::create(file, "overlength_nullterm", dataspace, overlength_nullterm); - obj.write(value); - REQUIRE(obj.template read() == value); - } - - SECTION("overlength null-padded") { - auto obj = CreateTraits::create(file, "overlength_nullpad", dataspace, overlength_nullpad); - obj.write(value); - auto expected = std::string(n_chars_overlength, '\0'); - expected.replace(0, value.size(), value.data()); - REQUIRE(obj.template read() == expected); - } - - SECTION("overlength space-padded") { - auto obj = - CreateTraits::create(file, "overlength_spacepad", dataspace, overlength_spacepad); - obj.write(value); - auto expected = std::string(n_chars_overlength, ' '); - expected.replace(0, value.size(), value.data()); - REQUIRE(obj.template read() == expected); - } - - SECTION("variable length") { - auto obj = CreateTraits::create(file, "variable", dataspace, variable_length); - obj.write(value); - 
REQUIRE(obj.template read() == value); - } -} - -template -void check_multiple_string(File file, size_t string_length) { - using value_t = std::vector; - auto value = value_t{std::string(string_length, 'o'), std::string(string_length, 'x')}; - - auto dataspace = DataSpace::From(value); - - auto string_overlength = string_length + 10; - auto onpoint_nullpad = FixedLengthStringType(string_length, StringPadding::NullPadded); - auto onpoint_spacepad = FixedLengthStringType(string_length, StringPadding::SpacePadded); - - auto overlength_nullterm = FixedLengthStringType(string_overlength, - StringPadding::NullTerminated); - auto overlength_nullpad = FixedLengthStringType(string_overlength, StringPadding::NullPadded); - auto overlength_spacepad = FixedLengthStringType(string_overlength, StringPadding::SpacePadded); - auto variable_length = VariableLengthStringType(); - - auto check = [](const value_t actual, const value_t& expected) { - REQUIRE(actual.size() == expected.size()); - for (size_t i = 0; i < actual.size(); ++i) { - REQUIRE(actual[i] == expected[i]); - } - }; - - SECTION("automatic") { - auto obj = CreateTraits::create(file, "auto", value); - check(obj.template read(), value); - } - - SECTION("variable length") { - auto obj = CreateTraits::create(file, "variable", dataspace, variable_length); - obj.write(value); - check(obj.template read(), value); - } - - auto make_padded_reference = [&](char pad, size_t n) { - auto expected = std::vector(value.size(), std::string(n, pad)); - for (size_t i = 0; i < value.size(); ++i) { - expected[i].replace(0, value[i].size(), value[i].data()); - } - - return expected; - }; - - auto check_fixed_length = [&](const std::string& label, size_t length) { - SECTION(label + " null-terminated") { - auto datatype = FixedLengthStringType(length + 1, StringPadding::NullTerminated); - auto obj = CreateTraits::create(file, label + "_nullterm", dataspace, datatype); - obj.write(value); - check(obj.template read(), value); - } - - SECTION(label + " null-padded") { - auto datatype = FixedLengthStringType(length, StringPadding::NullPadded); - auto obj = CreateTraits::create(file, label + "_nullpad", dataspace, datatype); - obj.write(value); - auto expected = make_padded_reference('\0', length); - check(obj.template read(), expected); - } - - SECTION(label + " space-padded") { - auto datatype = FixedLengthStringType(length, StringPadding::SpacePadded); - auto obj = CreateTraits::create(file, label + "_spacepad", dataspace, datatype); - obj.write(value); - auto expected = make_padded_reference(' ', length); - check(obj.template read(), expected); - } - }; - - check_fixed_length("onpoint", string_length); - check_fixed_length("overlength", string_length + 5); - - - SECTION("underlength null-terminated") { - auto datatype = FixedLengthStringType(string_length, StringPadding::NullTerminated); - auto obj = CreateTraits::create(file, "underlength_nullterm", dataspace, datatype); - REQUIRE_THROWS(obj.write(value)); - } - - SECTION("underlength nullpad") { - auto datatype = FixedLengthStringType(string_length - 1, StringPadding::NullPadded); - auto obj = CreateTraits::create(file, "underlength_nullpad", dataspace, datatype); - REQUIRE_THROWS(obj.write(value)); - } - - SECTION("underlength spacepad") { - auto datatype = FixedLengthStringType(string_length - 1, StringPadding::NullTerminated); - auto obj = CreateTraits::create(file, "underlength_spacepad", dataspace, datatype); - REQUIRE_THROWS(obj.write(value)); - } -} - -TEST_CASE("HighFiveSTDString (dataset, single, short)") { 
- File file("std_string_dataset_single_short.h5", File::Truncate); - check_single_string(file, 3); -} - -TEST_CASE("HighFiveSTDString (attribute, single, short)") { - File file("std_string_attribute_single_short.h5", File::Truncate); - check_single_string(file, 3); -} - -TEST_CASE("HighFiveSTDString (dataset, single, long)") { - File file("std_string_dataset_single_long.h5", File::Truncate); - check_single_string(file, 256); -} - -TEST_CASE("HighFiveSTDString (attribute, single, long)") { - File file("std_string_attribute_single_long.h5", File::Truncate); - check_single_string(file, 256); -} - -TEST_CASE("HighFiveSTDString (dataset, multiple, short)") { - File file("std_string_dataset_multiple_short.h5", File::Truncate); - check_multiple_string(file, 3); -} - -TEST_CASE("HighFiveSTDString (attribute, multiple, short)") { - File file("std_string_attribute_multiple_short.h5", File::Truncate); - check_multiple_string(file, 3); -} - -TEST_CASE("HighFiveSTDString (dataset, multiple, long)") { - File file("std_string_dataset_multiple_long.h5", File::Truncate); - check_multiple_string(file, 256); -} - -TEST_CASE("HighFiveSTDString (attribute, multiple, long)") { - File file("std_string_attribute_multiple_long.h5", File::Truncate); - check_multiple_string(file, 256); -} - -TEST_CASE("HighFiveFixedString") { - const std::string file_name("array_atomic_types.h5"); - const std::string group_1("group1"); - - // Create a new file using the default property lists. - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - char raw_strings[][10] = {"abcd", "1234"}; - - /// This will not compile - only char arrays - hits static_assert with a nice - /// error - // file.createDataSet(ds_name, DataSpace(2))); - - { // But char should be fine - auto ds = file.createDataSet("ds1", DataSpace(2)); - CHECK(ds.getDataType().getClass() == DataTypeClass::String); - ds.write(raw_strings); - } - - { // char[] is, by default, int8 - auto ds2 = file.createDataSet("ds2", raw_strings); - CHECK(ds2.getDataType().getClass() == DataTypeClass::Integer); - } - - { // String Truncate happens low-level if well setup - auto ds3 = file.createDataSet("ds3", DataSpace::FromCharArrayStrings(raw_strings)); - ds3.write(raw_strings); - } - - { // Write as raw elements from pointer (with const) - const char(*strings_fixed)[10] = raw_strings; - // With a pointer we dont know how many strings -> manual DataSpace - file.createDataSet("ds4", DataSpace(2)).write(strings_fixed); - } - - - { // Cant convert flex-length to fixed-length - const char* buffer[] = {"abcd", "1234"}; - SilenceHDF5 silencer; - CHECK_THROWS_AS(file.createDataSet("ds5", DataSpace(2)).write(buffer), - HighFive::DataSetException); - } - - { // scalar char strings - const char buffer[] = "abcd"; - file.createDataSet("ds6", DataSpace(1)).write(buffer); - } - - { - // Direct way of writing `std::string` as a fixed length - // HDF5 string. - - std::string value = "foo"; - auto n_chars = value.size() + 1; - - auto datatype = FixedLengthStringType(n_chars, StringPadding::NullTerminated); - auto dataspace = DataSpace(1); - - auto ds = file.createDataSet("ds8", dataspace, datatype); - ds.write_raw(value.data(), datatype); - - { - // Due to missing non-const overload of `data()` until C++17 we'll - // read into something else instead (don't forget the '\0'). 
- auto expected = std::vector(n_chars, '!'); - ds.read_raw(expected.data(), datatype); - - CHECK(expected.size() == value.size() + 1); - for (size_t i = 0; i < value.size(); ++i) { - REQUIRE(expected[i] == value[i]); - } - } - -#if HIGHFIVE_CXX_STD >= 17 - { - auto expected = std::string(value.size(), '-'); - ds.read_raw(expected.data(), datatype); - - REQUIRE(expected == value); - } -#endif - } - - { - size_t n_chars = 4; - size_t n_strings = 2; - - std::vector value(n_chars * n_strings, '!'); - - auto datatype = FixedLengthStringType(n_chars, StringPadding::NullTerminated); - auto dataspace = DataSpace(n_strings); - - auto ds = file.createDataSet("ds9", dataspace, datatype); - ds.write_raw(value.data(), datatype); - - auto expected = std::vector(value.size(), '-'); - ds.read_raw(expected.data(), datatype); - - CHECK(expected.size() == value.size()); - for (size_t i = 0; i < value.size(); ++i) { - REQUIRE(expected[i] == value[i]); - } - } -} - TEST_CASE("HighFiveReference") { const std::string file_name("h5_ref_test.h5"); const std::string dataset1_name("dset1"); From 5bd727d5bf6de55c764621f807e6c1d582aba5c7 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 14 May 2024 07:51:18 +0200 Subject: [PATCH 84/97] Remove broadcasting. (#992) Removes broadcasting in favour of using `squeezeMemSpace` and `reshapeMemSpace`. Details can be found in the Migration Guide. --- doc/migration_guide.md | 66 +++++ include/highfive/H5Attribute.hpp | 2 +- include/highfive/bits/H5Attribute_misc.hpp | 14 +- include/highfive/bits/H5Converter_misc.hpp | 6 +- include/highfive/bits/H5Inspector_misc.hpp | 88 +------ include/highfive/bits/H5Slice_traits_misc.hpp | 10 +- .../bits/assert_compatible_spaces.hpp | 29 ++ src/examples/broadcasting_arrays.cpp | 7 +- tests/unit/tests_high_five_base.cpp | 249 +++++++----------- 9 files changed, 204 insertions(+), 267 deletions(-) create mode 100644 include/highfive/bits/assert_compatible_spaces.hpp diff --git a/doc/migration_guide.md b/doc/migration_guide.md index 0972c7e05..4870cd551 100644 --- a/doc/migration_guide.md +++ b/doc/migration_guide.md @@ -168,3 +168,69 @@ We felt that the savings in typing effort weren't worth introducing the concept of a "file driver". Removing the concept hopefully makes it easier to add a better abstraction for the handling of the property lists, when we discover such an abstraction. + +## Removal of broadcasting +HighFive v2 had a feature that a dataset (or attribute) of shape `[n, 1]` could +be read into a one-dimensional array automatically. + +The feature is prone to accidentally not failing. Consider an array that shape +`[n, m]` and in general both `n, m > 0`. Hence, one should always be reading +into a two-dimensional array, even if `n == 1` or `m == 1`. However, due to +broadcasting, if one of the dimensions (accidentally) happens to be one, then +the checks wont fails. This isn't a bug, however, it can hide a bug. For +example if the test happen to use `[n, 1]` datasets and a one-dimensional +array. + +Broadcasting in HighFive was different from broadcasting in NumPy. For reading +into one-dimensional data HighFive supports stripping all dimensions that are +not `1`. When extending the feature to multi-dimensional arrays it gets tricky. +We can't strip from both the front and back. If we allow stripping from both +ends, arrays such as `[1, n, m]` read into `[n, m]` if `m > 1` but into `[1, +n]` (instead of `[n, 1]`) if (coincidentally) `m == 1`. 
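+
+A sketch of how this hides a shape bug in practice (illustrative v2-style
+code, not copied verbatim from the old documentation):
+
+```
+// dset has shape [n, m] on disk; the caller assumes it is effectively flat.
+auto v = dset.read<std::vector<double>>();
+// v2: this succeeds silently whenever n == 1 or m == 1, so the wrong
+// assumption about the shape is only reported once both n, m > 1.
+```
+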
For HighFive because +avoiding being forced to read `[n, 1]` into `std::vector>` is +more important than `[1, n]`. Flattening the former requires copying +everything while the latter can be made flat by just accessing the first value. +Therefore, HighFive had a preference to strip from the right, while NumPy adds +`1`s to the front/left of the shape. + +In `v3` we've removed broadcasting. Instead users must use one of the two +alternatives: squeezing and reshaping. The examples show will use datasets and +reading, but it works the same for attributes and writing. + +### Squeezing +Often we know that the `k`th dimension is `1`, e.g. a column is `[n, 1]` and a +row is `[1, m]`. In this case it's convenient to state, remove dimension `k`. +The syntax to simultaneously remove the dimensions `{0, 2}` is: + +``` +dset.squeezeMemSpace({0, 2}).read(array); +``` +Which will read a dataset with dimensions `[1, n, 1]` into an array of shape +`[n]`. + +### Reshape +Sometimes it's easier to state what the new shape must be. For this we have the +syntax: +``` +dset.reshapeMemSpace(dims).read(array); +``` +To declare that `array` should have dimensions `dims` even if +`dset.getDimensions()` is something different. + +Example: +``` +dset.reshapeMemSpace({dset.getElementCount()}).read(array); +``` +to read into a one-dimensional array. + +### Scalars +There's a safe case that seems needlessly strict to enforce: if the dataset is +a multi-dimensional array with one element one should be able to read into +(write from) a scalar. + +The reverse, i.e. reading a scalar value in the HDF5 file into a +multi-dimensional array isn't supported, because if we want to support array +with runtime-defined rank, we can't deduce the correct shape, e.g. `[1]` vs. +`[1, 1, 1]`, when read into an array. + + diff --git a/include/highfive/H5Attribute.hpp b/include/highfive/H5Attribute.hpp index eb7893ce8..9fa3b63ba 100644 --- a/include/highfive/H5Attribute.hpp +++ b/include/highfive/H5Attribute.hpp @@ -71,7 +71,7 @@ class Attribute: public Object, public PathTraits { /// \since 1.0 DataType getDataType() const; - /// \brief Get the DataSpace of the current Attribute. + /// \brief Get a copy of the DataSpace of the current Attribute. 
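+    ///
+    /// Since the returned DataSpace is an independent copy, modifying it does
+    /// not affect this Attribute.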
/// \code{.cpp} /// Attribute attr = dset.createAttribute("foo", DataSpace(1, 2)); /// auto dspace = attr.getSpace(); // This will be a DataSpace of dimension 1 * 2 diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index 98b16ec1b..d339af1f1 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -24,6 +24,7 @@ #include "h5a_wrapper.hpp" #include "h5d_wrapper.hpp" #include "squeeze.hpp" +#include "assert_compatible_spaces.hpp" namespace HighFive { @@ -81,10 +82,7 @@ inline void Attribute::read(T& array) const { auto dims = mem_space.getDimensions(); if (mem_space.getElementCount() == 0) { - auto effective_dims = details::squeezeDimensions(dims, - details::inspector::recursive_ndim); - - details::inspector::prepare(array, effective_dims); + details::inspector::prepare(array, dims); return; } @@ -172,13 +170,7 @@ inline Attribute Attribute::squeezeMemSpace(const std::vector& axes) con } inline Attribute Attribute::reshapeMemSpace(const std::vector& new_dims) const { - auto n_elements_old = this->getMemSpace().getElementCount(); - auto n_elements_new = compute_total_size(new_dims); - if (n_elements_old != n_elements_new) { - throw Exception("Invalid parameter `new_dims` number of elements differ: " + - std::to_string(n_elements_old) + " (old) vs. " + - std::to_string(n_elements_new) + " (new)"); - } + detail::assert_compatible_spaces(this->getMemSpace(), new_dims); auto attr = *this; attr._mem_space = DataSpace(new_dims); diff --git a/include/highfive/bits/H5Converter_misc.hpp b/include/highfive/bits/H5Converter_misc.hpp index d1ba132c4..5fcbafb5e 100644 --- a/include/highfive/bits/H5Converter_misc.hpp +++ b/include/highfive/bits/H5Converter_misc.hpp @@ -415,10 +415,8 @@ struct data_converter { static Reader get_reader(const std::vector& dims, T& val, const DataType& file_datatype) { - // TODO Use bufferinfo for recursive_ndim - auto effective_dims = details::squeezeDimensions(dims, inspector::recursive_ndim); - inspector::prepare(val, effective_dims); - return Reader(effective_dims, val, file_datatype); + inspector::prepare(val, dims); + return Reader(dims, val, file_datatype); } }; diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index 6f52ff821..efb2343da 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -28,92 +28,16 @@ namespace HighFive { namespace details { inline bool checkDimensions(const std::vector& dims, size_t n_dim_requested) { - size_t n_dim_actual = dims.size(); - - // We should allow reading scalar from shapes like `(1, 1, 1)`. - if (n_dim_requested == 0) { - if (n_dim_actual == 0ul) { - return true; - } - - return size_t(std::count(dims.begin(), dims.end(), 1ul)) == n_dim_actual; - } - - // For non-scalar datasets, we can squeeze away singleton dimension, but - // we never add any. - if (n_dim_actual < n_dim_requested) { - return false; - } - - // Special case for 1-dimensional arrays, which can squeeze `1`s from either - // side simultaneously if needed. - if (n_dim_requested == 1ul) { - return n_dim_actual >= 1ul && - size_t(std::count(dims.begin(), dims.end(), 1ul)) >= n_dim_actual - 1ul; + if (dims.size() == n_dim_requested) { + return true; } - // All other cases strip front only. This avoid unstable behaviour when - // squeezing singleton dimensions. 
- size_t n_dim_excess = n_dim_actual - n_dim_requested; - - bool squeeze_back = true; - for (size_t i = 1; i <= n_dim_excess; ++i) { - if (dims[n_dim_actual - i] != 1) { - squeeze_back = false; - break; - } - } - - return squeeze_back; + // Scalar values still support broadcasting + // into arrays with one element. + size_t n_elements = compute_total_size(dims); + return n_elements == 1 && n_dim_requested == 0; } - -inline std::vector squeezeDimensions(const std::vector& dims, - size_t n_dim_requested) { - auto format_error_message = [&]() -> std::string { - return "Can't interpret dims = " + format_vector(dims) + " as " + - std::to_string(n_dim_requested) + "-dimensional."; - }; - - if (n_dim_requested == 0) { - if (!checkDimensions(dims, n_dim_requested)) { - throw std::invalid_argument("Failed dimensions check: " + format_error_message()); - } - - return {1ul}; - } - - auto n_dim = dims.size(); - if (n_dim < n_dim_requested) { - throw std::invalid_argument("Failed 'n_dim < n_dim_requested: " + format_error_message()); - } - - if (n_dim_requested == 1ul) { - size_t non_singleton_dim = size_t(-1); - for (size_t i = 0; i < n_dim; ++i) { - if (dims[i] != 1ul) { - if (non_singleton_dim == size_t(-1)) { - non_singleton_dim = i; - } else { - throw std::invalid_argument("Failed one-dimensional: " + - format_error_message()); - } - } - } - - return {dims[std::min(non_singleton_dim, n_dim - 1)]}; - } - - size_t n_dim_excess = dims.size() - n_dim_requested; - for (size_t i = 1; i <= n_dim_excess; ++i) { - if (dims[n_dim - i] != 1) { - throw std::invalid_argument("Failed stripping from back:" + format_error_message()); - } - } - - return std::vector(dims.begin(), - dims.end() - static_cast(n_dim_excess)); -} } // namespace details diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index 9983238d2..bf3f789eb 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -22,6 +22,7 @@ #include "H5Converter_misc.hpp" #include "squeeze.hpp" #include "compute_total_size.hpp" +#include "assert_compatible_spaces.hpp" namespace HighFive { @@ -316,14 +317,7 @@ template inline Selection SliceTraits::reshapeMemSpace(const std::vector& new_dims) const { auto slice = static_cast(*this); - auto n_elements_old = slice.getMemSpace().getElementCount(); - auto n_elements_new = compute_total_size(new_dims); - if (n_elements_old != n_elements_new) { - throw Exception("Invalid parameter `new_dims` number of elements differ: " + - std::to_string(n_elements_old) + " (old) vs. " + - std::to_string(n_elements_new) + " (new)"); - } - + detail::assert_compatible_spaces(slice.getMemSpace(), new_dims); return detail::make_selection(DataSpace(new_dims), slice.getSpace(), detail::getDataSet(slice)); } diff --git a/include/highfive/bits/assert_compatible_spaces.hpp b/include/highfive/bits/assert_compatible_spaces.hpp new file mode 100644 index 000000000..f4be279d4 --- /dev/null +++ b/include/highfive/bits/assert_compatible_spaces.hpp @@ -0,0 +1,29 @@ +/* + * Copyright (c), 2024, BlueBrain Project, EPFL + * + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ +#pragma once + +#include +#include "../H5Exception.hpp" +#include "../H5DataSpace.hpp" + +namespace HighFive { +namespace detail { + +inline void assert_compatible_spaces(const DataSpace& old, const std::vector& dims) { + auto n_elements_old = old.getElementCount(); + auto n_elements_new = dims.size() == 0 ? 1 : compute_total_size(dims); + + if (n_elements_old != n_elements_new) { + throw Exception("Invalid parameter `new_dims` number of elements differ: " + + std::to_string(n_elements_old) + " (old) vs. " + + std::to_string(n_elements_new) + " (new)"); + } +} +} // namespace detail +} // namespace HighFive diff --git a/src/examples/broadcasting_arrays.cpp b/src/examples/broadcasting_arrays.cpp index 9c1cf5a24..3684e17b0 100644 --- a/src/examples/broadcasting_arrays.cpp +++ b/src/examples/broadcasting_arrays.cpp @@ -36,10 +36,9 @@ int main(void) { auto dset = file.createDataSet("dset", DataSpace(dims), create_datatype()); - // Note that even though `values` is one-dimensional, we can still write it - // to an array of dimensions `[3, 1]`. Only the number of elements needs to - // match. - dset.write(values); + // Note that because `values` is one-dimensional, we can't write it + // to a dataset of dimensions `[3, 1]` directly. Instead we use: + dset.squeezeMemSpace({1}).write(values); // When reading, (re-)allocation might occur. The shape to be allocated is // the dimensions of the memspace. Therefore, one might want to either remove diff --git a/tests/unit/tests_high_five_base.cpp b/tests/unit/tests_high_five_base.cpp index 609c6a7f8..f52c2dcb8 100644 --- a/tests/unit/tests_high_five_base.cpp +++ b/tests/unit/tests_high_five_base.cpp @@ -1427,176 +1427,64 @@ TEMPLATE_LIST_TEST_CASE("ReadWriteSzip", "[template]", dataset_test_types) { } } -TEST_CASE("CheckDimensions") { - // List of dims which can all be one-dimensional. 
- std::vector> test_cases{ - {1ul, 3ul}, {3ul, 1ul}, {1ul, 1ul, 3ul}, {3ul, 1ul, 1ul}, {1ul, 3ul, 1ul}}; - - for (const auto& dims: test_cases) { - auto actual = details::checkDimensions(dims, 1ul); - - INFO("dims = " + details::format_vector(dims) + ", n_dims = 1"); - CHECK(actual); - - INFO("dims = " + details::format_vector(dims) + ", n_dims = 1"); - CHECK(!details::checkDimensions(dims, dims.size() + 1)); - } - - CHECK(details::checkDimensions(std::vector{1ul}, 0ul)); - CHECK(details::checkDimensions(std::vector{1ul}, 1ul)); - - CHECK(!details::checkDimensions(std::vector{0ul}, 0ul)); - CHECK(!details::checkDimensions(std::vector{2ul}, 0ul)); - - CHECK(!details::checkDimensions(std::vector{1ul, 2ul, 3ul}, 2ul)); - CHECK(details::checkDimensions(std::vector{3ul, 2ul, 1ul}, 2ul)); +template +void check_broadcast_scalar_memspace(File& file, + const std::string& name, + const std::vector& dims) { + auto datatype = create_datatype(); + auto obj = CreateTraits::create(file, name, DataSpace(dims), datatype); - CHECK(details::checkDimensions(std::vector{1ul, 1ul, 1ul, 1ul}, 1ul)); + double expected = 3.0; + obj.write(expected); - CHECK(details::checkDimensions(std::vector{}, 0ul)); - CHECK(!details::checkDimensions(std::vector{}, 1ul)); - CHECK(!details::checkDimensions(std::vector{}, 2ul)); + auto actual = obj.template read(); + CHECK(actual == expected); } +TEST_CASE("Broadcast scalar memspace, dset") { + File file("h5_broadcast_scalar_memspace_dset.h5", File::Truncate); -TEST_CASE("SqueezeDimensions") { - SECTION("possible") { - // List of testcases: the first number is n_dims then the input dimensions - // and finally the squeezed dimensions. - std::vector, std::vector>> test_cases{ - {1ul, {3ul, 1ul}, {3ul}}, - - {1ul, {1ul, 1ul, 1ul}, {1ul}}, - - {1ul, {1ul, 3ul, 1ul}, {3ul}}, - - {1ul, {3ul, 1ul, 1ul}, {3ul}}, - {2ul, {3ul, 1ul, 1ul}, {3ul, 1ul}}, - {3ul, {3ul, 1ul, 1ul}, {3ul, 1ul, 1ul}}, - - {3ul, {2ul, 1ul, 3ul}, {2ul, 1ul, 3ul}}}; - - for (const auto& tc: test_cases) { - auto n_dim_requested = std::get<0>(tc); - auto dims = std::get<1>(tc); - auto expected = std::get<2>(tc); - auto actual = details::squeezeDimensions(dims, n_dim_requested); - - CHECK(actual == expected); - } + SECTION("[1]") { + check_broadcast_scalar_memspace(file, "dset", {1}); } - SECTION("impossible") { - // List of testcases: the first number is n_dims then the input dimensions - // and finally the squeezed dimensions. - std::vector>> test_cases{{1ul, {1ul, 2ul, 3ul}}, - {2ul, {1ul, 2ul, 3ul, 1ul}}, - - {1ul, {2ul, 1ul, 3ul}}, - {2ul, {2ul, 1ul, 3ul}}}; - - for (const auto& tc: test_cases) { - auto n_dim_requested = std::get<0>(tc); - auto dims = std::get<1>(tc); - - CHECK_THROWS(details::squeezeDimensions(dims, n_dim_requested)); - } + SECTION("[1, 1, 1]") { + check_broadcast_scalar_memspace(file, "dset", {1, 1, 1}); } } -void check_broadcast_1d(HighFive::File& file, - const std::vector dims, - const std::string& dataset_name) { - // This checks that: - // - we can write 1D array into 2D dataset. - // - we can read 2D dataset into a 1D array. 
- std::vector input_data{5.0, 6.0, 7.0}; +TEST_CASE("Broadcast scalar memspace, attr") { + File file("h5_broadcast_scalar_memspace_attr.h5", File::Truncate); - - DataSpace dataspace(dims); - DataSet dataset = file.createDataSet(dataset_name, dataspace, AtomicType()); - - dataset.write(input_data); - - { - std::vector read_back; - dataset.read(read_back); - - CHECK(read_back == input_data); + SECTION("[1]") { + check_broadcast_scalar_memspace(file, "attr", {1}); } - { - auto read_back = dataset.read>(); - CHECK(read_back == input_data); + SECTION("[1, 1, 1]") { + check_broadcast_scalar_memspace(file, "attr", {1, 1, 1}); } } -// Broadcasting is supported -TEST_CASE("ReadInBroadcastDims") { - const std::string file_name("h5_broadcast_dset.h5"); - const std::string dataset_name("dset"); - - // Create a new file using the default property lists. - File file(file_name, File::Truncate); - - SECTION("one-dimensional (1, 3)") { - check_broadcast_1d(file, {1, 3}, dataset_name + "_a"); - } - - SECTION("one-dimensional (3, 1)") { - check_broadcast_1d(file, {3, 1}, dataset_name + "_b"); - } - - SECTION("two-dimensional (2, 3, 1)") { - std::vector dims{2, 3, 1}; - std::vector> input_data_2d{{2.0, 3.0, 4.0}, {10.0, 11.0, 12.0}}; - - DataSpace dataspace(dims); - DataSet dataset = file.createDataSet(dataset_name + "_c", dataspace, AtomicType()); - - dataset.write(input_data_2d); - - auto check = [](const std::vector>& lhs, - const std::vector>& rhs) { - CHECK(lhs.size() == rhs.size()); - for (size_t i = 0; i < rhs.size(); ++i) { - CHECK(lhs[i].size() == rhs[i].size()); - - for (size_t j = 0; j < rhs[i].size(); ++j) { - CHECK(lhs[i][j] == rhs[i][j]); - } - } - }; - - { - std::vector> read_back; - dataset.read(read_back); - - check(read_back, input_data_2d); - } - - { - auto read_back = dataset.read>>(); - check(read_back, input_data_2d); - } - } +template +void check_broadcast_scalar_filespace(File& file, const std::string& name) { + auto datatype = create_datatype(); + auto obj = CreateTraits::create(file, name, DataSpace::Scalar(), datatype); - SECTION("one-dimensional fixed length string") { - std::vector dims{1, 1, 2}; - char input_data[2] = "a"; + auto value = std::vector{3.0}; - DataSpace dataspace(dims); - DataSet dataset = file.createDataSet(dataset_name + "_d", dataspace, AtomicType()); - dataset.write(input_data); + REQUIRE_THROWS(obj.write(value)); + REQUIRE_THROWS(obj.template read>()); + REQUIRE_THROWS(obj.read(value)); +} - { - char read_back[2]; - dataset.read(read_back); +TEST_CASE("Broadcast scalar filespace, dset") { + File file("h5_broadcast_scalar_filespace_dset.h5", File::Truncate); + check_broadcast_scalar_filespace(file, "dset"); +} - CHECK(read_back[0] == 'a'); - CHECK(read_back[1] == '\0'); - } - } +TEST_CASE("Broadcast scalar filespace, attr") { + File file("h5_broadcast_scalar_filespace_attr.h5", File::Truncate); + check_broadcast_scalar_filespace(file, "attr"); } TEST_CASE("squeeze") { @@ -1611,9 +1499,7 @@ TEST_CASE("squeeze") { } template -void check_modify_mem_space(File& file) { - const std::string name = "dset"; - +void check_modify_memspace(File& file, const std::string& name) { auto expected_values = std::vector{1.0, 2.0, 3.0}; auto values = std::vector>{expected_values}; @@ -1637,14 +1523,63 @@ void check_modify_mem_space(File& file) { } } -TEST_CASE("Modify Mem Space, attr") { +TEST_CASE("Modify MemSpace, dset") { File file("h5_modify_memspace_dset.h5", File::Truncate); - check_modify_mem_space(file); + check_modify_memspace(file, "dset"); } -TEST_CASE("Modify Mem Space, 
dset") { +TEST_CASE("Modify MemSpace, attr") { File file("h5_modify_memspace_attr.h5", File::Truncate); - check_modify_mem_space(file); + check_modify_memspace(file, "attr"); +} + +template +void check_modify_scalar_filespace(File& file, const std::string& name) { + auto expected_value = 3.0; + + auto obj = CreateTraits::create(file, name, expected_value); + SECTION("reshape") { + auto actual_values = obj.reshapeMemSpace({1}).template read>(); + + REQUIRE(actual_values.size() == 1); + REQUIRE(actual_values[0] == expected_value); + } +} + +TEST_CASE("Modify Scalar FileSpace, dset") { + File file("h5_modify_scalar_filespace_dset.h5", File::Truncate); + check_modify_scalar_filespace(file, "dset"); +} + +TEST_CASE("Modify Scalar FileSpace, attr") { + File file("h5_modify_scalar_filespace_attr.h5", File::Truncate); + check_modify_scalar_filespace(file, "attr"); +} + +template +void check_modify_scalar_memspace(File& file, const std::string& name) { + auto expected_value = std::vector{3.0}; + + auto obj = CreateTraits::create(file, name, expected_value); + SECTION("squeeze") { + auto actual_value = obj.squeezeMemSpace({0}).template read(); + REQUIRE(actual_value == expected_value[0]); + } + + SECTION("reshape") { + auto actual_value = obj.reshapeMemSpace({}).template read(); + REQUIRE(actual_value == expected_value[0]); + } +} + +TEST_CASE("Modify Scalar MemSpace, dset") { + File file("h5_modify_scalar_memspace_dset.h5", File::Truncate); + check_modify_scalar_memspace(file, "dset"); +} + +TEST_CASE("Modify Scalar MemSpace, attr") { + File file("h5_modify_scalar_memspace_attr.h5", File::Truncate); + check_modify_scalar_memspace(file, "attr"); } From 1ba7102fdda33f0cbcbea8a0b775226a3b34b44c Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 14 May 2024 10:21:11 +0200 Subject: [PATCH 85/97] Test `std::vector. (#998) --- tests/unit/test_legacy.cpp | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/tests/unit/test_legacy.cpp b/tests/unit/test_legacy.cpp index 7d7e67f26..698131473 100644 --- a/tests/unit/test_legacy.cpp +++ b/tests/unit/test_legacy.cpp @@ -36,3 +36,39 @@ TEST_CASE("HighFiveReadWriteConsts") { } } } + +TEST_CASE("Array of char pointers") { + // Currently, serializing an `std::vector` as + // fixed or variable length strings doesn't work. + // + // This isn't a test of correctness. Rather it asserts the fact that + // something doesn't work in HighFive. Knowing it doesn't work is useful + // for developers, but could change in the future. + + const std::string file_name = "vector_char_pointer.h5"; + + File file(file_name, File::Truncate); + + size_t n_strings = 3; + size_t n_chars = 4; + char storage[3][4] = {"foo", "bar", "000"}; + auto strings = std::vector(n_strings); + + for (size_t i = 0; i < n_strings; ++i) { + strings[i] = static_cast(storage[i]); + } + + auto filespace = DataSpace({n_strings}); + + SECTION("fixed length") { + auto datatype = FixedLengthStringType(n_chars, StringPadding::NullTerminated); + auto dset = file.createDataSet("dset", filespace, datatype); + REQUIRE_THROWS(dset.write(strings)); + } + + SECTION("variable length") { + auto datatype = VariableLengthStringType(); + auto dset = file.createDataSet("dset", filespace, datatype); + REQUIRE_THROWS(dset.write(strings)); + } +} From b3f34bd95d0bdd357dfcb8a34174ef5f1301af05 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 14 May 2024 12:28:49 +0200 Subject: [PATCH 86/97] No constexpr rank via constexpr min-max rank. 
(#938) The commit removes the requirement for a constexpr rank. Instead containers need a minimum and maximum constexpr rank. The difficulties are: * Empty arrays can't figure out their runtime rank. This is an issue when deducing the dimension when writing to disk. The "solution" is to deduce with lowest rank possible. --- include/highfive/bits/H5Attribute_misc.hpp | 15 ++-- include/highfive/bits/H5DataType_misc.hpp | 6 +- include/highfive/bits/H5Dataspace_misc.hpp | 7 +- include/highfive/bits/H5Inspector_misc.hpp | 84 ++++++++++++++----- include/highfive/bits/H5ReadWrite_misc.hpp | 27 +++++- include/highfive/bits/H5Slice_traits_misc.hpp | 13 +-- include/highfive/boost.hpp | 30 +++++-- include/highfive/eigen.hpp | 9 +- include/highfive/span.hpp | 17 +++- tests/unit/data_generator.hpp | 9 +- 10 files changed, 163 insertions(+), 54 deletions(-) diff --git a/include/highfive/bits/H5Attribute_misc.hpp b/include/highfive/bits/H5Attribute_misc.hpp index d339af1f1..7ad38699a 100644 --- a/include/highfive/bits/H5Attribute_misc.hpp +++ b/include/highfive/bits/H5Attribute_misc.hpp @@ -19,6 +19,7 @@ #include "../H5DataSpace.hpp" #include "H5Converter_misc.hpp" +#include "H5Inspector_misc.hpp" #include "H5ReadWrite_misc.hpp" #include "H5Utils.hpp" #include "h5a_wrapper.hpp" @@ -73,10 +74,11 @@ inline void Attribute::read(T& array) const { [this]() -> std::string { return this->getName(); }, details::BufferInfo::Operation::read); - if (!details::checkDimensions(mem_space, buffer_info.n_dimensions)) { + if (!details::checkDimensions(mem_space, buffer_info.getMinRank(), buffer_info.getMaxRank())) { std::ostringstream ss; - ss << "Impossible to read Attribute of dimensions " << mem_space.getNumberDimensions() - << " into arrays of dimensions " << buffer_info.n_dimensions; + ss << "Impossible to read attribute of dimensions " << mem_space.getNumberDimensions() + << " into arrays of dimensions: " << buffer_info.getMinRank() << "(min) to " + << buffer_info.getMaxRank() << "(max)"; throw DataSpaceException(ss.str()); } auto dims = mem_space.getDimensions(); @@ -137,10 +139,11 @@ inline void Attribute::write(const T& buffer) { [this]() -> std::string { return this->getName(); }, details::BufferInfo::Operation::write); - if (!details::checkDimensions(mem_space, buffer_info.n_dimensions)) { + if (!details::checkDimensions(mem_space, buffer_info.getMinRank(), buffer_info.getMaxRank())) { std::ostringstream ss; - ss << "Impossible to write buffer of dimensions " << buffer_info.n_dimensions - << " into dataset of dimensions " << mem_space.getNumberDimensions(); + ss << "Impossible to write attribute of dimensions " << mem_space.getNumberDimensions() + << " into arrays of dimensions: " << buffer_info.getMinRank() << "(min) to " + << buffer_info.getMaxRank() << "(max)"; throw DataSpaceException(ss.str()); } auto w = details::data_converter::serialize(buffer, dims, file_datatype); diff --git a/include/highfive/bits/H5DataType_misc.hpp b/include/highfive/bits/H5DataType_misc.hpp index 4321a4658..797702188 100644 --- a/include/highfive/bits/H5DataType_misc.hpp +++ b/include/highfive/bits/H5DataType_misc.hpp @@ -226,9 +226,9 @@ inline EnumType create_enum_boolean() { // Other cases not supported. 
Fail early with a user message template AtomicType::AtomicType() { - static_assert(details::inspector::recursive_ndim == 0, - "Atomic types cant be arrays, except for char[] (fixed-length strings)"); - static_assert(details::inspector::recursive_ndim > 0, "Type not supported"); + static_assert( + true, + "Missing specialization of AtomicType. Therefore, type T is not supported by HighFive."); } diff --git a/include/highfive/bits/H5Dataspace_misc.hpp b/include/highfive/bits/H5Dataspace_misc.hpp index ceae1e531..4382c14c1 100644 --- a/include/highfive/bits/H5Dataspace_misc.hpp +++ b/include/highfive/bits/H5Dataspace_misc.hpp @@ -131,9 +131,10 @@ inline DataSpace DataSpace::FromCharArrayStrings(const char (&)[N][Width]) { namespace details { -/// dimension checks @internal -inline bool checkDimensions(const DataSpace& mem_space, size_t n_dim_requested) { - return checkDimensions(mem_space.getDimensions(), n_dim_requested); +inline bool checkDimensions(const DataSpace& mem_space, + size_t min_dim_requested, + size_t max_dim_requested) { + return checkDimensions(mem_space.getDimensions(), min_dim_requested, max_dim_requested); } } // namespace details diff --git a/include/highfive/bits/H5Inspector_misc.hpp b/include/highfive/bits/H5Inspector_misc.hpp index efb2343da..59bf85422 100644 --- a/include/highfive/bits/H5Inspector_misc.hpp +++ b/include/highfive/bits/H5Inspector_misc.hpp @@ -27,15 +27,18 @@ namespace HighFive { namespace details { -inline bool checkDimensions(const std::vector& dims, size_t n_dim_requested) { - if (dims.size() == n_dim_requested) { +inline bool checkDimensions(const std::vector& dims, + size_t min_dim_requested, + size_t max_dim_requested) { + if (min_dim_requested <= dims.size() && dims.size() <= max_dim_requested) { return true; } + // Scalar values still support broadcasting // into arrays with one element. size_t n_elements = compute_total_size(dims); - return n_elements == 1 && n_dim_requested == 0; + return n_elements == 1 && min_dim_requested == 0; } } // namespace details @@ -49,8 +52,6 @@ inspector { // hdf5_type is the base read by hdf5 (c-type) (e.g. 
std::vector => const char*) using hdf5_type - // Number of dimensions starting from here - static constexpr size_t recursive_ndim // Is the inner type trivially copyable for optimisation // If this value is true: data() is mandatory // If this value is false: serialize, unserialize are mandatory @@ -88,10 +89,16 @@ struct type_helper { using hdf5_type = base_type; static constexpr size_t ndim = 0; - static constexpr size_t recursive_ndim = ndim; + static constexpr size_t min_ndim = ndim; + static constexpr size_t max_ndim = ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value; static constexpr bool is_trivially_nestable = is_trivially_copyable; + static size_t getRank(const type& /* val */) { + return ndim; + } + static std::vector getDimensions(const type& /* val */) { return {}; } @@ -216,17 +223,27 @@ struct inspector> { using hdf5_type = typename inspector::hdf5_type; static constexpr size_t ndim = 1; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr size_t min_ndim = ndim + inspector::min_ndim; + static constexpr size_t max_ndim = ndim + inspector::max_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_nestable; static constexpr bool is_trivially_nestable = false; + static size_t getRank(const type& val) { + if (!val.empty()) { + return ndim + inspector::getRank(val[0]); + } else { + return min_ndim; + } + } + static std::vector getDimensions(const type& val) { - std::vector sizes(recursive_ndim, 1ul); + auto rank = getRank(val); + std::vector sizes(rank, 1ul); sizes[0] = val.size(); if (!val.empty()) { auto s = inspector::getDimensions(val[0]); - assert(s.size() + ndim == sizes.size()); for (size_t i = 0; i < s.size(); ++i) { sizes[i + ndim] = s[i]; } @@ -280,10 +297,16 @@ struct inspector> { using hdf5_type = uint8_t; static constexpr size_t ndim = 1; - static constexpr size_t recursive_ndim = ndim; + static constexpr size_t min_ndim = ndim; + static constexpr size_t max_ndim = ndim; + static constexpr bool is_trivially_copyable = false; static constexpr bool is_trivially_nestable = false; + static size_t getRank(const type& /* val */) { + return ndim; + } + static std::vector getDimensions(const type& val) { std::vector sizes{val.size()}; return sizes; @@ -327,18 +350,22 @@ struct inspector> { using hdf5_type = typename inspector::hdf5_type; static constexpr size_t ndim = 1; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr size_t min_ndim = ndim + inspector::min_ndim; + static constexpr size_t max_ndim = ndim + inspector::max_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_nestable; static constexpr bool is_trivially_nestable = (sizeof(type) == N * sizeof(T)) && is_trivially_copyable; + static size_t getRank(const type& val) { + return ndim + inspector::getRank(val[0]); + } + static std::vector getDimensions(const type& val) { std::vector sizes{N}; - if (!val.empty()) { - auto s = inspector::getDimensions(val[0]); - sizes.insert(sizes.end(), s.begin(), s.end()); - } + auto s = inspector::getDimensions(val[0]); + sizes.insert(sizes.end(), s.begin(), s.end()); return sizes; } @@ -399,11 +426,21 @@ struct inspector { using hdf5_type = typename inspector::hdf5_type; static constexpr size_t ndim = 1; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr size_t min_ndim = ndim + 
inspector::min_ndim; + static constexpr size_t max_ndim = ndim + inspector::max_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_nestable; static constexpr bool is_trivially_nestable = false; + static size_t getRank(const type& val) { + if (val != nullptr) { + return ndim + inspector::getRank(val[0]); + } else { + return min_ndim; + } + } + static std::vector getDimensions(const type& /* val */) { throw DataSpaceException("Not possible to have size of a T*"); } @@ -430,7 +467,9 @@ struct inspector { using hdf5_type = typename inspector::hdf5_type; static constexpr size_t ndim = 1; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr size_t min_ndim = ndim + inspector::min_ndim; + static constexpr size_t max_ndim = ndim + inspector::max_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_nestable; static constexpr bool is_trivially_nestable = is_trivially_copyable; @@ -450,12 +489,14 @@ struct inspector { } } + static size_t getRank(const type& val) { + return ndim + inspector::getRank(val[0]); + } + static std::vector getDimensions(const type& val) { std::vector sizes{N}; - if (N > 0) { - auto s = inspector::getDimensions(val[0]); - sizes.insert(sizes.end(), s.begin(), s.end()); - } + auto s = inspector::getDimensions(val[0]); + sizes.insert(sizes.end(), s.begin(), s.end()); return sizes; } @@ -478,5 +519,6 @@ struct inspector { } }; + } // namespace details } // namespace HighFive diff --git a/include/highfive/bits/H5ReadWrite_misc.hpp b/include/highfive/bits/H5ReadWrite_misc.hpp index 05bb49888..e5c862bc5 100644 --- a/include/highfive/bits/H5ReadWrite_misc.hpp +++ b/include/highfive/bits/H5ReadWrite_misc.hpp @@ -9,6 +9,7 @@ #pragma once #include +#include "H5Inspector_misc.hpp" #include "H5Utils.hpp" namespace HighFive { @@ -57,10 +58,14 @@ struct BufferInfo { template BufferInfo(const DataType& dtype, F getName, Operation _op); + size_t getRank(const T& array) const; + size_t getMinRank() const; + size_t getMaxRank() const; + // member data for info depending on the destination dataset type const bool is_fixed_len_string; - const size_t n_dimensions; const DataType data_type; + const size_t rank_correction; }; // details implementation @@ -135,10 +140,9 @@ BufferInfo::BufferInfo(const DataType& file_data_type, F getName, Operation _ : op(_op) , is_fixed_len_string(file_data_type.isFixedLenStr()) // In case we are using Fixed-len strings we need to subtract one dimension - , n_dimensions(details::inspector::recursive_ndim - - ((is_fixed_len_string && is_char_array) ? 1 : 0)) , data_type(string_type_checker::getDataType(create_datatype(), - file_data_type)) { + file_data_type)) + , rank_correction((is_fixed_len_string && is_char_array) ? 1 : 0) { // We warn. 
In case they are really not convertible an exception will rise on read/write if (file_data_type.getClass() != data_type.getClass()) { HIGHFIVE_LOG_WARN(getName() + "\": data and hdf5 dataset have different types: " + @@ -157,6 +161,21 @@ BufferInfo::BufferInfo(const DataType& file_data_type, F getName, Operation _ } } +template +size_t BufferInfo::getRank(const T& array) const { + return details::inspector::getRank(array) - rank_correction; +} + +template +size_t BufferInfo::getMinRank() const { + return details::inspector::min_ndim - rank_correction; +} + +template +size_t BufferInfo::getMaxRank() const { + return details::inspector::max_ndim - rank_correction; +} + } // namespace details } // namespace HighFive diff --git a/include/highfive/bits/H5Slice_traits_misc.hpp b/include/highfive/bits/H5Slice_traits_misc.hpp index bf3f789eb..711d57ac2 100644 --- a/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/include/highfive/bits/H5Slice_traits_misc.hpp @@ -180,10 +180,11 @@ inline void SliceTraits::read(T& array, const DataTransferProps& xfer_ [&slice]() -> std::string { return details::get_dataset(slice).getPath(); }, details::BufferInfo::Operation::read); - if (!details::checkDimensions(mem_space, buffer_info.n_dimensions)) { + if (!details::checkDimensions(mem_space, buffer_info.getMinRank(), buffer_info.getMaxRank())) { std::ostringstream ss; ss << "Impossible to read DataSet of dimensions " << mem_space.getNumberDimensions() - << " into arrays of dimensions " << buffer_info.n_dimensions; + << " into arrays of dimensions: " << buffer_info.getMinRank() << "(min) to " + << buffer_info.getMaxRank() << "(max)"; throw DataSpaceException(ss.str()); } auto dims = mem_space.getDimensions(); @@ -254,11 +255,11 @@ inline void SliceTraits::write(const T& buffer, const DataTransferProp [&slice]() -> std::string { return details::get_dataset(slice).getPath(); }, details::BufferInfo::Operation::write); - if (!details::checkDimensions(mem_space, buffer_info.n_dimensions)) { + if (!details::checkDimensions(mem_space, buffer_info.getMinRank(), buffer_info.getMaxRank())) { std::ostringstream ss; - ss << "Impossible to write buffer of dimensions " - << details::format_vector(mem_space.getDimensions()) - << " into dataset with n = " << buffer_info.n_dimensions << " dimensions."; + ss << "Impossible to write buffer with dimensions n = " << buffer_info.getRank(buffer) + << "into dataset with dimensions " << details::format_vector(mem_space.getDimensions()) + << "."; throw DataSpaceException(ss.str()); } auto w = details::data_converter::serialize(buffer, dims, file_datatype); diff --git a/include/highfive/boost.hpp b/include/highfive/boost.hpp index 3e42a5b60..33c1458df 100644 --- a/include/highfive/boost.hpp +++ b/include/highfive/boost.hpp @@ -17,19 +17,31 @@ struct inspector> { using hdf5_type = typename inspector::hdf5_type; static constexpr size_t ndim = Dims; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr size_t min_ndim = ndim + inspector::min_ndim; + static constexpr size_t max_ndim = ndim + inspector::max_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_nestable; static constexpr bool is_trivially_nestable = false; + static size_t getRank(const type& val) { + return ndim + inspector::getRank(val.data()[0]); + } + static std::vector getDimensions(const type& val) { - std::vector sizes; + auto rank = getRank(val); + std::vector sizes(rank, 1ul); for (size_t i = 0; i < ndim; ++i) { - 
sizes.push_back(val.shape()[i]); + sizes[i] = val.shape()[i]; + } + if (val.size() != 0) { + auto s = inspector::getDimensions(val.data()[0]); + sizes.resize(ndim + s.size()); + for (size_t i = 0; i < s.size(); ++i) { + sizes[ndim + i] = s[i]; + } } - auto s = inspector::getDimensions(val.data()[0]); - sizes.insert(sizes.end(), s.begin(), s.end()); return sizes; } @@ -101,11 +113,17 @@ struct inspector> { using hdf5_type = typename inspector::hdf5_type; static constexpr size_t ndim = 2; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr size_t min_ndim = ndim + inspector::min_ndim; + static constexpr size_t max_ndim = ndim + inspector::max_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_copyable; static constexpr bool is_trivially_nestable = false; + static size_t getRank(const type& val) { + return ndim + inspector::getRank(val(0, 0)); + } + static std::vector getDimensions(const type& val) { std::vector sizes{val.size1(), val.size2()}; auto s = inspector::getDimensions(val(0, 0)); diff --git a/include/highfive/eigen.hpp b/include/highfive/eigen.hpp index 4a0b293fd..462769e4b 100644 --- a/include/highfive/eigen.hpp +++ b/include/highfive/eigen.hpp @@ -16,6 +16,7 @@ struct eigen_inspector { using base_type = typename inspector::base_type; using hdf5_type = base_type; + static_assert(int(EigenType::ColsAtCompileTime) == int(EigenType::MaxColsAtCompileTime), "Padding isn't supported."); static_assert(int(EigenType::RowsAtCompileTime) == int(EigenType::MaxRowsAtCompileTime), @@ -26,13 +27,19 @@ struct eigen_inspector { EigenType::IsRowMajor; } + static constexpr size_t ndim = 2; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr size_t min_ndim = ndim + inspector::min_ndim; + static constexpr size_t max_ndim = ndim + inspector::max_ndim; static constexpr bool is_trivially_copyable = is_row_major() && std::is_trivially_copyable::value && inspector::is_trivially_nestable; static constexpr bool is_trivially_nestable = false; + static size_t getRank(const type& val) { + return ndim + inspector::getRank(val.data()[0]); + } + static std::vector getDimensions(const type& val) { std::vector sizes{static_cast(val.rows()), static_cast(val.cols())}; auto s = inspector::getDimensions(val.data()[0]); diff --git a/include/highfive/span.hpp b/include/highfive/span.hpp index 1eca4a51b..ab53319ee 100644 --- a/include/highfive/span.hpp +++ b/include/highfive/span.hpp @@ -25,14 +25,25 @@ struct inspector> { using hdf5_type = typename inspector::hdf5_type; static constexpr size_t ndim = 1; - static constexpr size_t recursive_ndim = ndim + inspector::recursive_ndim; + static constexpr size_t min_ndim = ndim + inspector::min_ndim; + static constexpr size_t max_ndim = ndim + inspector::max_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && inspector::is_trivially_nestable; - static constexpr bool is_trivially_nestable = false; + + static size_t getRank(const type& val) { + if (!val.empty()) { + return ndim + inspector::getRank(val[0]); + } else { + return min_ndim; + } + } + static std::vector getDimensions(const type& val) { - std::vector sizes(recursive_ndim, 1ul); + auto rank = getRank(val); + std::vector sizes(rank, 1ul); sizes[0] = val.size(); if (!val.empty()) { auto s = inspector::getDimensions(val[0]); diff --git a/tests/unit/data_generator.hpp b/tests/unit/data_generator.hpp index 5b1f095ca..a50284d5c 100644 --- 
a/tests/unit/data_generator.hpp +++ b/tests/unit/data_generator.hpp @@ -79,6 +79,7 @@ struct ScalarContainerTraits { using base_type = T; static constexpr bool is_view = false; + static constexpr size_t rank = 0; static void set(container_type& array, std::vector /* indices */, base_type value) { array = value; @@ -120,6 +121,7 @@ struct ContainerTraits> { using base_type = bool; static constexpr bool is_view = false; + static constexpr size_t rank = 1; static void set(container_type& array, const std::vector& indices, @@ -154,6 +156,7 @@ struct STLLikeContainerTraits { using base_type = typename ContainerTraits::base_type; static constexpr bool is_view = ContainerTraits::is_view; + static constexpr size_t rank = 1 + ContainerTraits::rank; static void set(container_type& array, const std::vector& indices, @@ -281,6 +284,7 @@ struct ContainerTraits> { using base_type = typename ContainerTraits::base_type; static constexpr bool is_view = ContainerTraits::is_view; + static constexpr size_t rank = n + ContainerTraits::rank; static void set(container_type& array, const std::vector& indices, @@ -333,6 +337,7 @@ struct ContainerTraits> { using base_type = typename ContainerTraits::base_type; static constexpr bool is_view = ContainerTraits::is_view; + static constexpr size_t rank = 2 + ContainerTraits::rank; static void set(container_type& array, const std::vector& indices, @@ -392,6 +397,7 @@ struct EigenContainerTraits { using base_type = typename ContainerTraits::base_type; static constexpr bool is_view = ContainerTraits::is_view; + static constexpr size_t rank = 2 + ContainerTraits::rank; static void set(container_type& array, const std::vector& indices, @@ -629,11 +635,12 @@ struct MultiDimVector { template class DataGenerator { public: - constexpr static size_t rank = details::inspector::recursive_ndim; using traits = ContainerTraits; using base_type = typename traits::base_type; using container_type = Container; + constexpr static size_t rank = traits::rank; + public: static container_type allocate(const std::vector& dims) { return traits::allocate(dims); From 2e1067333498185c6e452d060c77ccbaea76b6e3 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 17 May 2024 11:57:42 +0200 Subject: [PATCH 87/97] Update Windows CI. (#1000) Since a recent update it seems that Windows 2022 now uses Visual Studio 2022 17.9 and doesn't include toolset v141 anymore. --- .github/workflows/ci.yml | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cb9d92ea0..d1495c177 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -346,19 +346,9 @@ jobs: runs-on: ${{matrix.os}} strategy: matrix: - os: [ "windows-2022"] - vs-toolset: [ "v141", "v143" ] + os: [ "windows-2019", "windows-2022"] cxxstd: ["14", "17", "20"] - include: - - os: "windows-2019" - vs-toolset: "v142" - cxxstd: "14" - - - os: "windows-2019" - vs-toolset: "v142" - cxxstd: "17" - steps: - uses: actions/checkout@v3 with: @@ -373,7 +363,6 @@ jobs: shell: bash -l {0} run: | CMAKE_OPTIONS=( - -T ${{matrix.vs-toolset}} -DCMAKE_CXX_STANDARD=${{matrix.cxxstd}} -DHIGHFIVE_UNIT_TESTS=ON -DHIGHFIVE_TEST_BOOST:BOOL=ON From 2de17efb71f4aea0bd4327bac745f7e2c7840db5 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 17 May 2024 12:15:13 +0200 Subject: [PATCH 88/97] Remove `Object{Create,Access}Props`. (#1002) These are abstract concepts in the documentation of HDF5. In their current implementation they can't be used to express "one of ..." 
as they should. --- doc/migration_guide.md | 4 +++- include/highfive/H5PropertyList.hpp | 6 ------ include/highfive/bits/H5PropertyList_misc.hpp | 9 --------- 3 files changed, 3 insertions(+), 16 deletions(-) diff --git a/doc/migration_guide.md b/doc/migration_guide.md index 4870cd551..ee72e52c7 100644 --- a/doc/migration_guide.md +++ b/doc/migration_guide.md @@ -233,4 +233,6 @@ multi-dimensional array isn't supported, because if we want to support array with runtime-defined rank, we can't deduce the correct shape, e.g. `[1]` vs. `[1, 1, 1]`, when read into an array. - +# Removal of `Object*Props`. +To out knowledge these could not be used meaningfully. Please create an issue +if you relied on these. diff --git a/include/highfive/H5PropertyList.hpp b/include/highfive/H5PropertyList.hpp index 2368f5ca9..5d467dbb3 100644 --- a/include/highfive/H5PropertyList.hpp +++ b/include/highfive/H5PropertyList.hpp @@ -87,7 +87,6 @@ namespace HighFive { /// \brief Types of property lists /// enum class PropertyType : int { - OBJECT_CREATE, FILE_CREATE, FILE_ACCESS, DATASET_CREATE, @@ -99,7 +98,6 @@ enum class PropertyType : int { DATATYPE_ACCESS, STRING_CREATE, ATTRIBUTE_CREATE, - OBJECT_COPY, LINK_CREATE, LINK_ACCESS, }; @@ -195,7 +193,6 @@ class PropertyList: public PropertyListBase { void _initializeIfNeeded(); }; -using ObjectCreateProps = PropertyList; using FileCreateProps = PropertyList; using FileAccessProps = PropertyList; using DataSetCreateProps = PropertyList; @@ -207,7 +204,6 @@ using DataTypeCreateProps = PropertyList; using DataTypeAccessProps = PropertyList; using StringCreateProps = PropertyList; using AttributeCreateProps = PropertyList; -using ObjectCopyProps = PropertyList; using LinkCreateProps = PropertyList; using LinkAccessProps = PropertyList; @@ -606,7 +602,6 @@ class CreateIntermediateGroup { public: explicit CreateIntermediateGroup(bool create = true); - explicit CreateIntermediateGroup(const ObjectCreateProps& ocpl); explicit CreateIntermediateGroup(const LinkCreateProps& lcpl); bool isSet() const; @@ -615,7 +610,6 @@ class CreateIntermediateGroup { void fromPropertyList(hid_t hid); private: - friend ObjectCreateProps; friend LinkCreateProps; void apply(hid_t hid) const; bool _create; diff --git a/include/highfive/bits/H5PropertyList_misc.hpp b/include/highfive/bits/H5PropertyList_misc.hpp index 1fa2101f2..cfeb7685d 100644 --- a/include/highfive/bits/H5PropertyList_misc.hpp +++ b/include/highfive/bits/H5PropertyList_misc.hpp @@ -17,8 +17,6 @@ inline hid_t convert_plist_type(PropertyType propertyType) { // The HP5_XXX are macros with function calls so we can't assign // them as the enum values switch (propertyType) { - case PropertyType::OBJECT_CREATE: - return H5P_OBJECT_CREATE; case PropertyType::FILE_CREATE: return H5P_FILE_CREATE; case PropertyType::FILE_ACCESS: @@ -41,8 +39,6 @@ inline hid_t convert_plist_type(PropertyType propertyType) { return H5P_STRING_CREATE; case PropertyType::ATTRIBUTE_CREATE: return H5P_ATTRIBUTE_CREATE; - case PropertyType::OBJECT_COPY: - return H5P_OBJECT_COPY; case PropertyType::LINK_CREATE: return H5P_LINK_CREATE; case PropertyType::LINK_ACCESS: @@ -390,11 +386,6 @@ inline double Caching::getW0() const { inline CreateIntermediateGroup::CreateIntermediateGroup(bool create) : _create(create) {} -inline CreateIntermediateGroup::CreateIntermediateGroup(const ObjectCreateProps& ocpl) { - fromPropertyList(ocpl.getId()); -} - - inline void CreateIntermediateGroup::apply(const hid_t hid) const { detail::h5p_set_create_intermediate_group(hid, _create 
? 1 : 0); } From 96d2100501c879ace98406c0f9aad1d54956cefe Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 17 May 2024 14:14:32 +0200 Subject: [PATCH 89/97] Remove HIGHFIVE_PARALLEL_HDF5. (#1003) Choose parallel HDF5 is done via `HDF5_PREFER_PARALLEL`. Internally HighFive checks `HDF5_IS_PARALLEL`. --- .github/workflows/ci.yml | 2 +- tests/unit/CMakeLists.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d1495c177..68f5c741b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -76,7 +76,7 @@ jobs: - name: Build run: | - CMAKE_OPTIONS=(-DHIGHFIVE_PARALLEL_HDF5:BOOL=ON ${{ matrix.config.flags }}) + CMAKE_OPTIONS=(-DHDF5_PREFER_PARALLEL:BOOL=ON ${{ matrix.config.flags }}) source $GITHUB_WORKSPACE/.github/build.sh - name: Test diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 980fe077a..a62c70b61 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -14,7 +14,7 @@ foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_fiv catch_discover_tests(${test_name}) endforeach() -if(HIGHFIVE_PARALLEL_HDF5) +if(HDF5_IS_PARALLEL) set(tests_parallel_src "tests_high_five_parallel.cpp") ## parallel MPI tests From a5e35154f489e1197b49397fce0f025d93d8bc56 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 21 May 2024 08:59:40 +0200 Subject: [PATCH 90/97] Cleanup random Test/CMake related stuff. (#1004) * Test concepts. * No more travis.yml * Remove travis.yml * Improve README * Always do single includes. They're cheap, reliable and valuable, no point skipping them. If one's compiling the unit-tests. --- .github/workflows/ci.yml | 5 +- .travis.yml | 138 -------------------------------------- README.md | 2 +- tests/unit/CMakeLists.txt | 57 ++++++++-------- 4 files changed, 30 insertions(+), 172 deletions(-) delete mode 100644 .travis.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 68f5c741b..66069123d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -57,7 +57,7 @@ jobs: flags: '-DCMAKE_CXX_STANDARD=17 -DHIGHFIVE_TEST_BOOST:Bool=ON' - config: os: ubuntu-22.04 - flags: '-DHIGHFIVE_TEST_BOOST=Off -DCMAKE_CXX_STANDARD=20' + flags: '-DHIGHFIVE_TEST_BOOST=Off -DCMAKE_CXX_STANDARD=20 -DHIGHFIVE_HAS_CONCEPTS=On' steps: - uses: actions/checkout@v3 @@ -319,8 +319,6 @@ jobs: -DHIGHFIVE_TEST_EIGEN:BOOL=ON -DHIGHFIVE_TEST_XTENSOR:BOOL=ON -DHIGHFIVE_BUILD_DOCS:BOOL=FALSE - -DHIGHFIVE_TEST_SINGLE_INCLUDES=ON - -DCMAKE_CXX_FLAGS="-coverage -O0" -DCMAKE_CXX_STANDARD=${{matrix.cxxstd}} ) source $GITHUB_WORKSPACE/.github/build.sh @@ -368,7 +366,6 @@ jobs: -DHIGHFIVE_TEST_BOOST:BOOL=ON -DHIGHFIVE_TEST_EIGEN:BOOL=ON -DHIGHFIVE_TEST_XTENSOR:BOOL=ON - -DHIGHFIVE_TEST_SINGLE_INCLUDES=ON ) source $GITHUB_WORKSPACE/.github/build.sh diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index bc5d34081..000000000 --- a/.travis.yml +++ /dev/null @@ -1,138 +0,0 @@ -# Adapted from various sources, including: -# - Louis Dionne's Hana: https://github.com/ldionne/hana -# - Paul Fultz II's FIT: https://github.com/pfultz2/Fit -# - Eric Niebler's range-v3: https://github.com/ericniebler/range-v3 -# - Gabi Melman spdlog: https://github.com/gabime/spdlog - -sudo: required -language: cpp - -addons: &gcc7 - apt: - packages: - - g++-7 - - libboost-all-dev - - libhdf5-openmpi-dev - - libeigen3-dev - - ninja-build - sources: - - ubuntu-toolchain-r-test - -matrix: - include: - # Older linux (trusty) with default gcc - # 
Install serial hdf5 + build serial - - os: linux - dist: trusty - env: - - HIGHFIVE_USE_XTENSOR=False - - HIGHFIVE_USE_OPENCV=False - - HIGHFIVE_PARALLEL_HDF5=False - - IS_BASE_ENVIRON=1 - addons: - apt: - packages: - - libboost-all-dev - - libeigen3-dev - - libhdf5-serial-dev - - ninja-build - - # Linux gcc-7 - # Install parallel hdf5 + build parallel - - os: linux - dist: xenial - env: - - GCC_VERSION=7 - - HIGHFIVE_USE_XTENSOR=True - - HIGHFIVE_USE_OPENCV=False - - HIGHFIVE_PARALLEL_HDF5=True - addons: *gcc7 - - # Mac OSX XCode 10 - - os: osx - osx_image: xcode10.3 - env: - - HIGHFIVE_USE_XTENSOR=True - - HIGHFIVE_USE_OPENCV=True - - HIGHFIVE_PARALLEL_HDF5=False - - # Windows - - os: windows - env: - - HIGHFIVE_USE_XTENSOR=True - - HIGHFIVE_USE_OPENCV=True - - HIGHFIVE_PARALLEL_HDF5=False - -env: - global: - - MINCONDA_VERSION="latest" - - MINCONDA_LINUX="Linux-x86_64" - - MINCONDA_OSX="MacOSX-x86_64" - -install: - - export HOMEBREW_NO_AUTO_UPDATE=1 # for reproducibility, dont autoupdate - - - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then - MINCONDA_OS=$MINCONDA_LINUX; - elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then - if [ "$BREW_USE_LATEST" ]; then - brew update; - brew install hdf5; brew upgrade hdf5; - fi; - brew install boost hdf5 eigen ninja; - MINCONDA_OS=$MINCONDA_OSX; - fi - - - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then - export CMAKE_GENERATOR="Visual Studio 15 2017 Win64" ; - export TESTS_TARGET="RUN_TESTS"; - choco install --yes miniconda3 ; - source C:/Tools/miniconda3/Scripts/activate ; - else - export CMAKE_GENERATOR="Ninja" ; - export TESTS_TARGET="test"; - wget "http://repo.continuum.io/miniconda/Miniconda3-$MINCONDA_VERSION-$MINCONDA_OS.sh" -O miniconda.sh; - bash miniconda.sh -b -p $HOME/miniconda ; - source $HOME/miniconda/bin/activate; - hash -r ; - fi - - conda config --set always_yes yes --set changeps1 no - - conda update -q conda - - conda install -c conda-forge mamba - - if [[ "$HIGHFIVE_USE_XTENSOR" == "True" ]]; then - mamba install -c conda-forge xtl xsimd xtensor; - fi - - if [[ "$HIGHFIVE_USE_OPENCV" == "True" ]]; then - mamba install -c conda-forge libopencv opencv; - fi - - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then - mamba install -c conda-forge boost-cpp hdf5 eigen; - fi - -before_script: - - if [ -n "$GCC_VERSION" ]; then export CXX="g++-${GCC_VERSION}" CC="gcc-${GCC_VERSION}"; fi - - if [ -n "$CLANG_VERSION" ]; then export CXX="clang++-${CLANG_VERSION}" CC="clang-${CLANG_VERSION}"; fi - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then export CXX="clang++" CC="clang"; fi - - which $CXX - - which $CC - - $CXX --version - - cmake --version - -script: - - cd ${TRAVIS_BUILD_DIR} - - mkdir -p build && pushd build - - > - cmake --warn-uninitialized --debug-output - -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON - -DHIGHFIVE_TEST_SINGLE_INCLUDES:BOOL=ON - -DHIGHFIVE_PARALLEL_HDF5:BOOL=${HIGHFIVE_PARALLEL_HDF5} - -DHIGHFIVE_USE_EIGEN:BOOL=ON - -DHIGHFIVE_USE_XTENSOR:BOOL=${HIGHFIVE_USE_XTENSOR} - -DHIGHFIVE_USE_OPENCV:BOOL=${HIGHFIVE_USE_OPENCV} - -G "${CMAKE_GENERATOR}" ../ - - cmake --build . - - CTEST_OUTPUT_ON_FAILURE=1 cmake --build . --target ${TESTS_TARGET} - - popd - - if [ $IS_BASE_ENVIRON ]; then - bash tests/test_project_integration.sh; - fi diff --git a/README.md b/README.md index 8e04eb13c..b8d71b357 100644 --- a/README.md +++ b/README.md @@ -48,7 +48,7 @@ It integrates nicely with other CMake projects by defining (and exporting) a Hig ### Known flaws - HighFive is not thread-safe. At best it has the same limitations as the HDF5 library. 
However, HighFive objects modify their members without protecting these writes. Users have reported that HighFive is not thread-safe even when using the threadsafe HDF5 library, e.g., https://github.com/BlueBrain/HighFive/discussions/675. -- Eigen support in core HighFive is broken. See https://github.com/BlueBrain/HighFive/issues/532. H5Easy is not +- Eigen support in core HighFive was broken until v3.0. See https://github.com/BlueBrain/HighFive/issues/532. H5Easy was not affected. - The support of fixed length strings isn't ideal. diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index a62c70b61..a282da319 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -42,36 +42,35 @@ if(HDF5_IS_PARALLEL) set(_CATCH_DISCOVER_TESTS_SCRIPT "${original_catch_script}") endif() -option(HIGHFIVE_TEST_SINGLE_INCLUDES "Enable testing single includes" FALSE) +# Test that each public header is self-sufficient. This is done by +# creating a file for each header, that only includes the header. The +# test succeeds if it compiles. +file(GLOB public_headers LIST_DIRECTORIES false RELATIVE ${PROJECT_SOURCE_DIR}/include CONFIGURE_DEPENDS ${PROJECT_SOURCE_DIR}/include/highfive/*.hpp) +foreach(PUBLIC_HEADER ${public_headers}) + if(PUBLIC_HEADER STREQUAL "highfive/span.hpp" AND NOT HIGHFIVE_TEST_SPAN) + continue() + endif() -if(HIGHFIVE_TEST_SINGLE_INCLUDES) - file(GLOB public_headers LIST_DIRECTORIES false RELATIVE ${PROJECT_SOURCE_DIR}/include CONFIGURE_DEPENDS ${PROJECT_SOURCE_DIR}/include/highfive/*.hpp) - foreach(PUBLIC_HEADER ${public_headers}) - if(PUBLIC_HEADER STREQUAL "highfive/span.hpp" AND NOT HIGHFIVE_TEST_SPAN) - continue() - endif() + if(PUBLIC_HEADER STREQUAL "highfive/boost.hpp" AND NOT HIGHFIVE_TEST_BOOST) + continue() + endif() - if(PUBLIC_HEADER STREQUAL "highfive/boost.hpp" AND NOT HIGHFIVE_TEST_BOOST) - continue() - endif() + if(PUBLIC_HEADER STREQUAL "highfive/half_float.hpp" AND NOT HIGHFIVE_TEST_HALF_FLOAT) + continue() + endif() - if(PUBLIC_HEADER STREQUAL "highfive/half_float.hpp" AND NOT HIGHFIVE_TEST_HALF_FLOAT) - continue() - endif() + if(PUBLIC_HEADER STREQUAL "highfive/eigen.hpp" AND NOT HIGHFIVE_TEST_EIGEN) + continue() + endif() - if(PUBLIC_HEADER STREQUAL "highfive/eigen.hpp" AND NOT HIGHFIVE_TEST_EIGEN) - continue() - endif() - - get_filename_component(CLASS_NAME ${PUBLIC_HEADER} NAME_WE) - configure_file(tests_import_public_headers.cpp "tests_${CLASS_NAME}.cpp" @ONLY) - add_executable("tests_include_${CLASS_NAME}" "${CMAKE_CURRENT_BINARY_DIR}/tests_${CLASS_NAME}.cpp") - target_link_libraries( - "tests_include_${CLASS_NAME}" PUBLIC - HighFive - HighFiveWarnings - HighFiveFlags - HighFiveOptionalDependencies - ) - endforeach() -endif() + get_filename_component(CLASS_NAME ${PUBLIC_HEADER} NAME_WE) + configure_file(tests_import_public_headers.cpp "tests_${CLASS_NAME}.cpp" @ONLY) + add_executable("tests_include_${CLASS_NAME}" "${CMAKE_CURRENT_BINARY_DIR}/tests_${CLASS_NAME}.cpp") + target_link_libraries( + "tests_include_${CLASS_NAME}" PUBLIC + HighFive + HighFiveWarnings + HighFiveFlags + HighFiveOptionalDependencies + ) +endforeach() From c1e8b578bedf891104174d7bc3f58822fc657e91 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 21 May 2024 09:00:38 +0200 Subject: [PATCH 91/97] Update doxygen-awesome to v2.3.3 (#1007) --- doc/doxygen-awesome-css/doxygen-awesome.css | 22 +++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git 
a/doc/doxygen-awesome-css/doxygen-awesome.css b/doc/doxygen-awesome-css/doxygen-awesome.css index a44945b36..a2715e268 100644 --- a/doc/doxygen-awesome-css/doxygen-awesome.css +++ b/doc/doxygen-awesome-css/doxygen-awesome.css @@ -1046,7 +1046,7 @@ blockquote::after { blockquote p { margin: var(--spacing-small) 0 var(--spacing-medium) 0; } -.paramname { +.paramname, .paramname em { font-weight: 600; color: var(--primary-dark-color); } @@ -1096,7 +1096,7 @@ div.contents .toc { border: 0; border-left: 1px solid var(--separator-color); border-radius: 0; - background-color: transparent; + background-color: var(--page-background-color); box-shadow: none; position: sticky; top: var(--toc-sticky-top); @@ -1988,14 +1988,16 @@ hr { } .contents hr { - box-shadow: 100px 0 0 var(--separator-color), - -100px 0 0 var(--separator-color), - 500px 0 0 var(--separator-color), - -500px 0 0 var(--separator-color), - 1500px 0 0 var(--separator-color), - -1500px 0 0 var(--separator-color), - 2000px 0 0 var(--separator-color), - -2000px 0 0 var(--separator-color); + box-shadow: 100px 0 var(--separator-color), + -100px 0 var(--separator-color), + 500px 0 var(--separator-color), + -500px 0 var(--separator-color), + 900px 0 var(--separator-color), + -900px 0 var(--separator-color), + 1400px 0 var(--separator-color), + -1400px 0 var(--separator-color), + 1900px 0 var(--separator-color), + -1900px 0 var(--separator-color); } .contents img, .contents .center, .contents center, .contents div.image object { From adf6b05c2b0ab83bd0510a72da5bb2ea28dd01d0 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Tue, 21 May 2024 14:29:25 +0200 Subject: [PATCH 92/97] Move OpenCV support to core. (#1006) --- include/highfive/bits/H5Slice_traits.hpp | 12 +- include/highfive/bits/convert_size_vector.hpp | 31 ++++ include/highfive/experimental/opencv.hpp | 149 ++++++++++++++++++ tests/unit/CMakeLists.txt | 6 +- tests/unit/test_opencv.cpp | 59 +++++++ 5 files changed, 245 insertions(+), 12 deletions(-) create mode 100644 include/highfive/bits/convert_size_vector.hpp create mode 100644 include/highfive/experimental/opencv.hpp create mode 100644 tests/unit/test_opencv.cpp diff --git a/include/highfive/bits/H5Slice_traits.hpp b/include/highfive/bits/H5Slice_traits.hpp index 4d40b7797..65c0d39e0 100644 --- a/include/highfive/bits/H5Slice_traits.hpp +++ b/include/highfive/bits/H5Slice_traits.hpp @@ -13,6 +13,7 @@ #include "H5_definitions.hpp" #include "H5Utils.hpp" +#include "convert_size_vector.hpp" #include "../H5PropertyList.hpp" #include "h5s_wrapper.hpp" @@ -51,17 +52,6 @@ class ElementSet { friend class SliceTraits; }; -namespace detail { - -template -inline std::vector convertSizeVector(const std::vector& from) { - std::vector to(from.size()); - std::copy(from.cbegin(), from.cend(), to.begin()); - - return to; -} -} // namespace detail - inline std::vector toHDF5SizeVector(const std::vector& from) { return detail::convertSizeVector(from); } diff --git a/include/highfive/bits/convert_size_vector.hpp b/include/highfive/bits/convert_size_vector.hpp new file mode 100644 index 000000000..62a815ae5 --- /dev/null +++ b/include/highfive/bits/convert_size_vector.hpp @@ -0,0 +1,31 @@ +/* + * Copyright (c), 2017, Adrien Devresse + * Copyright (c), 2017-2024, BlueBrain Project, EPFL + * + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ +#pragma once + +#include + +namespace HighFive { +namespace detail { + +template +inline std::vector convertSizeVector(const It& begin, const It& end) { + std::vector to(static_cast(end - begin)); + std::copy(begin, end, to.begin()); + + return to; +} + +template +inline std::vector convertSizeVector(const std::vector& from) { + return convertSizeVector(from.cbegin(), from.cend()); +} + +} // namespace detail +} // namespace HighFive diff --git a/include/highfive/experimental/opencv.hpp b/include/highfive/experimental/opencv.hpp new file mode 100644 index 000000000..224160975 --- /dev/null +++ b/include/highfive/experimental/opencv.hpp @@ -0,0 +1,149 @@ +#pragma once + +#include "../bits/H5Inspector_decl.hpp" +#include "../H5Exception.hpp" + +#include + +#include "../bits/convert_size_vector.hpp" + +namespace HighFive { +namespace details { + + +template +struct inspector> { + using type = cv::Mat_; + using value_type = T; + using base_type = typename inspector::base_type; + using hdf5_type = base_type; + + static void assert_row_major(const type& type) { + // Documentation claims that Mat_ is always row-major. However, it + // could be padded. The steps/strides are in bytes. + int rank = type.dims; + size_t ld = sizeof(T); + for (int i = rank - 1; i >= 0; --i) { + if (static_cast(type.step[i]) != ld) { + throw DataSetException("Padded cv::Mat_ are not supported."); + } + + ld *= static_cast(type.size[i]); + } + } + + + static constexpr size_t min_ndim = 2 + inspector::min_ndim; + static constexpr size_t max_ndim = 1024 + inspector::max_ndim; + + // HighFive doesn't support padded OpenCV arrays. Therefore, pretend + // that they themselves are trivially copyable. And error out if the + // assumption is violated. 
+ static constexpr bool is_trivially_copyable = std::is_trivially_copyable::value && + inspector::is_trivially_nestable; + static constexpr bool is_trivially_nestable = false; + + static size_t getRank(const type& val) { + if (val.empty()) { + return min_ndim; + + } else { + return static_cast(val.dims) + + inspector::getRank(getAnyElement(val)); + } + } + + static const T& getAnyElement(const type& val) { + return *reinterpret_cast(val.data); + } + + static T& getAnyElement(type& val) { + return *reinterpret_cast(val.data); + } + + static size_t getLocalRank(const type& val) { + return static_cast(val.dims); + } + + static std::vector getDimensions(const type& val) { + auto local_rank = getLocalRank(val); + auto rank = getRank(val); + std::vector dims(rank, 1ul); + + if (val.empty()) { + dims[0] = 0ul; + dims[1] = 1ul; + return dims; + } + + for (size_t i = 0; i < local_rank; ++i) { + dims[i] = static_cast(val.size[static_cast(i)]); + } + + auto s = inspector::getDimensions(getAnyElement(val)); + std::copy(s.cbegin(), s.cend(), dims.begin() + static_cast(local_rank)); + return dims; + } + + static void prepare(type& val, const std::vector& dims) { + auto subdims = detail::convertSizeVector(dims); + val.create(static_cast(subdims.size()), subdims.data()); + } + + static hdf5_type* data(type& val) { + assert_row_major(val); + + if (!is_trivially_copyable) { + throw DataSetException("Invalid used of `inspector>::data`."); + } + + if (val.empty()) { + return nullptr; + } + + return inspector::data(getAnyElement(val)); + } + + static const hdf5_type* data(const type& val) { + assert_row_major(val); + + if (!is_trivially_copyable) { + throw DataSetException("Invalid used of `inspector>::data`."); + } + + if (val.empty()) { + return nullptr; + } + + return inspector::data(getAnyElement(val)); + } + + static void serialize(const type& val, const std::vector& dims, hdf5_type* m) { + if (val.empty()) { + return; + } + + auto local_rank = val.dims; + auto subdims = std::vector(dims.begin() + local_rank, dims.end()); + auto subsize = compute_total_size(subdims); + for (auto it = val.begin(); it != val.end(); ++it) { + inspector::serialize(*it, subdims, m); + m += subsize; + } + } + + static void unserialize(const hdf5_type* vec_align, + const std::vector& dims, + type& val) { + auto local_rank = val.dims; + auto subdims = std::vector(dims.begin() + local_rank, dims.end()); + auto subsize = compute_total_size(subdims); + for (auto it = val.begin(); it != val.end(); ++it) { + inspector::unserialize(vec_align, subdims, *it); + vec_align += subsize; + } + } +}; + +} // namespace details +} // namespace HighFive diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index a282da319..0f795812d 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -6,7 +6,7 @@ if(MSVC) endif() ## Base tests -foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type test_empty_arrays test_legacy test_string) +foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type test_empty_arrays test_legacy test_opencv test_string) add_executable(${test_name} "${test_name}.cpp") target_link_libraries(${test_name} HighFive HighFiveWarnings HighFiveFlags Catch2::Catch2WithMain) target_link_libraries(${test_name} HighFiveOptionalDependencies) @@ -63,6 +63,10 @@ foreach(PUBLIC_HEADER ${public_headers}) continue() endif() + 
if(PUBLIC_HEADER STREQUAL "highfive/opencv.hpp" AND NOT HIGHFIVE_TEST_OPENCV) + continue() + endif() + get_filename_component(CLASS_NAME ${PUBLIC_HEADER} NAME_WE) configure_file(tests_import_public_headers.cpp "tests_${CLASS_NAME}.cpp" @ONLY) add_executable("tests_include_${CLASS_NAME}" "${CMAKE_CURRENT_BINARY_DIR}/tests_${CLASS_NAME}.cpp") diff --git a/tests/unit/test_opencv.cpp b/tests/unit/test_opencv.cpp new file mode 100644 index 000000000..b27c06daa --- /dev/null +++ b/tests/unit/test_opencv.cpp @@ -0,0 +1,59 @@ +/* + * Copyright (c), 2024, Blue Brain Project - EPFL + * + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ + +#if HIGHFIVE_TEST_OPENCV + +#include +#include +#include + +#include +#include +#include "tests_high_five.hpp" +#include "create_traits.hpp" + +using namespace HighFive; +using Catch::Matchers::Equals; + +TEST_CASE("OpenCV") { + auto file = File("rw_opencv.h5", File::Truncate); + + auto a = cv::Mat_(3, 5); + auto dset = file.createDataSet("a", a); + auto b = dset.read>(); + REQUIRE(a(0, 0) == b(0, 0)); + + auto va = std::vector>(7, cv::Mat_(3, 5)); + auto vdset = file.createDataSet("va", va); + auto vb = vdset.read>>(); + REQUIRE(vb.size() == va.size()); + REQUIRE(vb[0](0, 0) == va[0](0, 0)); +} + +TEST_CASE("OpenCV subarrays") { + auto file = File("rw_opencv_subarray.h5", File::Truncate); + + auto a = cv::Mat_(3, 13); + + SECTION("write") { + auto sa = cv::Mat_(a.colRange(1, 4)); + REQUIRE_THROWS(file.createDataSet("a", sa)); + } + + SECTION("read") { + auto b = cv::Mat_(3, 17); + auto sb = cv::Mat_(a.colRange(0, 13)); + auto dset = file.createDataSet("a", a); + + // Creates a new `Mat_` in `sb`. + dset.read(sb); + } +} + +#endif From ce46f866755bb0452243644fbb57c6d3d331a073 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 23 May 2024 09:09:48 +0200 Subject: [PATCH 93/97] Make `~Object` protected. (#1009) Since deleting HighFive objects through their common base class `Object` is not supported, we should make the dtor protected. See Core Guidelines C35, e.g. https://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Rc-dtor-virtual --- include/highfive/H5Object.hpp | 26 +++----------------- include/highfive/bits/H5Node_traits.hpp | 3 --- include/highfive/bits/H5Node_traits_misc.hpp | 16 +++++------- include/highfive/bits/H5Object_misc.hpp | 6 ----- include/highfive/bits/h5o_wrapper.hpp | 9 +++++++ 5 files changed, 18 insertions(+), 42 deletions(-) diff --git a/include/highfive/H5Object.hpp b/include/highfive/H5Object.hpp index 4cf4e7de0..b6058bf78 100644 --- a/include/highfive/H5Object.hpp +++ b/include/highfive/H5Object.hpp @@ -31,34 +31,12 @@ enum class ObjectType { Other // Internal/custom object type }; -namespace detail { -/// \brief Internal hack to create an `Object` from an ID. -/// -/// WARNING: Creating an Object from an ID has implications w.r.t. the lifetime of the object -/// that got passed via its ID. Using this method careless opens up the suite of issues -/// related to C-style resource management, including the analog of double free, dangling -/// pointers, etc. -/// -/// NOTE: This is not part of the API and only serves to work around a compiler issue in GCC which -/// prevents us from using `friend`s instead. This function should only be used for internal -/// purposes. 
The problematic construct is: -/// -/// template -/// friend class SomeCRTP; -/// -/// \private -Object make_object(hid_t hid); -} // namespace detail - class Object { public: // move constructor, reuse hid Object(Object&& other) noexcept; - // decrease reference counter - ~Object(); - /// /// \brief isValid /// \return true if current Object is a valid HDF5Object @@ -99,13 +77,15 @@ class Object { // Init with an low-level object id explicit Object(hid_t); + // decrease reference counter + ~Object(); + // Copy-Assignment operator Object& operator=(const Object& other); hid_t _hid; private: - friend Object detail::make_object(hid_t); friend class Reference; friend class CompoundType; diff --git a/include/highfive/bits/H5Node_traits.hpp b/include/highfive/bits/H5Node_traits.hpp index 56d9f8d3a..8076bd9b4 100644 --- a/include/highfive/bits/H5Node_traits.hpp +++ b/include/highfive/bits/H5Node_traits.hpp @@ -217,9 +217,6 @@ class NodeTraits { // It makes behavior consistent among versions and by default transforms // errors to exceptions bool _exist(const std::string& node_name, bool raise_errors = true) const; - - // Opens an arbitrary object to obtain info - Object _open(const std::string& node_name) const; }; diff --git a/include/highfive/bits/H5Node_traits_misc.hpp b/include/highfive/bits/H5Node_traits_misc.hpp index 49cfc639d..b26257779 100644 --- a/include/highfive/bits/H5Node_traits_misc.hpp +++ b/include/highfive/bits/H5Node_traits_misc.hpp @@ -254,7 +254,12 @@ inline LinkType NodeTraits::getLinkType(const std::string& node_name) template inline ObjectType NodeTraits::getObjectType(const std::string& node_name) const { - return _open(node_name).getType(); + const auto id = detail::h5o_open(static_cast(this)->getId(), + node_name.c_str(), + H5P_DEFAULT); + auto object_type = _convert_object_type(detail::h5i_get_type(id)); + detail::h5o_close(id); + return object_type; } @@ -314,13 +319,4 @@ inline void NodeTraits::createHardLink(const std::string& link_name, } -template -inline Object NodeTraits::_open(const std::string& node_name) const { - const auto id = detail::h5o_open(static_cast(this)->getId(), - node_name.c_str(), - H5P_DEFAULT); - return detail::make_object(id); -} - - } // namespace HighFive diff --git a/include/highfive/bits/H5Object_misc.hpp b/include/highfive/bits/H5Object_misc.hpp index c5a1f3999..eefddc1ed 100644 --- a/include/highfive/bits/H5Object_misc.hpp +++ b/include/highfive/bits/H5Object_misc.hpp @@ -15,12 +15,6 @@ #include "h5i_wrapper.hpp" namespace HighFive { -namespace detail { -inline Object make_object(hid_t hid) { - return Object(hid); -} -} // namespace detail - inline Object::Object() : _hid(H5I_INVALID_HID) {} diff --git a/include/highfive/bits/h5o_wrapper.hpp b/include/highfive/bits/h5o_wrapper.hpp index 75b91bb6a..df97c3ca1 100644 --- a/include/highfive/bits/h5o_wrapper.hpp +++ b/include/highfive/bits/h5o_wrapper.hpp @@ -15,5 +15,14 @@ inline hid_t h5o_open(hid_t loc_id, const char* name, hid_t lapl_id) { return hid; } +inline herr_t h5o_close(hid_t id) { + herr_t err = H5Oclose(id); + if (err < 0) { + HDF5ErrMapper::ToException("Unable to close object."); + } + + return err; +} + } // namespace detail } // namespace HighFive From 7498b80841bc9d4953f233ebec6e175857948487 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Thu, 23 May 2024 20:18:02 +0200 Subject: [PATCH 94/97] Don't hard code CMAKE_CXX_STANDARD. 
(#1010) --- CMakeLists.txt | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index a1c5a120c..bd40b66dc 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -77,15 +77,16 @@ mark_as_advanced(HIGHFIVE_SANITIZER) # Check compiler cxx_std requirements # ----------------------------------- +set(HIGHFIVE_CXX_STANDARD_DEFAULT 14) if(NOT DEFINED CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 14) + set(CMAKE_CXX_STANDARD ${HIGHFIVE_CXX_STANDARD_DEFAULT}) set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_EXTENSIONS OFF) endif() -if(CMAKE_CXX_STANDARD EQUAL 98 OR CMAKE_CXX_STANDARD LESS 14) - message(FATAL_ERROR "HighFive needs to be compiled with at least C++14") +if(CMAKE_CXX_STANDARD EQUAL 98 OR CMAKE_CXX_STANDARD LESS ${HIGHFIVE_CXX_STANDARD_DEFAULT}) + message(FATAL_ERROR "HighFive needs to be compiled with at least C++${HIGHFIVE_CXX_STANDARD_DEFAULT}") endif() add_compile_definitions(HIGHFIVE_CXX_STD=${CMAKE_CXX_STANDARD}) From 5f3ded67b4a9928f4b9b5f691bc0a60aade32232 Mon Sep 17 00:00:00 2001 From: Luc Grosheintz Date: Fri, 24 May 2024 07:39:32 +0200 Subject: [PATCH 95/97] Implement XTensor support in core. (#976) * Implement XTensor support in core. Adds support for `xt::xtensor`, `xt::xarray` and `xt::xview`, both row and column major. This works by wrapping the internal row-major with `xt::adapt`. Therefore, the `T` in `xt::xtensor` must be scalar (trivial). * In tests/examples: Define `NOMINMAX` for Windows. --- include/highfive/xtensor.hpp | 212 +++++++++++++++++++++++++++++++++ tests/unit/CMakeLists.txt | 8 +- tests/unit/data_generator.hpp | 99 ++++++++++++++- tests/unit/supported_types.hpp | 24 ++++ tests/unit/test_xtensor.cpp | 142 ++++++++++++++++++++++ tests/unit/tests_high_five.hpp | 1 + 6 files changed, 480 insertions(+), 6 deletions(-) create mode 100644 include/highfive/xtensor.hpp create mode 100644 tests/unit/test_xtensor.cpp diff --git a/include/highfive/xtensor.hpp b/include/highfive/xtensor.hpp new file mode 100644 index 000000000..729a1e349 --- /dev/null +++ b/include/highfive/xtensor.hpp @@ -0,0 +1,212 @@ +#pragma once + +#include "bits/H5Inspector_decl.hpp" +#include "H5Exception.hpp" + +#include +#include +#include + +namespace HighFive { +namespace details { + +template +struct xtensor_get_rank; + +template +struct xtensor_get_rank> { + static constexpr size_t value = N; +}; + +template +struct xtensor_get_rank> { + static constexpr size_t value = N; +}; + +template +struct xtensor_inspector_base { + using type = XTensorType; + using value_type = typename type::value_type; + using base_type = typename inspector::base_type; + using hdf5_type = base_type; + + static_assert(std::is_same::value, + "HighFive's XTensor support only works for scalar elements."); + + static constexpr bool IsConstExprRowMajor = L == xt::layout_type::row_major; + static constexpr bool is_trivially_copyable = IsConstExprRowMajor && + std::is_trivially_copyable::value && + inspector::is_trivially_copyable; + + static constexpr bool is_trivially_nestable = false; + + static size_t getRank(const type& val) { + // Non-scalar elements are not supported. 
+ return val.shape().size(); + } + + static const value_type& getAnyElement(const type& val) { + return val.unchecked(0); + } + + static value_type& getAnyElement(type& val) { + return val.unchecked(0); + } + + static std::vector getDimensions(const type& val) { + auto shape = val.shape(); + return {shape.begin(), shape.end()}; + } + + static void prepare(type& val, const std::vector& dims) { + val.resize(Derived::shapeFromDims(dims)); + } + + static hdf5_type* data(type& val) { + if (!is_trivially_copyable) { + throw DataSetException("Invalid used of `inspector::data`."); + } + + if (val.size() == 0) { + return nullptr; + } + + return inspector::data(getAnyElement(val)); + } + + static const hdf5_type* data(const type& val) { + if (!is_trivially_copyable) { + throw DataSetException("Invalid used of `inspector::data`."); + } + + if (val.size() == 0) { + return nullptr; + } + + return inspector::data(getAnyElement(val)); + } + + static void serialize(const type& val, const std::vector& dims, hdf5_type* m) { + // since we only support scalar types we know all dims belong to us. + size_t size = compute_total_size(dims); + xt::adapt(m, size, xt::no_ownership(), dims) = val; + } + + static void unserialize(const hdf5_type* vec_align, + const std::vector& dims, + type& val) { + // since we only support scalar types we know all dims belong to us. + size_t size = compute_total_size(dims); + val = xt::adapt(vec_align, size, xt::no_ownership(), dims); + } +}; + +template +struct xtensor_inspector + : public xtensor_inspector_base, XTensorType, L> { + private: + using super = xtensor_inspector_base, XTensorType, L>; + + public: + using type = typename super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; + + static constexpr size_t ndim = xtensor_get_rank::value; + static constexpr size_t min_ndim = ndim + inspector::min_ndim; + static constexpr size_t max_ndim = ndim + inspector::max_ndim; + + static std::array shapeFromDims(const std::vector& dims) { + std::array shape; + std::copy(dims.cbegin(), dims.cend(), shape.begin()); + return shape; + } +}; + +template +struct xarray_inspector + : public xtensor_inspector_base, XArrayType, L> { + private: + using super = xtensor_inspector_base, XArrayType, L>; + + public: + using type = typename super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; + + static constexpr size_t min_ndim = 0 + inspector::min_ndim; + static constexpr size_t max_ndim = 1024 + inspector::max_ndim; + + static const std::vector& shapeFromDims(const std::vector& dims) { + return dims; + } +}; + +template +struct inspector>: public xtensor_inspector, L> { + private: + using super = xtensor_inspector, L>; + + public: + using type = typename super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; +}; + +template +struct inspector>: public xarray_inspector, L> { + private: + using super = xarray_inspector, L>; + + public: + using type = typename super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; +}; + +template +struct inspector> + : public xarray_inspector, xt::layout_type::any> { + private: + using super = xarray_inspector, xt::layout_type::any>; + + public: + using type = typename 
super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; +}; + + +template +struct inspector> + : public xarray_inspector, xt::layout_type::any> { + private: + using super = xarray_inspector, xt::layout_type::any>; + + public: + using type = typename super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; +}; + +template +struct inspector> + : public xtensor_inspector, xt::layout_type::any> { + private: + using super = xtensor_inspector, xt::layout_type::any>; + + public: + using type = typename super::type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; + using hdf5_type = typename super::hdf5_type; +}; + +} // namespace details +} // namespace HighFive diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 0f795812d..c8835ba34 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -6,7 +6,7 @@ if(MSVC) endif() ## Base tests -foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type test_empty_arrays test_legacy test_opencv test_string) +foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type test_empty_arrays test_legacy test_opencv test_string test_xtensor) add_executable(${test_name} "${test_name}.cpp") target_link_libraries(${test_name} HighFive HighFiveWarnings HighFiveFlags Catch2::Catch2WithMain) target_link_libraries(${test_name} HighFiveOptionalDependencies) @@ -47,7 +47,7 @@ endif() # test succeeds if it compiles. 
file(GLOB public_headers LIST_DIRECTORIES false RELATIVE ${PROJECT_SOURCE_DIR}/include CONFIGURE_DEPENDS ${PROJECT_SOURCE_DIR}/include/highfive/*.hpp) foreach(PUBLIC_HEADER ${public_headers}) - if(PUBLIC_HEADER STREQUAL "highfive/span.hpp" AND NOT HIGHFIVE_TEST_SPAN) + if(PUBLIC_HEADER STREQUAL "highfive/span.hpp" AND NOT HIGHFIVE_TEST_SPAN) continue() endif() @@ -67,6 +67,10 @@ foreach(PUBLIC_HEADER ${public_headers}) continue() endif() + if(PUBLIC_HEADER STREQUAL "highfive/xtensor.hpp" AND NOT HIGHFIVE_TEST_XTENSOR) + continue() + endif() + get_filename_component(CLASS_NAME ${PUBLIC_HEADER} NAME_WE) configure_file(tests_import_public_headers.cpp "tests_${CLASS_NAME}.cpp" @ONLY) add_executable("tests_include_${CLASS_NAME}" "${CMAKE_CURRENT_BINARY_DIR}/tests_${CLASS_NAME}.cpp") diff --git a/tests/unit/data_generator.hpp b/tests/unit/data_generator.hpp index a50284d5c..d513c3420 100644 --- a/tests/unit/data_generator.hpp +++ b/tests/unit/data_generator.hpp @@ -21,11 +21,16 @@ #include #endif +#ifdef HIGHFIVE_TEST_XTENSOR +#include +#endif + namespace HighFive { namespace testing { -std::vector lstrip(const std::vector& indices, size_t n) { +template +std::vector lstrip(const Dims& indices, size_t n) { std::vector subindices(indices.size() - n); for (size_t i = 0; i < subindices.size(); ++i) { subindices[i] = indices[i + n]; @@ -34,7 +39,8 @@ std::vector lstrip(const std::vector& indices, size_t n) { return subindices; } -size_t ravel(std::vector& indices, const std::vector dims) { +template +size_t ravel(std::vector& indices, const Dims& dims) { size_t rank = dims.size(); size_t linear_index = 0; size_t ld = 1; @@ -47,7 +53,8 @@ size_t ravel(std::vector& indices, const std::vector dims) { return linear_index; } -std::vector unravel(size_t flat_index, const std::vector dims) { +template +std::vector unravel(size_t flat_index, const Dims& dims) { size_t rank = dims.size(); size_t ld = 1; std::vector indices(rank); @@ -60,7 +67,8 @@ std::vector unravel(size_t flat_index, const std::vector dims) { return indices; } -static size_t flat_size(const std::vector& dims) { +template +static size_t flat_size(const Dims& dims) { size_t n = 1; for (auto d: dims) { n *= d; @@ -388,6 +396,7 @@ struct ContainerTraits> { #endif +// -- Eigen ------------------------------------------------------------------- #if HIGHFIVE_TEST_EIGEN template @@ -525,6 +534,88 @@ struct ContainerTraits> }; +#endif + +// -- XTensor ----------------------------------------------------------------- + +#if HIGHFIVE_TEST_XTENSOR +template +struct XTensorContainerTraits { + using container_type = XTensorType; + using value_type = typename container_type::value_type; + using base_type = typename ContainerTraits::base_type; + + static constexpr size_t rank = Rank; + static constexpr bool is_view = ContainerTraits::is_view; + + static void set(container_type& array, + const std::vector& indices, + const base_type& value) { + std::vector local_indices(indices.begin(), indices.begin() + rank); + return ContainerTraits::set(array[local_indices], lstrip(indices, rank), value); + } + + static base_type get(const container_type& array, const std::vector& indices) { + std::vector local_indices(indices.begin(), indices.begin() + rank); + return ContainerTraits::get(array[local_indices], lstrip(indices, rank)); + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector& dims) { + const auto& local_dims = details::inspector::shapeFromDims(dims); + auto array = 
container_type(local_dims); + + size_t n_elements = flat_size(local_dims); + for (size_t i = 0; i < n_elements; ++i) { + auto element = ContainerTraits::allocate(lstrip(dims, rank)); + set(array, unravel(i, local_dims), element); + } + + return array; + } + + static void deallocate(container_type& array, const std::vector& dims) { + auto local_dims = std::vector(dims.begin(), dims.begin() + rank); + size_t n_elements = flat_size(local_dims); + for (size_t i_flat = 0; i_flat < n_elements; ++i_flat) { + auto indices = unravel(i_flat, local_dims); + std::vector local_indices(indices.begin(), indices.begin() + rank); + ContainerTraits::deallocate(array[local_indices], lstrip(dims, rank)); + } + } + + static void sanitize_dims(std::vector& dims, size_t axis) { + ContainerTraits::sanitize_dims(dims, axis + rank); + } +}; + +template +struct ContainerTraits> + : public XTensorContainerTraits, rank> { + private: + using super = XTensorContainerTraits, rank>; + + public: + using container_type = typename super::container_type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; +}; + +template +struct ContainerTraits> + : public XTensorContainerTraits, 2> { + private: + using super = XTensorContainerTraits, 2>; + + public: + using container_type = typename super::container_type; + using value_type = typename super::value_type; + using base_type = typename super::base_type; +}; + #endif template diff --git a/tests/unit/supported_types.hpp b/tests/unit/supported_types.hpp index 4d703949d..5f1bf6453 100644 --- a/tests/unit/supported_types.hpp +++ b/tests/unit/supported_types.hpp @@ -82,6 +82,20 @@ struct EigenMapMatrix { }; #endif +#ifdef HIGHFIVE_TEST_XTENSOR +template +struct XTensor { + template + using type = xt::xtensor, rank, layout>; +}; + +template +struct XArray { + template + using type = xt::xarray, layout>; +}; +#endif + template struct ContainerProduct; @@ -165,6 +179,16 @@ using supported_array_types = typename ConcatenateTuples< typename ContainerProduct>, some_scalar_types>::type, typename ContainerProduct>, some_scalar_types>::type, typename ContainerProduct>, some_scalar_types>::type, +#endif +#ifdef HIGHFIVE_TEST_XTENSOR + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct>, scalar_types_eigen>::type, + typename ContainerProduct>, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, + typename ContainerProduct>, scalar_types_eigen>::type, + typename ContainerProduct>, scalar_types_eigen>::type, + typename ContainerProduct, scalar_types_eigen>::type, #endif typename ContainerProduct, all_scalar_types>::type, typename ContainerProduct>, some_scalar_types>::type, diff --git a/tests/unit/test_xtensor.cpp b/tests/unit/test_xtensor.cpp new file mode 100644 index 000000000..ac0b4d743 --- /dev/null +++ b/tests/unit/test_xtensor.cpp @@ -0,0 +1,142 @@ +/* + * Copyright (c), 2024, Blue Brain Project - EPFL + * + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ +#if HIGHFIVE_TEST_XTENSOR +#include +#include + +#include + +#include +#include +#include +#include +#include + +#include "data_generator.hpp" + +using namespace HighFive; + +template +std::array asStaticShape(const std::vector& dims) { + assert(dims.size() == N); + + std::array shape; + std::copy(dims.cbegin(), dims.cend(), shape.begin()); + + return shape; +} + +TEST_CASE("xt::xarray reshape", "[xtensor]") { + const std::string file_name("rw_dataset_xarray.h5"); + + File file(file_name, File::Truncate); + + std::vector shape{3, 2, 4}; + std::vector compatible_shape{1, 3, 2, 4}; + std::vector incompatible_shape{5, 2, 4}; + + xt::xarray a = testing::DataGenerator>::create(shape); + xt::xarray b(compatible_shape); + xt::xarray c(incompatible_shape); + + auto dset = file.createDataSet("baz", a); + + SECTION("xarray_adaptor") { + // Changes the shape. + auto b_adapt = xt::adapt(b.data(), b.size(), xt::no_ownership(), b.shape()); + dset.read(b_adapt); + REQUIRE(b_adapt.shape() == shape); + + // But can't change the number of elements. + auto c_adapt = xt::adapt(c.data(), c.size(), xt::no_ownership(), c.shape()); + REQUIRE_THROWS(dset.read(c_adapt)); + } + + SECTION("xtensor_adaptor") { + auto b_shape = asStaticShape<4>(compatible_shape); + auto c_shape = asStaticShape<3>(incompatible_shape); + + // Doesn't change the shape: + auto b_adapt = xt::adapt(b.data(), b.size(), xt::no_ownership(), b_shape); + REQUIRE_THROWS(dset.read(b_adapt)); + + // and can't change the number of elements: + auto c_adapt = xt::adapt(c.data(), c.size(), xt::no_ownership(), c_shape); + REQUIRE_THROWS(dset.read(c_adapt)); + } +} + +TEST_CASE("xt::xview example", "[xtensor]") { + File file("rw_dataset_xview.h5", File::Truncate); + + std::vector shape{13, 5, 7}; + xt::xarray a = testing::DataGenerator>::create(shape); + auto c = xt::view(a, xt::range(3, 31, 4), xt::all(), xt::drop(0, 3, 4, 5)); + + auto dset = file.createDataSet("c", c); + auto d = dset.read>(); + auto e = dset.read>(); + + REQUIRE(d == c); + REQUIRE(e == c); +} + +template +void check_xtensor_scalar(File& file) { + XTensor a; + a = 42.0; + REQUIRE(a.shape() == std::vector{}); + + SECTION("read") { + auto dset = file.createDataSet("a", a); + REQUIRE(dset.template read() == a(0)); + } + + SECTION("write") { + double b = -42.0; + auto dset = file.createDataSet("b", b); + REQUIRE(dset.template read>()(0) == b); + } +} + +TEST_CASE("xt::xarray scalar", "[xtensor]") { + File file("rw_dataset_xarray_scalar.h5", File::Truncate); + check_xtensor_scalar>(file); +} + +TEST_CASE("xt::xtensor scalar", "[xtensor]") { + File file("rw_dataset_xtensor_scalar.h5", File::Truncate); + check_xtensor_scalar>(file); +} + +template +void check_xtensor_empty(File& file, const XTensor& a, const std::vector& expected_dims) { + auto dset = file.createDataSet("a", a); + auto b = dset.template read(); + REQUIRE(b.size() == 0); + REQUIRE(b == a); + + auto c = std::vector{}; + auto c_shape = details::inspector::getDimensions(c); + REQUIRE(c_shape == expected_dims); +} + +TEST_CASE("xt::xtensor empty", "[xtensor]") { + File file("rw_dataset_xtensor_empty.h5", File::Truncate); + xt::xtensor a({0, 1, 1}); + check_xtensor_empty(file, a, {0, 1, 1, 1}); +} + +TEST_CASE("xt::xarray empty", "[xtensor]") { + File file("rw_dataset_xarray_empty.h5", File::Truncate); + xt::xarray a(std::vector{1, 0, 1}); + check_xtensor_empty(file, a, {0}); +} + +#endif diff --git 
index 25839c69e..d9a4ed34d 100644
--- a/tests/unit/tests_high_five.hpp
+++ b/tests/unit/tests_high_five.hpp
@@ -21,6 +21,7 @@
 // The list of identifiers is taken from `Boost::Predef`.
 #if defined(_WIN32) || defined(_WIN64) || defined(__WIN32__) || defined(__TOS_WIN__) || \
     defined(__WINDOWS__)
+#define NOMINMAX
 #include <windows.h>
 #endif
 

From dd1a39625709a10756b948c263d6bdbbe2341b52 Mon Sep 17 00:00:00 2001
From: Luc Grosheintz
Date: Mon, 10 Jun 2024 09:37:52 +0200
Subject: [PATCH 96/97] Fix CMake code. (#1015)

Includes changes:
* an inconsistency in `HIGHFIVE_MAX_ERRORS`,
* less deeply nest `if-if` statements,
* removes debug output.
---
 CMakeLists.txt               |  2 +-
 cmake/HighFiveWarnings.cmake | 44 +++++++++++++++++++-----------------
 doc/developer_guide.md       |  2 +-
 src/examples/CMakeLists.txt  |  1 -
 4 files changed, 25 insertions(+), 24 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index bd40b66dc..2358e4172 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -68,7 +68,7 @@ option(HIGHFIVE_TEST_HALF_FLOAT "Enable half-precision floats" OFF)
 # TODO remove entirely.
 option(HIGHFIVE_HAS_CONCEPTS "Print readable compiler errors w/ C++20 concepts" OFF)
 
-set(HIGHFIVE_MAX_ERROR 0 "Maximum number of compiler errors.")
+set(HIGHFIVE_MAX_ERRORS 0 CACHE STRING "Maximum number of compiler errors.")
 option(HIGHFIVE_HAS_WERROR "Convert warnings to errors." OFF)
 option(HIGHFIVE_GLIBCXX_ASSERTIONS "Enable bounds check for STL." OFF)
 # TODO these some magic to get a drop down menu in ccmake
diff --git a/cmake/HighFiveWarnings.cmake b/cmake/HighFiveWarnings.cmake
index 3f569d5d5..a1dee19dc 100644
--- a/cmake/HighFiveWarnings.cmake
+++ b/cmake/HighFiveWarnings.cmake
@@ -24,33 +24,35 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "Clang"
             -Wconversion
            -Wsign-conversion
    )
+endif()
+
+if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU")
+    target_compile_options(HighFiveWarnings
+        INTERFACE
+            -Wpedantic
+            -Wcast-align
+            -Wdouble-promotion
+    )
+
+    target_compile_options(HighFiveWarnings
+        INTERFACE
+            -ftemplate-backtrace-limit=0
+    )
 
-    if(NOT CMAKE_CXX_COMPILER_ID MATCHES "Intel")
+    if(HIGHFIVE_HAS_WERROR)
         target_compile_options(HighFiveWarnings
             INTERFACE
-                -Wpedantic
-                -Wcast-align
-                -Wdouble-promotion
+                -Werror
+                -Wno-error=deprecated-declarations
         )
+    endif()
+endif()
 
-        target_compile_options(HighFiveWarnings
+
+if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
+    if(HIGHFIVE_MAX_ERRORS)
+        target_compile_options(HighFiveFlags
             INTERFACE
-                -ftemplate-backtrace-limit=0
+                -fmax-errors=${HIGHFIVE_MAX_ERRORS}
         )
-
-        if(HIGHFIVE_MAX_ERRORS)
-            target_compile_options(HighFiveFlags
-                INTERFACE
-                    -fmax-errors=${HIGHFIVE_MAX_ERRORS}
-            )
-        endif()
-
-        if(HIGHFIVE_HAS_WERROR)
-            target_compile_options(HighFiveWarnings
-                INTERFACE
-                    -Werror
-                    -Wno-error=deprecated-declarations
-            )
-        endif()
     endif()
 endif()
diff --git a/doc/developer_guide.md b/doc/developer_guide.md
index 90867ca12..0f7af3e01 100644
--- a/doc/developer_guide.md
+++ b/doc/developer_guide.md
@@ -25,7 +25,7 @@ ctest --test-dir build
 
 You might want to add:
 * `-DHIGHFIVE_TEST_BOOST=On` or other optional dependencies on,
-* `-DHIGHFIVE_MAX_ERROR=3` to only show the first three errors.
+* `-DHIGHFIVE_MAX_ERRORS=3` to only show the first three errors.
 
 Generic CMake reminders:
 * `-DCMAKE_INSTALL_PREFIX` defines where HighFive will be installed,
diff --git a/src/examples/CMakeLists.txt b/src/examples/CMakeLists.txt
index 778aac91f..5f4b4edf8 100644
--- a/src/examples/CMakeLists.txt
+++ b/src/examples/CMakeLists.txt
@@ -57,7 +57,6 @@ set(half_float_examples
 function(compile_example example_source)
     get_filename_component(example_filename ${example_source} NAME)
     string(REPLACE ".cpp" "_bin" example_name ${example_filename})
-    message("example_name: ${example_name}")
 
     add_executable(${example_name} ${example_source})
     target_link_libraries(${example_name} PUBLIC HighFive HighFiveWarnings HighFiveFlags)

From b74fabbea7bc4422a6891b2f5a27305b8dd8cd1b Mon Sep 17 00:00:00 2001
From: Luc Grosheintz
Date: Mon, 10 Jun 2024 09:38:05 +0200
Subject: [PATCH 97/97] Clean up copy-and-move pattern. (#1017)

---
 include/highfive/bits/H5Slice_traits.hpp | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/include/highfive/bits/H5Slice_traits.hpp b/include/highfive/bits/H5Slice_traits.hpp
index 65c0d39e0..e2b481a36 100644
--- a/include/highfive/bits/H5Slice_traits.hpp
+++ b/include/highfive/bits/H5Slice_traits.hpp
@@ -63,10 +63,10 @@ inline std::vector<size_t> toSTLSizeVector(const std::vector<hsize_t>& from) {
 struct RegularHyperSlab {
     RegularHyperSlab() = default;
 
-    RegularHyperSlab(std::vector<size_t> offset_,
-                     std::vector<size_t> count_ = {},
-                     std::vector<size_t> stride_ = {},
-                     std::vector<size_t> block_ = {})
+    RegularHyperSlab(const std::vector<size_t>& offset_,
+                     const std::vector<size_t>& count_ = {},
+                     const std::vector<size_t>& stride_ = {},
+                     const std::vector<size_t>& block_ = {})
         : offset(toHDF5SizeVector(offset_))
         , count(toHDF5SizeVector(count_))
         , stride(toHDF5SizeVector(stride_))
@@ -77,10 +77,10 @@ struct RegularHyperSlab {
                                             std::vector<size_t> stride_ = {},
                                             std::vector<size_t> block_ = {}) {
         RegularHyperSlab slab;
-        slab.offset = offset_;
-        slab.count = count_;
-        slab.stride = stride_;
-        slab.block = block_;
+        slab.offset = std::move(offset_);
+        slab.count = std::move(count_);
+        slab.stride = std::move(stride_);
+        slab.block = std::move(block_);
 
         return slab;
     }
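A note on the "copy-and-move pattern" named in the subject of PATCH 97/97: a parameter that the callee stores unchanged (a sink argument, as in `fromOffsetCount` above) is taken by value and moved into place, while a parameter that is only read or converted (as in the `RegularHyperSlab` constructor) is taken by const reference. The sketch below is illustrative only and is not part of the patch; `Config` and its members are hypothetical names, not HighFive APIs.

// Illustrative sketch of the copy-and-move (sink argument) idiom.
// `Config` is a hypothetical example type, not part of HighFive.
#include <cstddef>
#include <string>
#include <utility>
#include <vector>

class Config {
  public:
    // Sink parameter: the vector is stored unchanged, so take it by value and
    // move it into the member. Rvalue callers pay one move, lvalue callers one copy.
    explicit Config(std::vector<std::size_t> dims)
        : dims_(std::move(dims)) {}

    // Non-sink parameter: only a converted copy is stored, so a const
    // reference avoids an extra copy at the call boundary.
    void setLabel(const std::string& label) {
        label_ = "config/" + label;
    }

  private:
    std::vector<std::size_t> dims_;
    std::string label_;
};

With this split, `Config(std::vector<std::size_t>{10, 20})` moves the temporary straight into `dims_`, and `setLabel("slab")` performs no copy beyond the single conversion it needs.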