diff --git a/CMakeFilters.cmake b/CMakeFilters.cmake index 200634e1cb3..3a1a0de3a78 100644 --- a/CMakeFilters.cmake +++ b/CMakeFilters.cmake @@ -9,7 +9,7 @@ # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # -option (USE_LIBAEC "Use AEC library as SZip Filter" OFF) +option (USE_LIBAEC "Use AEC library as SZip Filter" ON) option (USE_LIBAEC_STATIC "Use static AEC library " OFF) option (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0) option (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 1fb77141240..71186ffc404 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -390,7 +390,9 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) set(CPACK_WIX_PROPERTY_ARPURLINFOABOUT "${HDF5_PACKAGE_URL}") set(CPACK_WIX_PROPERTY_ARPHELPLINK "${HDF5_PACKAGE_BUGREPORT}") if (BUILD_SHARED_LIBS) - set(CPACK_WIX_PATCH_FILE "${HDF_RESOURCES_DIR}/patch.xml") + set (WIX_CMP_NAME "${HDF5_LIB_NAME}${CMAKE_DEBUG_POSTFIX}") + configure_file (${HDF_RESOURCES_DIR}/patch.xml.in ${HDF5_BINARY_DIR}/patch.xml @ONLY) + set(CPACK_WIX_PATCH_FILE "${HDF5_BINARY_DIR}/patch.xml") endif () elseif (APPLE) list (APPEND CPACK_GENERATOR "STGZ") diff --git a/config/cmake/HDFFortranCompilerFlags.cmake b/config/cmake/HDFFortranCompilerFlags.cmake index 838211f3422..65a1cdc677e 100644 --- a/config/cmake/HDFFortranCompilerFlags.cmake +++ b/config/cmake/HDFFortranCompilerFlags.cmake @@ -58,7 +58,7 @@ if (NOT MSVC AND NOT MINGW) # General flags if (CMAKE_Fortran_COMPILER_ID STREQUAL "Intel") ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/intel-warnings/ifort-general") - list (APPEND HDF5_CMAKE_Fortran_FLAGS "-stand:f03" "-free") + list (APPEND HDF5_CMAKE_Fortran_FLAGS "-free") elseif (CMAKE_Fortran_COMPILER_ID STREQUAL "GNU") ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/gfort-general") if (HDF5_ENABLE_DEV_WARNINGS) diff --git a/config/cmake/hdf5-config.cmake.in b/config/cmake/hdf5-config.cmake.in index 1a3fb7bbf2f..496d2607db2 100644 --- a/config/cmake/hdf5-config.cmake.in +++ b/config/cmake/hdf5-config.cmake.in @@ -38,12 +38,13 @@ set (${HDF5_PACKAGE_NAME}_BUILD_CPP_LIB @HDF5_BUILD_CPP_LIB@) set (${HDF5_PACKAGE_NAME}_BUILD_JAVA @HDF5_BUILD_JAVA@) set (${HDF5_PACKAGE_NAME}_BUILD_TOOLS @HDF5_BUILD_TOOLS@) set (${HDF5_PACKAGE_NAME}_BUILD_HL_LIB @HDF5_BUILD_HL_LIB@) -set (${HDF5_PACKAGE_NAME}_BUILD_HL_TOOLS @HDF5_BUILD_HL_TOOLS@) +set (${HDF5_PACKAGE_NAME}_BUILD_HL_GIF_TOOLS @HDF5_BUILD_HL_GIF_TOOLS@) set (${HDF5_PACKAGE_NAME}_ENABLE_THREADSAFE @HDF5_ENABLE_THREADSAFE@) set (${HDF5_PACKAGE_NAME}_ENABLE_PLUGIN_SUPPORT @HDF5_ENABLE_PLUGIN_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_Z_LIB_SUPPORT @HDF5_ENABLE_Z_LIB_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_SZIP_SUPPORT @HDF5_ENABLE_SZIP_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_SZIP_ENCODING @HDF5_ENABLE_SZIP_ENCODING@) +set (${HDF5_PACKAGE_NAME}_ENABLE_ROS3_VFD @HDF5_ENABLE_ROS3_VFD@) set (${HDF5_PACKAGE_NAME}_BUILD_SHARED_LIBS @H5_ENABLE_SHARED_LIB@) set (${HDF5_PACKAGE_NAME}_BUILD_STATIC_LIBS @H5_ENABLE_STATIC_LIB@) set (${HDF5_PACKAGE_NAME}_PACKAGE_EXTLIBS @HDF5_PACKAGE_EXTLIBS@) @@ -51,7 +52,8 @@ set (${HDF5_PACKAGE_NAME}_EXPORT_LIBRARIES @HDF5_LIBRARIES_TO_EXPORT@) set (${HDF5_PACKAGE_NAME}_ARCHITECTURE "@CMAKE_GENERATOR_ARCHITECTURE@") set (${HDF5_PACKAGE_NAME}_TOOLSET "@CMAKE_GENERATOR_TOOLSET@") set (${HDF5_PACKAGE_NAME}_DEFAULT_API_VERSION "@DEFAULT_API_VERSION@") -set 
(${HDF5_PACKAGE_NAME}_PARALLEL_FILTERED_WRITES "@PARALLEL_FILTERED_WRITES@") +set (${HDF5_PACKAGE_NAME}_PARALLEL_FILTERED_WRITES @PARALLEL_FILTERED_WRITES@) +set (${HDF5_PACKAGE_NAME}_INSTALL_MOD_FORTRAN "@HDF5_INSTALL_MOD_FORTRAN@") #----------------------------------------------------------------------------- # Dependencies @@ -67,11 +69,16 @@ if (${HDF5_PACKAGE_NAME}_ENABLE_PARALLEL) find_package(MPI QUIET REQUIRED) endif () +if (${HDF5_PACKAGE_NAME}_ENABLE_THREADSAFE) + set(THREADS_PREFER_PTHREAD_FLAG ON) + find_package(Threads QUIET REQUIRED) +endif () + if (${HDF5_PACKAGE_NAME}_BUILD_JAVA) set (${HDF5_PACKAGE_NAME}_JAVA_INCLUDE_DIRS @PACKAGE_CURRENT_BUILD_DIR@/lib/jarhdf5-@HDF5_VERSION_STRING@.jar - @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-api-1.7.33.jar - @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-nop-1.7.33.jar + @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-api-2.0.6.jar + @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-nop-2.0.6.jar ) set (${HDF5_PACKAGE_NAME}_JAVA_LIBRARY "@PACKAGE_CURRENT_BUILD_DIR@/lib") set (${HDF5_PACKAGE_NAME}_JAVA_LIBRARIES "${${HDF5_PACKAGE_NAME}_JAVA_LIBRARY}") @@ -143,14 +150,14 @@ foreach (comp IN LISTS ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS) list (REMOVE_ITEM ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS ${comp}) set (${HDF5_PACKAGE_NAME}_LIB_TYPE ${${HDF5_PACKAGE_NAME}_LIB_TYPE} ${comp}) - if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN) + if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN AND ${HDF5_PACKAGE_NAME}_INSTALL_MOD_FORTRAN STREQUAL "SHARED") set (${HDF5_PACKAGE_NAME}_INCLUDE_DIR_FORTRAN "@PACKAGE_INCLUDE_INSTALL_DIR@/shared") endif () elseif (comp STREQUAL "static") list (REMOVE_ITEM ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS ${comp}) set (${HDF5_PACKAGE_NAME}_LIB_TYPE ${${HDF5_PACKAGE_NAME}_LIB_TYPE} ${comp}) - if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN) + if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN AND ${HDF5_PACKAGE_NAME}_INSTALL_MOD_FORTRAN STREQUAL "STATIC") set (${HDF5_PACKAGE_NAME}_INCLUDE_DIR_FORTRAN "@PACKAGE_INCLUDE_INSTALL_DIR@/static") endif () endif () diff --git a/config/cmake/mccacheinit.cmake b/config/cmake/mccacheinit.cmake index dd0c9ece455..4aa604175a5 100644 --- a/config/cmake/mccacheinit.cmake +++ b/config/cmake/mccacheinit.cmake @@ -11,9 +11,9 @@ # # This is the CMakeCache file. 
-######################## +######################### # EXTERNAL cache entries -######################## +######################### set (CMAKE_INSTALL_FRAMEWORK_PREFIX "Library/Frameworks" CACHE STRING "Frameworks installation directory" FORCE) @@ -25,14 +25,14 @@ set (HDF_PACKAGE_NAMESPACE "hdf5::" CACHE STRING "Name for HDF package namespace set (HDF5_BUILD_CPP_LIB ON CACHE BOOL "Build HDF5 C++ Library" FORCE) -set (HDF5_BUILD_EXAMPLES ON CACHE BOOL "Build HDF5 Library Examples" FORCE) - set (HDF5_BUILD_FORTRAN ON CACHE BOOL "Build FORTRAN support" FORCE) set (HDF5_BUILD_HL_LIB ON CACHE BOOL "Build HIGH Level HDF5 Library" FORCE) set (HDF5_BUILD_TOOLS ON CACHE BOOL "Build HDF5 Tools" FORCE) +set (HDF5_BUILD_EXAMPLES ON CACHE BOOL "Build HDF5 Library Examples" FORCE) + set (HDF5_ENABLE_Z_LIB_SUPPORT ON CACHE BOOL "Enable Zlib Filters" FORCE) set (HDF5_ENABLE_SZIP_SUPPORT ON CACHE BOOL "Use SZip Filter" FORCE) diff --git a/config/cmake/patch.xml b/config/cmake/patch.xml.in similarity index 80% rename from config/cmake/patch.xml rename to config/cmake/patch.xml.in index 1bdff3e7c2e..d6843e12697 100644 --- a/config/cmake/patch.xml +++ b/config/cmake/patch.xml.in @@ -1,5 +1,5 @@ - + + +\section sec_exapi_desc Examples Description +The C, FORTRAN and Java examples below point to the examples in the hdf5-examples github repository. Examples for older versions of HDF5 +are handled by setting the appropriate USE_API_xxx definition. HDF5-1.6 examples are in a "16"-named subdirectory. + +The Java examples are in the HDF5-1.10 source code, and the Java Object package examples are in the HDFView source. +Please note that you must comment out the "package" statement at the top when downloading a Java Object example individually. + +The MATLAB and Python examples were generously provided by a user and are not tested. + +Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), and Python (Low Level APIs). + +\subsection sec_exapi_dsets Datasets + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output | DDL
Set Space Allocation Time for Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_alloc.h5h5ex_d_alloc.tsth5ex_d_alloc.ddl
Read / Write Dataset using Fletcher32 Checksum Filter +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_checksum.h5h5ex_d_checksum.tsth5ex_d_checksum.ddl
Read / Write Chunked Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_chunk.h5h5ex_d_chunk.tsth5ex_d_chunk.ddl
Read / Write Compact Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_compact.h5h5ex_d_compact.tsth5ex_d_compact.ddl
Read / Write to External Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_extern.h5h5ex_d_extern.tsth5ex_d_extern.ddl
Read / Write Dataset w/ Fill Value +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_fillval.h5h5ex_d_fillval.tsth5ex_d_fillval.ddl
Read / Write GZIP Compressed Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_gzip.h5h5ex_d_gzip.tsth5ex_d_gzip.ddl
Read / Write Data by Hyperslabs +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_hyper.h5h5ex_d_hyper.tsth5ex_d_hyper.ddl
Read / Write Dataset with n-bit Filter +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_nbit.h5h5ex_d_nbit.tsth5ex_d_nbit.ddl
Read / Write Integer Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_rdwrc.h5h5ex_d_rdwrc.tsth5ex_d_rdwr.ddl
Read / Write Dataset w/ Shuffle Filter and GZIP Compression +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_shuffle.h5h5ex_d_shuffle.tsth5ex_d_shuffle.ddl
Read / Write Dataset using Scale-Offset Filter (float) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_sofloat.h5h5ex_d_sofloat.tsth5ex_d_sofloat.ddl
Read / Write Dataset using Scale-Offset Filter (integer) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_soint.h5h5ex_d_soint.tsth5ex_d_soint.ddl
Read / Write Dataset using SZIP Compression +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_szip.h5h5ex_d_szip.tsth5ex_d_szip.ddl
Read / Write Dataset using Data Transform Expression +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_transform.h5h5ex_d_transform.tsth5ex_d_transform.ddl
Read / Write Unlimited Dimension Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_unlimadd.h5h5ex_d_unlimadd.tsth5ex_d_unlimadd.ddl
Read / Write GZIP Compressed Unlimited Dimension Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_unlimgzip.h5h5ex_d_unlimgzip.tsth5ex_d_unlimgzip.ddl
Read / Write / Edit Unlimited Dimension Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_unlimmod.h5h5ex_d_unlimmod.tsth5ex_d_unlimmod.ddl
+ +\subsection sec_exapi_grps Groups + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output | DDL
Create "compact-or-indexed" Format Groups +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_compact.h5h5ex_g_.tsth5ex_g_compact1.ddlh5ex_g_compact2.ddl
Track links in a Group by Creation Order +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_corder.h5h5ex_g_corder.tsth5ex_g_corder.ddl
Create / Open / Close a Group +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_create.h5h5ex_g_create.tsth5ex_g_create.ddl
Create Intermediate Groups +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_intermediate.h5h5ex_g_intermediate.tsth5ex_g_intermediate.ddl
Iterate over Groups w/ H5Literate +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_iterate.h5h5ex_g_iterate.tsth5ex_g_iterate.ddl
Set Conditions to Convert between Compact and Dense Groups +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_phase.h5h5ex_g_phase.tsth5ex_g_phase.ddl
Recursively Traverse a File with H5Literate +C + FORTRAN +Java + JavaObj MATLAB PyHigh PyLow +h5ex_g_traverse.h5h5ex_g_traverse.tsth5ex_g_traverse.ddl
Recursively Traverse a File with H5Ovisit / H5Lvisit +C + FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_visit.h5h5ex_g_visit.tsth5ex_g_visit.ddl
+ +\subsection sec_exapi_dtypes Datatypes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output | DDL
Read / Write Array (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_arrayatt.h5h5ex_t_arrayatt.tsth5ex_t_arrayatt.ddl
Read / Write Array (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_array.h5h5ex_t_array.tsth5ex_t_array.ddl
Read / Write Bitfield (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_bitatt.h5h5ex_t_bitatt.tsth5ex_t_bitatt.ddl
Read / Write Bitfield (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_bit.h5h5ex_t_bit.tsth5ex_t_bit.ddl
Read / Write Compound (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_cmpdatt.h5h5ex_t_cmpdatt.tsth5ex_t_cmpdatt.ddl
Read / Write Compound (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_cmpd.h5h5ex_t_cmpd.tsth5ex_t_cmpd.ddl
Commit Named Datatype and Read Back +C + FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_commit.h5h5ex_t_commit.tsth5ex_t_commit.ddl
Convert Between Datatypes in Memory +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_t_convert.h5h5ex_t_convert.tsth5ex_t_convert.ddl
Read / Write Complex Compound (Attribute) +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_t_cpxcmpdatt.h5h5ex_t_cpxcmpdatt.tsth5ex_t_cpxcmpdatt.ddl
Read / Write Complex Compound (Dataset) +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_t_cpxcmpd.h5h5ex_t_cpxcmpd.tsth5ex_t_cpxcmpd.ddl
Read / Write Enumerated (Attribute) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_enumatt.h5h5ex_t_enumatt.tsth5ex_t_enumatt.ddl
Read / Write Enumerated (Dataset) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_enum.h5h5ex_t_enum.tsth5ex_t_enum.ddl
Read / Write Floating Point (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_floatatt.h5h5ex_t_floatatt.tsth5ex_t_floatatt.ddl
Read / Write Floating Point (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_float.h5h5ex_t_float.tsth5ex_t_float.ddl
Read / Write Integer Datatype (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_intatt.h5h5ex_t_intatt.tsth5ex_t_intatt.ddl
Read / Write Integer Datatype (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_int.h5h5ex_t_int.tsth5ex_t_int.ddl
Read / Write Object References (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_objrefatt.h5h5ex_t_objrefatt.tsth5ex_t_objrefatt.ddl
Read / Write Object References (Dataset) +C +FORTRAN +Java + JavaObj + MATLAB PyHigh PyLow +h5ex_t_objref.h5h5ex_t_objref.tsth5ex_t_objref.ddl
Read / Write Opaque (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_opaqueatt.h5h5ex_t_opaqueatt.tsth5ex_t_opaqueatt.ddl
Read / Write Opaque (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_opaque.h5h5ex_t_opaque.tsth5ex_t_opaque.ddl
Read / Write Region References (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_regrefatt.h5h5ex_t_regrefatt.tsth5ex_t_regrefatt.ddl
Read / Write Region References (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_regref.h5h5ex_t_regref.tsth5ex_t_regref.ddl
Read / Write String (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_stringatt.h5h5ex_t_stringatt.tsth5ex_t_stringatt.ddl
Read / Write String (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_string.h5h5ex_t_string.tsth5ex_t_string.ddl
Read / Write Variable Length (Attribute) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_vlenatt.h5h5ex_t_vlenatt.tsth5ex_t_vlenatt.ddl
Read / Write Variable Length (Dataset) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_vlen.h5h5ex_t_vlen.tsth5ex_t_vlen.ddl
Read / Write Variable Length String (Attribute) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_vlstringatt.h5h5ex_t_vlstringatt.tsth5ex_t_vlstringatt.ddl
Read / Write Variable Length String (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_vlstring.h5h5ex_t_vlstring.tsth5ex_t_vlstring.ddl
+ +\subsection sec_exapi_filts Filters + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output | DDL
Read / Write Dataset using Blosc Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_blosc.h5h5ex_d_blosc.tsth5ex_d_blosc.ddl
Read / Write Dataset using Bit Shuffle Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_bshuf.h5h5ex_d_bshuf.tsth5ex_d_bshuf.ddl
Read / Write Dataset using BZip2 Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_bzip2.h5h5ex_d_bzip2.tsth5ex_d_bzip2.ddl
Read / Write Dataset using JPEG Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_jpeg.h5h5ex_d_jpeg.tsth5ex_d_jpeg.ddl
Read / Write Dataset using LZ4 Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_lz4.h5h5ex_d_lz4.tsth5ex_d_lz4.ddl
Read / Write Dataset using LZF Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_lzf.h5h5ex_d_lzf.tsth5ex_d_lzf.ddl
Read / Write Dataset using MAFISC Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_mafisc.h5h5ex_d_mafisc.tsth5ex_d_mafisc.ddl
Read / Write Dataset using ZFP Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_zfp.h5h5ex_d_zfp.tsth5ex_d_zfp.ddl
Read / Write Dataset using ZStd Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_zstd.h5h5ex_d_zstd.tsth5ex_d_zstd.ddl
+ +\subsection sec_exapi_java Java General + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | Output
Create/Read/Write an Attribute +Java +JavaObj +HDF5AttributeCreate.txt
Create Datasets +Java +JavaObj +HDF5DatasetCreate.txt
Read/Write Datasets +Java +JavaObj +HDF5DatasetRead.txt
Create an Empty File +Java +JavaObj +HDF5FileCreate.txt
Retrieve the File Structure +Java + JavaObj +HDF5FileStructure.txt
Create Groups +Java +JavaObj +HDF5GroupCreate.txt
Select a Subset of a Dataset +Java + JavaObj +HDF5SubsetSelect.txt
Create Two Datasets Within Groups +Java +JavaObj +HDF5GroupDatasetCreate.txt
+ + +\subsection sec_exapi_par Parallel + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output
Creating and Accessing a File +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Creating and Accessing a Dataset +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Writing and Reading Contiguous Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Writing and Reading Regularly Spaced Data Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Writing and Reading Pattern Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Writing and Reading Chunk Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Using the Subfiling VFD to Write a File Striped Across Multiple Subfiles +C + FORTRAN MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Write to Datasets with Filters Applied +C + FORTRAN MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Collectively Write Datasets with Filters and Not All Ranks have Data +C + FORTRAN MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
+ + +
+Navigate back: \ref index "Main" / \ref GettingStarted + +*/ diff --git a/doxygen/dox/GettingStarted.dox b/doxygen/dox/GettingStarted.dox index 29c503319d0..87f3566361e 100644 --- a/doxygen/dox/GettingStarted.dox +++ b/doxygen/dox/GettingStarted.dox @@ -50,10 +50,10 @@ Parallel HDF5, and the HDF5-1.10 VDS and SWMR new features: -Introduction to Parallel HDF5 +\ref IntroParHDF5 -A brief introduction to Parallel HDF5. If you are new to HDF5 please see the @ref LearnBasics topic first. +A brief introduction to Parallel HDF5. If you are new to HDF5 please see the @ref LearnBasics topic first. diff --git a/doxygen/dox/IntroHDF5.dox b/doxygen/dox/IntroHDF5.dox index ec46217c4aa..2c25659b8f3 100644 --- a/doxygen/dox/IntroHDF5.dox +++ b/doxygen/dox/IntroHDF5.dox @@ -124,7 +124,7 @@ It is a 2-dimensional 5 x 3 array (the dataspace). The datatype should not be co \subsubsection subsec_intro_desc_prop_dspace Dataspaces -A dataspace describes the layout of a dataset’s data elements. It can consist of no elements (NULL), +A dataspace describes the layout of a dataset's data elements. It can consist of no elements (NULL), a single element (scalar), or a simple array. @@ -141,7 +141,7 @@ in size (i.e. they are extendible). There are two roles of a dataspace: \li It contains the spatial information (logical layout) of a dataset stored in a file. This includes the rank and dimensions of a dataset, which are a permanent part of the dataset definition. -\li It describes an application’s data buffers and data elements participating in I/O. In other words, it can be used to select a portion or subset of a dataset. +\li It describes an application's data buffers and data elements participating in I/O. In other words, it can be used to select a portion or subset of a dataset.
@@ -602,12 +602,12 @@ Navigate back: \ref index "Main" / \ref GettingStarted @page HDF5Examples HDF5 Examples Example programs of how to use HDF5 are provided below. For HDF-EOS specific examples, see the examples -of how to access and visualize NASA HDF-EOS files using IDL, MATLAB, and NCL on the -HDF-EOS Tools and Information Center page. +of how to access and visualize NASA HDF-EOS files using Python, IDL, MATLAB, and NCL +on the HDF-EOS Tools and Information Center page. \section secHDF5Examples Examples \li \ref LBExamples -\li Examples by API +\li \ref ExAPI \li Examples in the Source Code \li Other Examples diff --git a/doxygen/dox/IntroParExamples.dox b/doxygen/dox/IntroParExamples.dox new file mode 100644 index 00000000000..39291063dc7 --- /dev/null +++ b/doxygen/dox/IntroParExamples.dox @@ -0,0 +1,569 @@ +/** @page IntroParContHyperslab Writing by Contiguous Hyperslab + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
+ +This example shows how to write a contiguous buffer in memory to a contiguous hyperslab in a file. In this case, +each parallel process writes a contiguous hyperslab to the file. + +In the C example (figure a), each hyperslab in memory consists of an equal number of consecutive rows. In the FORTRAN +90 example (figure b), each hyperslab in memory consists of +an equal number of consecutive columns. This reflects the difference in the storage order for C and FORTRAN 90. +
The dataspace is used to describe both the logical layout of a dataset and a subset of a dataset.
+ + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html pcont_hy_figa.gif + +\image html pcont_hy_figb.gif +
+ +\section secIntroParContHyperslabC Writing a Contiguous Hyperslab in C +In this example, you have a dataset of 8 (rows) x 5 (columns) and each process writes an equal number +of rows to the dataset. The dataset hyperslab is defined as follows: +\code + count [0] = dimsf [0] / number_processes + count [1] = dimsf [1] +\endcode +where, +\code + dimsf [0] is the number of rows in the dataset + dimsf [1] is the number of columns in the dataset +\endcode +The offset for the hyperslab is different for each process: +\code + offset [0] = k * count[0] + offset [1] = 0 +\endcode +where, +\code + "k" is the process id number + count [0] is the number of rows written in each hyperslab + offset [1] = 0 indicates to start at the beginning of the row +\endcode + +The number of processes that you could use would be 1, 2, 4, or 8. The number of rows that would be written by each slab is as follows: + + + + + + + + + + + + + +
Processes | Size of count[0] (\# of rows)
1         | 8
2         | 4
4         | 2
8         | 1
+ +If using 4 processes, then process 1 would look like: + + + + +
+\image html pcont_hy_figc.gif +
+ +The code would look like the following: +\code + 71 /* + 72 * Each process defines dataset in memory and writes it to the hyperslab + 73 * in the file. + 74 */ + 75 count[0] = dimsf[0]/mpi_size; + 76 count[1] = dimsf[1]; + 77 offset[0] = mpi_rank * count[0]; + 78 offset[1] = 0; + 79 memspace = H5Screate_simple(RANK, count, NULL); + 80 + 81 /* + 82 * Select hyperslab in the file. + 83 */ + 84 filespace = H5Dget_space(dset_id); + 85 H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL); +\endcode + +Below is the example program: + + + + +
+hyperslab_by_row.c +
+ +If using this example with 4 processes, then, +\li Process 0 writes "10"s to the file. +\li Process 1 writes "11"s. +\li Process 2 writes "12"s. +\li Process 3 writes "13"s. + +The following is the output from h5dump for the HDF5 file created by this example using 4 processes: +\code +HDF5 "SDS_row.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 5 ) / ( 8, 5 ) } + DATA { + 10, 10, 10, 10, 10, + 10, 10, 10, 10, 10, + 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, + 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, + 13, 13, 13, 13, 13, + 13, 13, 13, 13, 13 + } + } +} +} +\endcode + + +\section secIntroParContHyperslabFort Writing a Contiguous Hyperslab in Fortran +In this example you have a dataset of 5 (rows) x 8 (columns). Since a contiguous hyperslab in Fortran 90 +consists of consecutive columns, each process will be writing an equal number of columns to the dataset. + +You would define the size of the hyperslab to write to the dataset as follows: +\code + count(1) = dimsf(1) + count(2) = dimsf(2) / number_of_processes +\endcode + +where, +\code + dimsf(1) is the number of rows in the dataset + dimsf(2) is the number of columns +\endcode + +The offset for the hyperslab dimension would be different for each process: +\code + offset (1) = 0 + offset (2) = k * count (2) +\endcode + +where, +\code + offset (1) = 0 indicates to start at the beginning of the column + "k" is the process id number + "count(2) is the number of columns to be written by each hyperslab +\endcode + +The number of processes that could be used in this example are 1, 2, 4, or 8. The number of +columns that could be written by each slab is as follows: + + + + + + + + + + + + + +
Processes | Size of count (2) (\# of columns)
1         | 8
2         | 4
4         | 2
8         | 1
+ +If using 4 processes, the offset and count parameters for Process 1 would look like: + + + + +
+\image html pcont_hy_figd.gif +
+ +The code would look like the following: +\code + 69 ! Each process defines dataset in memory and writes it to the hyperslab + 70 ! in the file. + 71 ! + 72 count(1) = dimsf(1) + 73 count(2) = dimsf(2)/mpi_size + 74 offset(1) = 0 + 75 offset(2) = mpi_rank * count(2) + 76 CALL h5screate_simple_f(rank, count, memspace, error) + 77 ! + 78 ! Select hyperslab in the file. + 79 ! + 80 CALL h5dget_space_f(dset_id, filespace, error) + 81 CALL h5sselect_hyperslab_f (filespace, H5S_SELECT_SET_F, offset, count, error) +\endcode + +Below is the F90 example program which illustrates how to write contiguous hyperslabs by column in Parallel HDF5: + + + + +
+hyperslab_by_col.F90 +
+ +If you run this program with 4 processes and look at the output with h5dump you will notice that the output is +much like the output shown above for the C example. This is because h5dump is written in C. The data would be +displayed in columns if it was printed using Fortran 90 code. + +
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +@page IntroParRegularSpaced Writing by Regularly Spaced Data + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
+ +In this case, each process writes data from a contiguous buffer into disconnected locations in the file, using a regular pattern. + +In C it is done by selecting a hyperslab in a file that consists of regularly spaced columns. In F90, it is done by selecting a +hyperslab in a file that consists of regularly spaced rows. + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html preg_figa.gif + +\image html preg_figb.gif +
+ +\section secIntroParRegularSpacedC Writing Regularly Spaced Columns in C +In this example, you have two processes that write to the same dataset, each writing to +every other column in the dataset. For each process the hyperslab in the file is set up as follows: +\code + 89 count[0] = 1; + 90 count[1] = dimsm[1]; + 91 offset[0] = 0; + 92 offset[1] = mpi_rank; + 93 stride[0] = 1; + 94 stride[1] = 2; + 95 block[0] = dimsf[0]; + 96 block[1] = 1; +\endcode + +The stride is 2 for dimension 1 to indicate that every other position along this +dimension will be written to. A stride of 1 indicates that every position along a dimension will be written to. + +For two processes, the mpi_rank will be either 0 or 1. Therefore: +\li Process 0 writes to even columns (0, 2, 4...) +\li Process 1 writes to odd columns (1, 3, 5...) + +The block size allows each process to write a column of data to every other position in the dataset. + + + + + +
+\image html preg_figc.gif +
+ +Below is an example program for writing hyperslabs by column in Parallel HDF5: + + + + +
+hyperslab_by_col.c +
+ +The following is the output from h5dump for the HDF5 file created by this example: +\code +HDF5 "SDS_col.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 6 ) / ( 8, 6 ) } + DATA { + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200 + } + } +} +} +\endcode + + +\section secIntroParRegularSpacedFort Writing Regularly Spaced Rows in Fortran +In this example, you have two processes that write to the same dataset, each writing to every +other row in the dataset. For each process the hyperslab in the file is set up as follows: + + +You would define the size of the hyperslab to write to the dataset as follows: +\code + 83 ! Each process defines dataset in memory and writes it to + 84 ! the hyperslab in the file. + 85 ! + 86 count(1) = dimsm(1) + 87 count(2) = 1 + 88 offset(1) = mpi_rank + 89 offset(2) = 0 + 90 stride(1) = 2 + 91 stride(2) = 1 + 92 block(1) = 1 + 93 block(2) = dimsf(2) +\endcode + +The stride is 2 for dimension 1 to indicate that every other position along this dimension will +be written to. A stride of 1 indicates that every position along a dimension will be written to. + +For two process, the mpi_rank will be either 0 or 1. Therefore: +\li Process 0 writes to even rows (0, 2, 4 ...) +\li Process 1 writes to odd rows (1, 3, 5 ...) + +The block size allows each process to write a row of data to every other position in the dataset, +rather than just a point of data. + +The following shows the data written by Process 1 to the file: + + + + +
+\image html preg_figd.gif +
+ +Below is the example program for writing hyperslabs by column in Parallel HDF5: + + + + +
+hyperslab_by_row.F90 +
+ +The output for h5dump on the file created by this program will look like the output as shown above for the C example. This is +because h5dump is written in C. The data would be displayed in rows if it were printed using Fortran 90 code. + +
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +@page IntroParPattern Writing by Pattern + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
+ +This is another example of writing data into disconnected locations in a file. Each process writes data from the contiguous +buffer into regularly scattered locations in the file. + +Each process defines a hyperslab in the file as described below and writes data to it. The C and Fortran 90 examples below +result in the same data layout in the file. + + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html ppatt_figa.gif + +\image html ppatt_figb.gif +
+ +The C and Fortran 90 examples use four processes to write the pattern shown above. Each process defines a hyperslab by: +\li Specifying a stride of 2 for each dimension, which indicates that you wish to write to every other position along a dimension. +\li Specifying a different offset for each process: + + + + + + + + + + + + + + +
C       | Process 0     | Process 1     | Process 2     | Process 3
        | offset[0] = 0 | offset[0] = 1 | offset[0] = 0 | offset[0] = 1
        | offset[1] = 0 | offset[1] = 0 | offset[1] = 1 | offset[1] = 1
Fortran | Process 0     | Process 1     | Process 2     | Process 3
        | offset(1) = 0 | offset(1) = 0 | offset(1) = 1 | offset(1) = 1
        | offset(2) = 0 | offset(2) = 1 | offset(2) = 0 | offset(2) = 1
+\li Specifying the size of the slab to write. The count is the number of positions along a dimension to write to. If writing a 4 x 2 slab, +then the count would be: + + + + + + + + +
C            | Fortran
count[0] = 4 | count(1) = 2
count[1] = 2 | count(2) = 4
+ +For example, the offset, count, and stride parameters for Process 2 would look like: + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html ppatt_figc.gif + +\image html ppatt_figd.gif +
+ +Below are example programs for writing hyperslabs by pattern in Parallel HDF5: + + + + + + + +
+hyperslab_by_pattern.c +
+hyperslab_by_pattern.F90 +
+ +The following is the output from h5dump for the HDF5 file created in this example: +\code +HDF5 "SDS_pat.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 4 ) / ( 8, 4 ) } + DATA { + 1, 3, 1, 3, + 2, 4, 2, 4, + 1, 3, 1, 3, + 2, 4, 2, 4, + 1, 3, 1, 3, + 2, 4, 2, 4, + 1, 3, 1, 3, + 2, 4, 2, 4 + } + } +} +} +\endcode +The h5dump utility is written in C so the output is in C order. + + +
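
For orientation only, the offset, stride, and count values listed above for Process 2 could be assembled roughly as in the following untested C sketch; the real code is in hyperslab_by_pattern.c, and dset_id is assumed to be an already-opened dataset:
\code
    /* Hypothetical sketch (not from the example source): file-space selection
     * for Process 2 in the C pattern example, using the values shown above. */
    hsize_t offset[2] = {0, 1};   /* start at row 0, column 1          */
    hsize_t stride[2] = {2, 2};   /* every other row and column        */
    hsize_t count[2]  = {4, 2};   /* 4 x 2 positions are selected      */

    hid_t filespace = H5Dget_space(dset_id);
    H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, stride, count, NULL);
\endcode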
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +@page IntroParChunk Writing by Chunk + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
+ +In this example each process writes a "chunk" of data to a dataset. The C and Fortran 90 +examples result in the same data layout in the file. + + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html pchunk_figa.gif + +\image html pchunk_figb.gif +
+ +For this example, four processes are used, and a 4 x 2 chunk is written to the dataset by each process. + +To do this, you would: +\li Use the block parameter to specify a chunk of size 4 x 2 (or 2 x 4 for Fortran). +\li Use a different offset (start) for each process, based on the chunk size: + + + + + + + + + + + + + + +
C       | Process 0     | Process 1     | Process 2     | Process 3
        | offset[0] = 0 | offset[0] = 0 | offset[0] = 4 | offset[0] = 4
        | offset[1] = 0 | offset[1] = 2 | offset[1] = 0 | offset[1] = 2
Fortran | Process 0     | Process 1     | Process 2     | Process 3
        | offset(1) = 0 | offset(1) = 2 | offset(1) = 0 | offset(1) = 2
        | offset(2) = 0 | offset(2) = 0 | offset(2) = 4 | offset(2) = 4
+ +For example, the offset and block parameters for Process 2 would look like: + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html pchunk_figc.gif + +\image html pchunk_figd.gif +
+ +Below are example programs for writing hyperslabs by pattern in Parallel HDF5: + + + + + + + +
+hyperslab_by_chunk.c +
+hyperslab_by_chunk.F90 +
+ +The following is the output from h5dump for the HDF5 file created in this example: +\code +HDF5 "SDS_chnk.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 4 ) / ( 8, 4 ) } + DATA { + 1, 1, 2, 2, + 1, 1, 2, 2, + 1, 1, 2, 2, + 1, 1, 2, 2, + 3, 3, 4, 4, + 3, 3, 4, 4, + 3, 3, 4, 4, + 3, 3, 4, 4 + } + } +} +} +\endcode +The h5dump utility is written in C so the output is in C order. + +
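
For orientation only, the Process 2 offset and block values above could be assembled roughly as in the following untested C sketch; the real code is in hyperslab_by_chunk.c, and dset_id is assumed to be an already-opened dataset:
\code
    /* Hypothetical sketch (not from the example source): file-space selection
     * for Process 2 in the C chunk example, one 4 x 2 block starting at row 4. */
    hsize_t offset[2] = {4, 0};
    hsize_t count[2]  = {1, 1};   /* one block in each dimension  */
    hsize_t block[2]  = {4, 2};   /* the block is the 4 x 2 chunk */

    hid_t filespace = H5Dget_space(dset_id);
    H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, block);
\endcode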
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +*/ diff --git a/doxygen/dox/IntroParHDF5.dox b/doxygen/dox/IntroParHDF5.dox new file mode 100644 index 00000000000..1f04e968e44 --- /dev/null +++ b/doxygen/dox/IntroParHDF5.dox @@ -0,0 +1,271 @@ +/** @page IntroParHDF5 A Brief Introduction to Parallel HDF5 + +Navigate back: \ref index "Main" / \ref GettingStarted +
+ +If you are new to HDF5 please see the @ref LearnBasics topic first. + +\section sec_pintro_overview Overview of Parallel HDF5 (PHDF5) Design +There were several requirements that we had for Parallel HDF5 (PHDF5). These were: +\li Parallel HDF5 files had to be compatible with serial HDF5 files and sharable +between different serial and parallel platforms. +\li Parallel HDF5 had to be designed to have a single file image to all processes, +rather than having one file per process. Having one file per process can cause expensive +post processing, and the files are not usable by different processes. +\li A standard parallel I/O interface had to be portable to different platforms. + +With these requirements of HDF5 our initial target was to support MPI programming, but not +for shared memory programming. We had done some experimentation with thread-safe support +for Pthreads and for OpenMP, and decided to use these. + +Implementation requirements were to: +\li Not use Threads, since they were not commonly supported in 1998 when we were looking at this. +\li Not have a reserved process, as this might interfere with parallel algorithms. +\li Not spawn any processes, as this is not even commonly supported now. + +The following shows the Parallel HDF5 implementation layers. + + +\subsection subsec_pintro_prog Parallel Programming with HDF5 +This tutorial assumes that you are somewhat familiar with parallel programming with MPI (Message Passing Interface). + +If you are not familiar with parallel programming, here is a tutorial that may be of interest: +Tutorial on HDF5 I/O tuning at NERSC + +Some of the terms that you must understand in this tutorial are: +
    +
  • MPI Communicator: Allows a group of processes to communicate with each other.

Following are the MPI routines for initializing MPI and the communicator and finalizing a session with MPI:
    C             | Fortran        | Description
    MPI_Init      | MPI_INIT       | Initialize MPI (MPI_COMM_WORLD usually)
    MPI_Comm_size | MPI_COMM_SIZE  | Define how many processes are contained in the communicator
    MPI_Comm_rank | MPI_COMM_RANK  | Define the process ID number within the communicator (from 0 to n-1)
    MPI_Finalize  | MPI_FINALIZE   | Exiting MPI
  • Collective: MPI defines this to mean all processes of the communicator must participate in the right order.
+ +Parallel HDF5 opens a parallel file with a communicator. It returns a file handle to be used for future access to the file. + +All processes are required to participate in the collective Parallel HDF5 API. Different files can be opened using different communicators. + +Examples of what you can do with the Parallel HDF5 collective API: +\li File Operation: Create, open and close a file +\li Object Creation: Create, open, and close a dataset +\li Object Structure: Extend a dataset (increase dimension sizes) +\li Dataset Operations: Write to or read from a dataset +(Array data transfer can be collective or independent.) + +Once a file is opened by the processes of a communicator: +\li All parts of the file are accessible by all processes. +\li All objects in the file are accessible by all processes. +\li Multiple processes write to the same dataset. +\li Each process writes to an individual dataset. + +Please refer to the Supported Configuration Features Summary in the release notes for the current release +of HDF5 for an up-to-date list of the platforms that we support Parallel HDF5 on. + + +\subsection subsec_pintro_create_file Creating and Accessing a File with PHDF5 +The programming model for creating and accessing a file is as follows: +
    +
  1. Set up an access template object to control the file access mechanism.
  2. Open the file.
  3. Close the file.
+ +Each process of the MPI communicator creates an access template and sets it up with MPI parallel +access information. This is done with the #H5Pcreate call to obtain the file access property list +and the #H5Pset_fapl_mpio call to set up parallel I/O access. + +Following is example code for creating an access template in HDF5: +C +\code + 23 MPI_Comm comm = MPI_COMM_WORLD; + 24 MPI_Info info = MPI_INFO_NULL; + 25 + 26 /* + 27 * Initialize MPI + 28 */ + 29 MPI_Init(&argc, &argv); + 30 MPI_Comm_size(comm, &mpi_size); + 31 MPI_Comm_rank(comm, &mpi_rank); + 32 + 33 /* + 34 * Set up file access property list with parallel I/O access + 35 */ + 36 plist_id = H5Pcreate(H5P_FILE_ACCESS); 37 H5Pset_fapl_mpio(plist_id, comm, info); +\endcode + +Fortran +\code + 23 comm = MPI_COMM_WORLD + 24 info = MPI_INFO_NULL + 25 + 26 CALL MPI_INIT(mpierror) + 27 CALL MPI_COMM_SIZE(comm, mpi_size, mpierror) + 28 CALL MPI_COMM_RANK(comm, mpi_rank, mpierror) + 29 ! + 30 ! Initialize FORTRAN interface + 31 ! + 32 CALL h5open_f(error) + 33 + 34 ! + 35 ! Setup file access property list with parallel I/O access. + 36 ! + 37 CALL h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, error) 38 CALL h5pset_fapl_mpio_f(plist_id, comm, info, error) +\endcode + +The following example programs create an HDF5 file using Parallel HDF5: +C: file_create.c +F90: file_create.F90 + + +\subsection subsec_pintro_create_dset Creating and Accessing a Dataset with PHDF5 +The programming model for creating and accessing a dataset is as follows: +
    +
  1. Create or open a dataset with a collective call to: #H5Dcreate or #H5Dopen
  2. Obtain a copy of the file transfer property list and set it to use collective or independent I/O:
    • Do this by first passing a data transfer property list class type to: #H5Pcreate
    • Then set the data transfer mode to either use independent I/O access or to use collective I/O, with a call to: #H5Pset_dxpl_mpio

Following are the parameters required by this call:
C
\code
 herr_t H5Pset_dxpl_mpio (hid_t dxpl_id, H5FD_mpio_xfer_t xfer_mode )
   dxpl_id    IN: Data transfer property list identifier
   xfer_mode  IN: Transfer mode:
              H5FD_MPIO_INDEPENDENT - use independent I/O access (default)
              H5FD_MPIO_COLLECTIVE  - use collective I/O access
\endcode

Fortran
\code
 h5pset_dxpl_mpio_f (prp_id, data_xfer_mode, hdferr)
   prp_id          IN: Property List Identifier (INTEGER (HID_T))
   data_xfer_mode  IN: Data transfer mode (INTEGER)
                   H5FD_MPIO_INDEPENDENT_F (0)
                   H5FD_MPIO_COLLECTIVE_F (1)
   hdferr          IN: Error code (INTEGER)
\endcode
  3. Access the dataset with the defined transfer property list.
All processes that have opened a dataset may do collective I/O. Each process may do an independent
and arbitrary number of data I/O access calls, using: #H5Dwrite or #H5Dread

If a dataset is unlimited, you can extend it with a collective call to: #H5Dextend

The following code demonstrates a collective write using Parallel HDF5:
C
\code
  95      /*
  96       * Create property list for collective dataset write.
  97       */
  98      plist_id = H5Pcreate (H5P_DATASET_XFER);
  99      H5Pset_dxpl_mpio (plist_id, H5FD_MPIO_COLLECTIVE);
 100
 101      status = H5Dwrite (dset_id, H5T_NATIVE_INT, memspace, filespace,
 102                         plist_id, data);
\endcode

Fortran
\code
 108 ! Create property list for collective dataset write
 109 !
 110 CALL h5pcreate_f (H5P_DATASET_XFER_F, plist_id, error)
 111 CALL h5pset_dxpl_mpio_f (plist_id, H5FD_MPIO_COLLECTIVE_F, error)
 112
 113 !
 114 ! Write the dataset collectively.
 115 !
 116 CALL h5dwrite_f (dset_id, H5T_NATIVE_INTEGER, data, dimsfi, error, &
 117      file_space_id = filespace, mem_space_id = memspace, xfer_prp = plist_id)
\endcode

The following example programs create an HDF5 dataset using Parallel HDF5:
C: dataset.c
F90: dataset.F90


\subsubsection subsec_pintro_hyperslabs Hyperslabs
The programming model for writing and reading hyperslabs is:
\li Each process defines the memory and file hyperslabs.
\li Each process executes a partial write/read call which is either collective or independent.

The memory and file hyperslabs in the first step are defined with the #H5Sselect_hyperslab call.

The start (or offset), count, stride, and block parameters define the portion of the dataset
to write to. By changing the values of these parameters you can write hyperslabs with Parallel
HDF5 by contiguous hyperslab, by regularly spaced data in a column/row, by patterns, and by chunks:
+\li @subpage IntroParContHyperslab +
+\li @subpage IntroParRegularSpaced +
+\li @subpage IntroParPattern +
+\li @subpage IntroParChunk +
+ + +
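The pages above walk through each pattern in detail. As a rough orientation only, every variant reduces to the same two-step shape in C, sketched below with hypothetical variables; dset_id, memspace, plist_id, data, and the offset/stride/count/block arrays are assumed to be set up as in the linked example programs:
\code
    /* Minimal per-process sketch: select this process's portion of the dataset
     * in the file, then write to it with the chosen transfer property list. */
    hid_t filespace = H5Dget_space(dset_id);
    H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, stride, count, block);
    status = H5Dwrite(dset_id, H5T_NATIVE_INT, memspace, filespace, plist_id, data);
\endcode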
+Navigate back: \ref index "Main" / \ref GettingStarted + +*/ diff --git a/doxygen/dox/LearnBasics1.dox b/doxygen/dox/LearnBasics1.dox index a9b6d0e71fd..53c8e0aab99 100644 --- a/doxygen/dox/LearnBasics1.dox +++ b/doxygen/dox/LearnBasics1.dox @@ -642,7 +642,7 @@ See the programming example for an illustration of the use of these calls. \subsection subsecLBDsetCreateContent File Contents The contents of the file dset.h5 (dsetf.h5 for FORTRAN) are shown below: - +
Contents of dset.h5 ( dsetf.h5)Contents of dset.h5 (dsetf.h5)
\image html imgLBDsetCreate.gif diff --git a/doxygen/dox/UsersGuide.dox b/doxygen/dox/UsersGuide.dox index 53c8ad79d03..4b1a288ce39 100644 --- a/doxygen/dox/UsersGuide.dox +++ b/doxygen/dox/UsersGuide.dox @@ -126,7 +126,7 @@ HDF5 Release 1.10
  • \ref subsubsec_dataset_program_transfer
  • \ref subsubsec_dataset_program_read -\li \ref subsec_dataset_transfer Data Transfer +\li \ref subsec_dataset_transfer
    • \ref subsubsec_dataset_transfer_pipe
    • \ref subsubsec_dataset_transfer_filter diff --git a/doxygen/img/pchunk_figa.gif b/doxygen/img/pchunk_figa.gif new file mode 100644 index 00000000000..90b49c0cda7 Binary files /dev/null and b/doxygen/img/pchunk_figa.gif differ diff --git a/doxygen/img/pchunk_figb.gif b/doxygen/img/pchunk_figb.gif new file mode 100644 index 00000000000..c825fc31a8d Binary files /dev/null and b/doxygen/img/pchunk_figb.gif differ diff --git a/doxygen/img/pchunk_figc.gif b/doxygen/img/pchunk_figc.gif new file mode 100644 index 00000000000..9975a87669d Binary files /dev/null and b/doxygen/img/pchunk_figc.gif differ diff --git a/doxygen/img/pchunk_figd.gif b/doxygen/img/pchunk_figd.gif new file mode 100644 index 00000000000..45da389fa9d Binary files /dev/null and b/doxygen/img/pchunk_figd.gif differ diff --git a/doxygen/img/pcont_hy_figa.gif b/doxygen/img/pcont_hy_figa.gif new file mode 100644 index 00000000000..1417d170094 Binary files /dev/null and b/doxygen/img/pcont_hy_figa.gif differ diff --git a/doxygen/img/pcont_hy_figb.gif b/doxygen/img/pcont_hy_figb.gif new file mode 100644 index 00000000000..a3b637b0562 Binary files /dev/null and b/doxygen/img/pcont_hy_figb.gif differ diff --git a/doxygen/img/pcont_hy_figc.gif b/doxygen/img/pcont_hy_figc.gif new file mode 100644 index 00000000000..91bab7d7f0b Binary files /dev/null and b/doxygen/img/pcont_hy_figc.gif differ diff --git a/doxygen/img/pcont_hy_figd.gif b/doxygen/img/pcont_hy_figd.gif new file mode 100644 index 00000000000..2836b4fda92 Binary files /dev/null and b/doxygen/img/pcont_hy_figd.gif differ diff --git a/doxygen/img/ppatt_figa.gif b/doxygen/img/ppatt_figa.gif new file mode 100644 index 00000000000..5c86c93855f Binary files /dev/null and b/doxygen/img/ppatt_figa.gif differ diff --git a/doxygen/img/ppatt_figb.gif b/doxygen/img/ppatt_figb.gif new file mode 100644 index 00000000000..fe4e350ac9c Binary files /dev/null and b/doxygen/img/ppatt_figb.gif differ diff --git a/doxygen/img/ppatt_figc.gif b/doxygen/img/ppatt_figc.gif new file mode 100644 index 00000000000..aca8ef9d9bb Binary files /dev/null and b/doxygen/img/ppatt_figc.gif differ diff --git a/doxygen/img/ppatt_figd.gif b/doxygen/img/ppatt_figd.gif new file mode 100644 index 00000000000..e6c55c09fd4 Binary files /dev/null and b/doxygen/img/ppatt_figd.gif differ diff --git a/doxygen/img/preg_figa.gif b/doxygen/img/preg_figa.gif new file mode 100644 index 00000000000..0929bf4d505 Binary files /dev/null and b/doxygen/img/preg_figa.gif differ diff --git a/doxygen/img/preg_figb.gif b/doxygen/img/preg_figb.gif new file mode 100644 index 00000000000..33e57fc56c3 Binary files /dev/null and b/doxygen/img/preg_figb.gif differ diff --git a/doxygen/img/preg_figc.gif b/doxygen/img/preg_figc.gif new file mode 100644 index 00000000000..a4f98ffb1ed Binary files /dev/null and b/doxygen/img/preg_figc.gif differ diff --git a/doxygen/img/preg_figd.gif b/doxygen/img/preg_figd.gif new file mode 100644 index 00000000000..fe345fbca81 Binary files /dev/null and b/doxygen/img/preg_figd.gif differ diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt index 40aa14fe99c..e0cef5a1119 100644 --- a/fortran/src/CMakeLists.txt +++ b/fortran/src/CMakeLists.txt @@ -419,28 +419,32 @@ else () ) endif () +set (mod_export_files + h5fortran_types.mod + hdf5.mod + h5fortkit.mod + h5global.mod + h5a.mod + h5d.mod + h5e.mod + h5f.mod + h5g.mod + h5i.mod + h5l.mod + h5lib.mod + h5o.mod + h5p.mod + h5r.mod + h5s.mod + h5t.mod + h5z.mod + h5_gen.mod +) + if (BUILD_STATIC_LIBS) - set (mod_files - 
${MOD_BUILD_DIR}/h5fortran_types.mod - ${MOD_BUILD_DIR}/hdf5.mod - ${MOD_BUILD_DIR}/h5fortkit.mod - ${MOD_BUILD_DIR}/h5global.mod - ${MOD_BUILD_DIR}/h5a.mod - ${MOD_BUILD_DIR}/h5d.mod - ${MOD_BUILD_DIR}/h5e.mod - ${MOD_BUILD_DIR}/h5f.mod - ${MOD_BUILD_DIR}/h5g.mod - ${MOD_BUILD_DIR}/h5i.mod - ${MOD_BUILD_DIR}/h5l.mod - ${MOD_BUILD_DIR}/h5lib.mod - ${MOD_BUILD_DIR}/h5o.mod - ${MOD_BUILD_DIR}/h5p.mod - ${MOD_BUILD_DIR}/h5r.mod - ${MOD_BUILD_DIR}/h5s.mod - ${MOD_BUILD_DIR}/h5t.mod - ${MOD_BUILD_DIR}/h5z.mod - ${MOD_BUILD_DIR}/h5_gen.mod - ) + foreach (mod_file ${mod_export_files}) + set (mod_files ${mod_files} ${MOD_BUILD_DIR}/${mod_file}) + endforeach () install ( FILES ${mod_files} @@ -462,27 +466,9 @@ if (BUILD_STATIC_LIBS) endif () if (BUILD_SHARED_LIBS) - set (modsh_files - ${MODSH_BUILD_DIR}/h5fortran_types.mod - ${MODSH_BUILD_DIR}/hdf5.mod - ${MODSH_BUILD_DIR}/h5fortkit.mod - ${MODSH_BUILD_DIR}/h5global.mod - ${MODSH_BUILD_DIR}/h5a.mod - ${MODSH_BUILD_DIR}/h5d.mod - ${MODSH_BUILD_DIR}/h5e.mod - ${MODSH_BUILD_DIR}/h5f.mod - ${MODSH_BUILD_DIR}/h5g.mod - ${MODSH_BUILD_DIR}/h5i.mod - ${MODSH_BUILD_DIR}/h5l.mod - ${MODSH_BUILD_DIR}/h5lib.mod - ${MODSH_BUILD_DIR}/h5o.mod - ${MODSH_BUILD_DIR}/h5p.mod - ${MODSH_BUILD_DIR}/h5r.mod - ${MODSH_BUILD_DIR}/h5s.mod - ${MODSH_BUILD_DIR}/h5t.mod - ${MODSH_BUILD_DIR}/h5z.mod - ${MODSH_BUILD_DIR}/h5_gen.mod - ) + foreach (mod_file ${mod_export_files}) + set (modsh_files ${modsh_files} ${MODSH_BUILD_DIR}/${mod_file}) + endforeach () install ( FILES ${modsh_files} diff --git a/hl/fortran/src/CMakeLists.txt b/hl/fortran/src/CMakeLists.txt index 661f4ad3b41..5455d777d2a 100644 --- a/hl/fortran/src/CMakeLists.txt +++ b/hl/fortran/src/CMakeLists.txt @@ -235,16 +235,19 @@ endif () # Add file(s) to CMake Install #----------------------------------------------------------------------------- +set (mod_export_files + h5ds.mod + h5tb.mod + h5tb_const.mod + h5lt.mod + h5lt_const.mod + h5im.mod +) if (BUILD_STATIC_LIBS) - set (mod_files - ${MOD_BUILD_DIR}/h5ds.mod - ${MOD_BUILD_DIR}/h5tb.mod - ${MOD_BUILD_DIR}/h5tb_const.mod - ${MOD_BUILD_DIR}/h5lt.mod - ${MOD_BUILD_DIR}/h5lt_const.mod - ${MOD_BUILD_DIR}/h5im.mod - ) + foreach (mod_file ${mod_export_files}) + set (mod_files ${mod_files} ${MOD_BUILD_DIR}/${mod_file}) + endforeach () install ( FILES ${mod_files} @@ -265,14 +268,9 @@ if (BUILD_STATIC_LIBS) endif () endif () if (BUILD_SHARED_LIBS) - set (modsh_files - ${MODSH_BUILD_DIR}/h5ds.mod - ${MODSH_BUILD_DIR}/h5tb.mod - ${MODSH_BUILD_DIR}/h5tb_const.mod - ${MODSH_BUILD_DIR}/h5lt.mod - ${MODSH_BUILD_DIR}/h5lt_const.mod - ${MODSH_BUILD_DIR}/h5im.mod - ) + foreach (mod_file ${mod_export_files}) + set (modsh_files ${modsh_files} ${MODSH_BUILD_DIR}/${mod_file}) + endforeach () install ( FILES ${modsh_files} diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt index 6ed03cabce0..7542e8e1288 100644 --- a/java/examples/datasets/CMakeLists.txt +++ b/java/examples/datasets/CMakeLists.txt @@ -80,7 +80,7 @@ endforeach () if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) - set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) diff --git a/java/test/TestH5.java b/java/test/TestH5.java index 016c4714165..c5d945fd5fa 100644 --- a/java/test/TestH5.java +++ 
b/java/test/TestH5.java @@ -309,7 +309,7 @@ public void testH5set_free_list_limits() @Test public void testH5get_libversion() { - int libversion[] = {1, 10, 9}; + int libversion[] = {1, 10, 11}; try { H5.H5get_libversion(libversion); diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 0ab761a03bf..4207c6daac0 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1308,17 +1308,7 @@ endif () # Option to build documentation #----------------------------------------------------------------------------- if (DOXYGEN_FOUND) -# This cmake function requires that the non-default doxyfile settings are provided with set (DOXYGEN_xxx) commands -# In addition the doxyfile aliases @INCLUDE option is not supported and would need to be provided in a set (DOXYGEN_ALIASES) command. -# doxygen_add_docs (hdf5lib_doc -## ${common_SRCS} ${shared_gen_SRCS} ${H5_PUBLIC_HEADERS} ${H5_PRIVATE_HEADERS} ${H5_GENERATED_HEADERS} ${HDF5_DOXYGEN_DIR}/dox -# ${DOXYGEN_INPUT_DIRECTORY} -# ALL -# WORKING_DIRECTORY ${HDF5_SRC_DIR} -# COMMENT "Generating HDF5 library Source Documentation" -# ) - -# This custom target and doxygen/configure work together + # This custom target and doxygen/configure work together # Replace variables inside @@ with the current values add_custom_target (hdf5lib_doc ALL COMMAND ${DOXYGEN_EXECUTABLE} ${HDF5_BINARY_DIR}/Doxyfile