diff --git a/java/src/jni/CMakeLists.txt b/java/src/jni/CMakeLists.txt
index cd5320b9319..8f8b21a6478 100644
--- a/java/src/jni/CMakeLists.txt
+++ b/java/src/jni/CMakeLists.txt
@@ -106,7 +106,7 @@ if (HDF5_EXPORTED_TARGETS)
           ${HDF5_EXPORTED_TARGETS}
       LIBRARY DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
       ARCHIVE DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
-      RUNTIME DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
+      RUNTIME DESTINATION ${HDF5_INSTALL_BIN_DIR} COMPONENT libraries
       FRAMEWORK DESTINATION ${HDF5_INSTALL_FWRK_DIR} COMPONENT libraries
       INCLUDES DESTINATION include
   )
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index b226664343a..32694fcb4ca 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -121,6 +121,29 @@ Bug Fixes since HDF5-1.14.1 release
 ===================================
 
     Library
     -------
+    - Fixed CVE-2018-13867
+
+      A corrupt file containing an invalid local heap datablock address
+      could trigger an assert failure when the metadata cache attempted
+      to load the datablock from storage.
+
+      The local heap now verifies that the datablock address is valid
+      when the local heap header information is parsed.
+
+    - Fixed CVE-2018-11202
+
+      A malformed file could result in chunk index memory leaks. Under most
+      conditions (i.e., when the --enable-using-memchecker option is NOT
+      used), this would result in a small memory leak and an infinite loop
+      and abort when shutting down the library. The infinite loop would be
+      due to the "free list" package not being able to clear its resources,
+      so the library couldn't shut down. When the "using a memory checker"
+      option is used, the free lists are disabled, so there is just a memory
+      leak with no abort on library shutdown.
+
+      The chunk index resources are now correctly cleaned up when parsing
+      malformed files, and valgrind confirms no memory leaks.
+
     - Fixed an issue where an assert statement was converted to an incorrect
       error check statement
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index 7eba50cb4a1..be8ded47223 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -880,6 +880,7 @@ H5D__chunk_init(H5F_t *f, const H5D_t *const dset, hid_t dapl_id)
     H5D_rdcc_t          *rdcc = &(dset->shared->cache.chunk); /* Convenience pointer to dataset's chunk cache */
     H5P_genplist_t      *dapl;                                /* Data access property list object pointer */
     H5O_storage_chunk_t *sc = &(dset->shared->layout.storage.u.chunk);
+    bool                 idx_init  = false;
     herr_t               ret_value = SUCCEED; /* Return value */
 
     FUNC_ENTER_PACKAGE
@@ -955,12 +956,20 @@
     /* Allocate any indexing structures */
     if (sc->ops->init && (sc->ops->init)(&idx_info, dset->shared->space, dset->oloc.addr) < 0)
         HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't initialize indexing information");
+    idx_init = true;
 
     /* Set the number of chunks in dataset, etc. */
     if (H5D__chunk_set_info(dset) < 0)
         HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to set # of chunks for dataset");
 
 done:
+    if (FAIL == ret_value) {
+        if (rdcc->slot)
+            rdcc->slot = H5FL_SEQ_FREE(H5D_rdcc_ent_ptr_t, rdcc->slot);
+
+        if (idx_init && sc->ops->dest && (sc->ops->dest)(&idx_info) < 0)
+            HDONE_ERROR(H5E_DATASET, H5E_CANTFREE, FAIL, "unable to release chunk index info");
+    }
 
     FUNC_LEAVE_NOAPI(ret_value)
 } /* end H5D__chunk_init() */
diff --git a/src/H5HLcache.c b/src/H5HLcache.c
index 653590dbc96..377bc0fa3d5 100644
--- a/src/H5HLcache.c
+++ b/src/H5HLcache.c
@@ -194,6 +194,12 @@ H5HL__hdr_deserialize(H5HL_t *heap, const uint8_t *image, size_t len, H5HL_cache
             HGOTO_ERROR(H5E_HEAP, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding");
     H5F_addr_decode_len(udata->sizeof_addr, &image, &(heap->dblk_addr));
 
+    /* Check that the datablock address is valid (might not be true
+     * in a corrupt file)
+     */
+    if (!H5_addr_defined(heap->dblk_addr))
+        HGOTO_ERROR(H5E_HEAP, H5E_BADVALUE, FAIL, "bad datablock address");
+
 done:
     FUNC_LEAVE_NOAPI(ret_value)
 } /* end H5HL__hdr_deserialize() */
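
A quick way to exercise both fixed code paths through the public API is sketched
below. This is only an illustration, not part of the patch: "corrupt.h5" and
"/dset" are placeholders for a fuzzed input file and a chunked dataset inside it.
With the changes above, a bad local heap datablock address (CVE-2018-13867) or a
chunk index that fails to initialize (CVE-2018-11202) should surface as an
ordinary error return instead of an assert failure or a leak plus an abort at
library shutdown.

    /* Hypothetical test harness -- file and dataset names are placeholders */
    #include <stdio.h>
    #include "hdf5.h"

    int
    main(void)
    {
        hid_t file_id = H5I_INVALID_HID;
        hid_t dset_id = H5I_INVALID_HID;

        /* Opening a malformed file may legitimately fail; failing cleanly
         * (no assert) is the expected behavior after this change. */
        file_id = H5Fopen("corrupt.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
        if (file_id < 0) {
            fprintf(stderr, "H5Fopen failed cleanly\n");
            return 1;
        }

        /* Opening a chunked dataset runs H5D__chunk_init(); if that fails,
         * the new 'done' block frees the chunk slot array and destroys the
         * partially initialized chunk index. */
        dset_id = H5Dopen2(file_id, "/dset", H5P_DEFAULT);
        if (dset_id >= 0)
            H5Dclose(dset_id);

        H5Fclose(file_id);

        /* Library shutdown should no longer spin in the free-list code,
         * and valgrind should report no chunk index leaks. */
        return 0;
    }

Running such a harness under valgrind (for example, valgrind --leak-check=full
./a.out) is one way to repeat the leak check mentioned in the RELEASE.txt entry
for CVE-2018-11202.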