Merge pull request #4025 from lrknox/1_14_dev_sync3_lrk
Sync more develop changes to hdf5_1_14
lrknox authored Feb 16, 2024
2 parents 413d10f + 17a542f commit 034271b
Showing 21 changed files with 322 additions and 60 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/cmake-ctest.yml
@@ -79,7 +79,7 @@ jobs:
Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING -Destination ${{ runner.workspace }}/build114/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build114/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/README.md -Destination ${{ runner.workspace }}/build114/hdf5/
- Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/ci-StdShar-MSVC/* -Destination ${{ runner.workspace }}/build114/hdf5/ -Include *.zip
+ Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/build114/hdf5/ -Include *.zip
cd "${{ runner.workspace }}/build114"
7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip hdf5
shell: pwsh
@@ -147,7 +147,7 @@ jobs:
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build114/hdf5
- cp ${{ runner.workspace }}/hdf5/build114/ci-StdShar-GNUC/*.tar.gz ${{ runner.workspace }}/build114/hdf5
+ cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/*.tar.gz ${{ runner.workspace }}/build114/hdf5
cd "${{ runner.workspace }}/build114"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz hdf5
shell: bash
@@ -170,7 +170,7 @@
uses: actions/upload-artifact@v4
with:
name: docs-doxygen
- path: ${{ runner.workspace }}/hdf5/build114/ci-StdShar-GNUC/hdf5lib_docs/html
+ path: ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/hdf5lib_docs/html
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`

build_and_test_mac:
@@ -232,7 +232,7 @@ jobs:
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build114/hdf5
- cp ${{ runner.workspace }}/hdf5/build114/ci-StdShar-Clang/*.tar.gz ${{ runner.workspace }}/build114/hdf5
+ cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/*.tar.gz ${{ runner.workspace }}/build114/hdf5
cd "${{ runner.workspace }}/build114"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz hdf5
shell: bash
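These hunks swap the hardcoded ci-StdShar preset prefix for ${{ inputs.preset_name }}, which means cmake-ctest.yml must declare a matching workflow_call input. A minimal sketch of what that declaration would look like (the description text is an assumption; the declaration itself is not shown in this diff):

    on:
      workflow_call:
        inputs:
          preset_name:
            description: "CMake preset name prefix, e.g. ci-StdShar"  # assumed wording
            type: string
            required: true

Callers then pass the prefix explicitly, as daily-build.yml does below with preset_name: ci-StdShar.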
32 changes: 32 additions & 0 deletions .github/workflows/daily-build.yml
@@ -12,6 +12,26 @@ permissions:
# A workflow run is made up of one or more jobs that can run sequentially or
# in parallel.
jobs:
get-old-names:
runs-on: ubuntu-latest
outputs:
hdf5-name: ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}

steps:
- uses: actions/checkout@v4.1.1

- name: Get hdf5 release base name
uses: dsaltares/fetch-gh-release-asset@master
with:
version: 'tags/snapshot-1.14'
file: 'last-file.txt'

- name: Read base-name file
id: gethdf5base
run: echo "HDF5_NAME_BASE=$(cat last-file.txt)" >> $GITHUB_OUTPUT

- run: echo "hdf5 base name is ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}."

call-workflow-tarball:
uses: ./.github/workflows/tarball.yml
with:
@@ -23,6 +43,7 @@ jobs:
uses: ./.github/workflows/cmake-ctest.yml
with:
file_base: ${{ needs.call-workflow-tarball.outputs.file_base }}
+ preset_name: ci-StdShar
#use_tag: snapshot-1.14
#use_environ: snapshots
if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}
@@ -50,3 +71,14 @@ jobs:
use_environ: snapshots
if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}

call-workflow-remove:
needs: [get-old-names, call-workflow-tarball, call-workflow-ctest, call-workflow-abi, call-workflow-release]
permissions:
contents: write # In order to allow file deletion
uses: ./.github/workflows/remove-files.yml
with:
file_base: ${{ needs.get-old-names.outputs.hdf5-name }}
use_tag: snapshot-1.14
use_environ: snapshots
if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}
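The get-old-names job above assumes that an earlier daily run attached a last-file.txt asset, containing that run's file base name, to the snapshot-1.14 tag; call-workflow-remove then deletes the stale assets derived from that name. The step that records the name is not part of this diff; a hypothetical sketch of what it could look like in the release workflow (the job name and the softprops/action-gh-release action are assumptions):

    record-last-file:  # hypothetical job
      runs-on: ubuntu-latest
      permissions:
        contents: write
      steps:
        - name: Save the current base name for the next run
          run: echo "${{ inputs.file_base }}" > last-file.txt
        - name: Attach last-file.txt to the snapshot tag
          uses: softprops/action-gh-release@v1  # assumed release action
          with:
            tag_name: snapshot-1.14
            files: last-file.txt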

1 change: 0 additions & 1 deletion .github/workflows/release.yml
@@ -96,7 +96,6 @@ jobs:
use_environ: release

call-workflow-release:
#needs: [call-workflow-tarball, call-workflow-ctest]
needs: [log-the-inputs, create-files-ctest, call-workflow-ctest, call-workflow-abi]
permissions:
contents: write # In order to allow tag creation
60 changes: 60 additions & 0 deletions .github/workflows/remove-files.yml
@@ -0,0 +1,60 @@
name: hdf5 dev remove-files

# Controls when the action will run. Triggers the workflow on a schedule
on:
workflow_call:
inputs:
use_tag:
description: 'Release version tag'
type: string
required: false
default: snapshot
use_environ:
description: 'Environment to locate files'
type: string
required: true
default: snapshots
file_base:
description: "The common base name of the source tarballs"
required: true
type: string

# Minimal permissions to be inherited by any job that doesn't declare its own permissions
permissions:
contents: read

# Previous workflows must pass to get here so tag the commit that created the files
jobs:
PreRelease-delfiles:
runs-on: ubuntu-latest
environment: ${{ inputs.use_environ }}
permissions:
contents: write
steps:
- name: Get file base name
id: get-file-base
run: |
FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
- name: PreRelease delete from tag
id: delete_prerelease
if: ${{ (inputs.use_environ == 'snapshots') }}
uses: mknejp/delete-release-assets@v1
with:
token: ${{ github.token }}
tag: "${{ inputs.use_tag }}"
assets: |
${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_compat_report.html
${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_hl_compat_report.html
${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_cpp_compat_report.html
${{ steps.get-file-base.outputs.FILE_BASE }}-java_compat_report.html
${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip
${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_dataset.c
@@ -53,6 +53,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
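The same pair of FAPL calls is inserted into each parallel example below, so here is one self-contained sketch of the whole pattern for reference; it assumes a parallel (MPI-enabled) HDF5 build, and the file name and error handling are illustrative rather than taken from the examples:

    #include <stdbool.h>
    #include <mpi.h>
    #include <hdf5.h>

    int
    main(int argc, char **argv)
    {
        MPI_Init(&argc, &argv);

        /* Create a file access property list that uses MPI-IO */
        hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
        H5Pset_fapl_mpio(fapl_id, MPI_COMM_WORLD, MPI_INFO_NULL);

        /* Optional: collective metadata reads and writes, recommended at scale */
        H5Pset_all_coll_metadata_ops(fapl_id, true);
        H5Pset_coll_metadata_write(fapl_id, true);

        /* Create the file collectively, then release the property list */
        hid_t file_id = H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
        H5Pclose(fapl_id);

        if (file_id >= 0)
            H5Fclose(file_id);

        MPI_Finalize();
        return 0;
    }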
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_file_create.c
@@ -36,6 +36,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively.
*/
18 changes: 14 additions & 4 deletions HDF5Examples/C/H5PAR/ph5_filtered_writes.c
@@ -377,13 +377,23 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(fapl_id, comm, info);

/*
- * OPTIONAL: Set collective metadata reads on FAPL to allow
- *           parallel writes to filtered datasets to perform
- *           better at scale. While not strictly necessary,
- *           this is generally recommended.
+ * OPTIONAL: It is generally recommended to set collective
+ *           metadata reads on FAPL to perform metadata reads
+ *           collectively, which usually allows filtered datasets
+ *           to perform better at scale, although it is not
+ *           strictly necessary.
*/
H5Pset_all_coll_metadata_ops(fapl_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(fapl_id, true);

/*
* OPTIONAL: Set the latest file format version for HDF5 in
* order to gain access to different dataset chunk
18 changes: 14 additions & 4 deletions HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c
@@ -271,13 +271,23 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(fapl_id, comm, info);

/*
- * OPTIONAL: Set collective metadata reads on FAPL to allow
- *           parallel writes to filtered datasets to perform
- *           better at scale. While not strictly necessary,
- *           this is generally recommended.
+ * OPTIONAL: It is generally recommended to set collective
+ *           metadata reads on FAPL to perform metadata reads
+ *           collectively, which usually allows filtered datasets
+ *           to perform better at scale, although it is not
+ *           strictly necessary.
*/
H5Pset_all_coll_metadata_ops(fapl_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(fapl_id, true);

/*
* OPTIONAL: Set the latest file format version for HDF5 in
* order to gain access to different dataset chunk
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_hyperslab_by_chunk.c
@@ -64,6 +64,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_hyperslab_by_col.c
@@ -59,6 +59,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_hyperslab_by_pattern.c
@@ -64,6 +64,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_hyperslab_by_row.c
@@ -48,6 +48,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5example.c
@@ -269,6 +269,24 @@ phdf5writeInd(char *filename)
assert(ret != FAIL);
MESG("H5Pset_fapl_mpio succeed");

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(acc_tpl1, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(acc_tpl1, true);

/* create the file collectively */
fid1 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl1);
assert(fid1 != FAIL);
(Diffs for the remaining 8 of the 21 changed files did not load and are not shown.)
