diff --git a/configs/common/packages.yaml b/configs/common/packages.yaml index 866a2d7f5..55aefab47 100644 --- a/configs/common/packages.yaml +++ b/configs/common/packages.yaml @@ -20,7 +20,7 @@ version: [1.78.0] variants: ~atomic +chrono +date_time +exception +filesystem ~graph ~iostreams ~locale ~log ~math ~mpi ~numpy +pic +program_options +python ~random +regex +serialization ~signals +system +test +thread +timer ~wave cxxstd=14 visibility=hidden bufr: - version: [11.7.1] + version: [12.0.0] variants: +python # Newer versions of CDO require the C++-17 standard, which doesn't # work with all compilers that are currently in use in spack-stack @@ -42,7 +42,7 @@ version: [5.8.4] variants: +ui eckit: - version: [1.23.0] + version: [1.23.1] variants: linalg=eigen,lapack compression=lz4,bzip2 ecmwf-atlas: version: [0.33.0] @@ -70,7 +70,7 @@ version: [1.1.0] fms: version: [2023.01] - variants: precision=32,64 +quad_precision +gfs_phys +openmp +pic constants=GFS + variants: precision=32,64 +quad_precision +gfs_phys +openmp +pic constants=GFS build_type=Release g2: version: [3.4.5] g2c: @@ -248,6 +248,7 @@ version: [5.15.3] scotch: version: [7.0.3] + variants: +mpi+metis~shared~threads~mpi_thread+noarch sfcio: version: [1.4.1] shumlib: diff --git a/configs/containers/README.md b/configs/containers/README.md index 393a6485e..3f9e4df8f 100644 --- a/configs/containers/README.md +++ b/configs/containers/README.md @@ -2,11 +2,11 @@ To avoid hardcoding specs in the generic container recipes, we keep the specs list empty (`specs: []`) and manually add the specs for the particular spack-stack release and application as listed below, *after* running `spack stack create ctr`. 
-### spack-stack-1.4.0 / skylab-5.0.0 containers for fv3-jedi and mpas-jedi (but not for ufs-jedi) +### spack-stack-1.4.1 / skylab-5.0.0 containers for fv3-jedi and mpas-jedi (but not for ufs-jedi) ``` specs: [base-env@1.0.0, jedi-base-env@1.0.0 ~fftw, ewok-env@1.0.0, jedi-fv3-env@1.0.0, - jedi-mpas-env@1.0.0, bacio@2.4.1, bison@3.8.2, bufr@11.7.1, ecbuild@3.7.2, eccodes@2.27.0, ecflow@5, - eckit@1.23.0, ecmwf-atlas@0.33.0 +trans ~fftw, ectrans@1.2.0 ~fftw, eigen@3.4.0, + jedi-mpas-env@1.0.0, bacio@2.4.1, bison@3.8.2, bufr@12.0.0, ecbuild@3.7.2, eccodes@2.27.0, ecflow@5, + eckit@1.23.1, ecmwf-atlas@0.33.0 +trans ~fftw, ectrans@1.2.0 ~fftw, eigen@3.4.0, fckit@0.10.1, fms@release-jcsda, g2@3.4.5, g2tmpl@1.10.0, gftl-shared@1.5.0, gsibec@1.1.2, hdf@4.2.15, hdf5@1.14.1-2, ip@3.3.3, jasper@2.0.32, jedi-cmake@1.4.0, libpng@1.6.37, nccmp@1.9.0.1, netcdf-c@4.9.2, netcdf-cxx4@4.3.1, @@ -27,14 +27,14 @@ To avoid hardcoding specs in the generic container recipes, we keep the specs li # py-mysql-connector-python@8.0.32 ``` -### spack-stack-1.4.0 / ufs-weather-model-x.y.z containers for ufs-weather-model as of May 18, 2023 +### spack-stack-1.4.1 / ufs-weather-model-x.y.z containers for ufs-weather-model as of July 5, 2023 **Note. This is not yet working correctly, some libraries are missing. Please do not use yet! 
Also, if using the clang-mpich container, need to disable openmp for fms, not clear how to do this cleanly.** ``` specs: [base-env@1.0.0, - bacio@2.4.1, bison@3.8.2, bufr@11.7.1, ecbuild@3.7.2, eccodes@2.27.0, ecflow@5, - eckit@1.23.0, ecmwf-atlas@0.33.0 +trans ~fftw, ectrans@1.2.0 ~fftw, eigen@3.4.0, + bacio@2.4.1, bison@3.8.2, bufr@12.0.0, ecbuild@3.7.2, eccodes@2.27.0, ecflow@5, + eckit@1.23.1, ecmwf-atlas@0.33.0 +trans ~fftw, ectrans@1.2.0 ~fftw, eigen@3.4.0, fckit@0.10.1, fms@2023.01, g2@3.4.5, g2tmpl@1.10.0, gftl-shared@1.5.0, gsibec@1.1.2, hdf@4.2.15, hdf5@1.14.1-2, ip@3.3.3, jasper@2.0.32, jedi-cmake@1.4.0, libpng@1.6.37, nccmp@1.9.0.1, netcdf-c@4.9.2, netcdf-cxx4@4.3.1, diff --git a/configs/sites/gaea-c5/packages.yaml b/configs/sites/gaea-c5/packages.yaml index 75877520f..3ae3afb77 100644 --- a/configs/sites/gaea-c5/packages.yaml +++ b/configs/sites/gaea-c5/packages.yaml @@ -80,7 +80,7 @@ packages: buildable: False externals: - spec: ecflow@5.8.4+ui+static_boost - prefix: /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/ecflow-5.8.4-c5 + prefix: /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/ecflow-5.8.4 modules: [ecflow/5.8.4] file: externals: @@ -174,7 +174,7 @@ packages: buildable: False externals: - spec: mysql@8.0.31 - prefix: /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/mysql-8.0.31-c5 + prefix: /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/mysql-8.0.31 modules: [mysql/8.0.31] ncurses: externals: @@ -204,7 +204,7 @@ packages: qt: externals: - spec: qt@5.15.2 - prefix: /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/qt-5.15.2-c5/5.15.2/gcc_64 + prefix: /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/qt-5.15.2/5.15.2/gcc_64 rdma-core: externals: - spec: rdma-core@37.0 diff --git a/configs/sites/hercules/mirrors.yaml b/configs/sites/hercules/mirrors.yaml index 19890e6d8..97f382e8e 100644 --- a/configs/sites/hercules/mirrors.yaml +++ b/configs/sites/hercules/mirrors.yaml @@ -1,7 +1,7 @@ mirrors: local-source: fetch: - url: 
file:///work/noaa/da/role-da/spack-stack/source-cache + url: file:///work/noaa/epic/role-epic/spack-stack/source-cache access_pair: - null - null @@ -9,7 +9,7 @@ mirrors: profile: null endpoint_url: null push: - url: file:///work/noaa/da/role-da/spack-stack/source-cache + url: file:///work/noaa/epic/role-epic/spack-stack/source-cache access_pair: - null - null diff --git a/configs/sites/hercules/packages.yaml b/configs/sites/hercules/packages.yaml index 286c2bc55..e29456191 100644 --- a/configs/sites/hercules/packages.yaml +++ b/configs/sites/hercules/packages.yaml @@ -3,7 +3,7 @@ packages: compiler:: [intel@2021.7.1, gcc@11.3.1] #compiler:: [oneapi@2022.2.1] providers: - mpi:: [intel-oneapi-mpi@2021.7.1, openmpi@4.1.4] + mpi:: [intel-oneapi-mpi@2021.7.1, openmpi@4.1.5] ### MPI, Python, MKL mpi: @@ -20,11 +20,11 @@ packages: # - intel-oneapi-mpi/2021.7.1 openmpi: externals: - - spec: openmpi@4.1.4%gcc@11.3.1 ~cuda~cxx~cxx_exceptions~java~memchecker+pmi+static~wrapper-rpath + - spec: openmpi@4.1.5%gcc@11.3.1~cuda~cxx~cxx_exceptions~java~memchecker+pmi~static~wrapper-rpath fabrics=ucx schedulers=slurm - prefix: /apps/spack-managed/gcc-11.3.1/openmpi-4.1.4-ruvlmb6yyvzbzbiqaov4zk75ogthczsp + prefix: /work/noaa/epic/role-epic/spack-stack/hercules/openmpi-4.1.5/gcc-11.3.1 modules: - - openmpi/4.1.4 + - openmpi/4.1.5 python: buildable: False externals: @@ -74,9 +74,9 @@ packages: buildable: False externals: - spec: ecflow@5.8.4+ui+static_boost - prefix: /work/noaa/epic-ps/role-epic-ps/spack-stack/ecflow-5.8.4-hercules + prefix: /work/noaa/epic/role-epic/spack-stack/hercules/ecflow-5.8.4 modules: - - ecflow/5.8.4-hercules + - ecflow/5.8.4 findutils: externals: - spec: findutils@4.8.0 @@ -119,9 +119,9 @@ packages: buildable: False externals: - spec: mysql@8.0.31 - prefix: /work/noaa/epic-ps/role-epic-ps/spack-stack/mysql-8.0.31-hercules + prefix: /work/noaa/epic/role-epic/spack-stack/hercules/mysql-8.0.31 modules: - - mysql/8.0.31-hercules + - mysql/8.0.31 openssh: 
externals: - spec: openssh@8.7p1 @@ -164,3 +164,10 @@ packages: externals: - spec: wget@1.21.1 prefix: /usr + # Need to use external zlib, because of qt dependence on it (otherwise issues with tar command) + zlib: + externals: + - spec: zlib@1.2.13 + prefix: /apps/spack-managed/gcc-11.3.1/zlib-1.2.13-ltp4c3zzde3zi3gf7x4b7c7nj5ww4i4g + modules: + - zlib/1.2.13 diff --git a/configs/sites/orion/mirrors.yaml b/configs/sites/orion/mirrors.yaml index 19890e6d8..97f382e8e 100644 --- a/configs/sites/orion/mirrors.yaml +++ b/configs/sites/orion/mirrors.yaml @@ -1,7 +1,7 @@ mirrors: local-source: fetch: - url: file:///work/noaa/da/role-da/spack-stack/source-cache + url: file:///work/noaa/epic/role-epic/spack-stack/source-cache access_pair: - null - null @@ -9,7 +9,7 @@ mirrors: profile: null endpoint_url: null push: - url: file:///work/noaa/da/role-da/spack-stack/source-cache + url: file:///work/noaa/epic/role-epic/spack-stack/source-cache access_pair: - null - null diff --git a/doc/modulefile_templates/openmpi b/doc/modulefile_templates/openmpi index 02ad98463..b13398bbb 100644 --- a/doc/modulefile_templates/openmpi +++ b/doc/modulefile_templates/openmpi @@ -31,7 +31,7 @@ unsetenv SLURM_EXPORT_ENV setenv PSM2_PATH_SELECTION "static_base" setenv SLURM_CPU_BIND "none" -# Settings specific for Cheyenne +# Settings specific for Cheyenne and Hercules setenv MPI_ROOT ${OPENMPI_PATH} setenv UCX_MAX_RNDV_RAILS "1" setenv OMPI_MCA_btl "^openib" diff --git a/doc/source/MaintainersSection.rst b/doc/source/MaintainersSection.rst index 0b8564c30..d1dc3923d 100644 --- a/doc/source/MaintainersSection.rst +++ b/doc/source/MaintainersSection.rst @@ -63,6 +63,9 @@ Sign into qt, select customized installation, choose qt@5.15.2 only (uncheck all .. note:: On air-gapped systems, the above method may not work (we have not encountered such a system so far). +.. 
note:: +   If ``./qt-unified-linux-x64-online.run`` fails to start with the error ``qt.qpa.xcb: could not connect to display`` and a role account is being used, follow the procedure described in https://www.thegeekdiary.com/how-to-set-x11-forwarding-export-remote-display-for-users-who-switch-accounts-using-sudo to export the display. A possible warning ``xauth: file /ncrc/home1/role.epic/.Xauthority does not exist`` can be ignored, since this file gets created by the ``xauth`` command. + .. _MaintainersSection_ecFlow: ------------------------------ @@ -243,7 +246,7 @@ MSU Hercules ------------------------------ ecflow -   ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library, using an available ``Qt5`` installation. After loading the following modules, follow the instructions in :numref:`Section %s ` to install ``ecflow`` in ``/work/noaa/epic-ps/role-epic-ps/spack-stack/ecflow-5.8.4-hercules``. +   ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library, using an available ``Qt5`` installation. After loading the following modules, follow the instructions in :numref:`Section %s ` to install ``ecflow`` in ``/work/noaa/epic/role-epic/spack-stack/hercules/ecflow-5.8.4``. .. code-block:: console @@ -253,6 +256,22 @@ ecflow mysql ``mysql`` must be installed separately from ``spack`` using a binary tarball provided by the MySQL community. Follow the instructions in :numref:`Section %s ` to install ``mysql`` in ``/work/noaa/epic-ps/role-epic-ps/spack-stack/mysql-8.0.31-hercules``. +openmpi +   ``openmpi`` must be built against the same ``zlib`` that ``qt`` uses; load the ``zlib`` module directly (check the ``qt`` module for the matching version) + +.. code-block:: console + +   module purge +   module load zlib/1.2.13 +   module load ucx/1.13.1 +   ./configure \ +       --prefix=/work/noaa/epic/role-epic/spack-stack/hercules/openmpi-4.1.5/gcc-11.3.1 \ +       --with-ucx=$UCX_ROOT \ +       --with-zlib=$ZLIB_ROOT +   make VERBOSE=1 -j4 +   make check +   make install + ..
_MaintainersSection_Discover: ------------------------------ @@ -538,17 +557,8 @@ NOAA RDHPCS Gaea C5 On Gaea C5, ``miniconda``, ``qt``, ``ecflow``, and ``mysql`` need to be installed as a one-off before spack can be used. -miniconda - Follow the instructions in :numref:`Section %s ` to create a basic ``miniconda`` installation and associated modulefile for working with spack. Don't forget to log off and back on to forget about the conda environment. Use the following workaround to avoid the terminal being spammed by error messages about missing version information (``/usr/bin/lua5.3: /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/miniconda-3.9.12-c5/lib/libtinfo.so.6: no version information available (required by /lib64/libreadline.so.7)``): - -.. code-block:: console - - cd /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/miniconda-3.9.12-c5/lib - mv libtinfow.so.6.3 libtinfow.so.6.3.conda.original - ln -sf /lib64/libtinfo.so.6 libtinfow.so.6.3 - qt (qt@5) - The default ``qt@5`` in ``/usr`` is incomplete and thus insufficient for building ``ecflow``. After loading/unloading the modules as shown below, refer to :numref:`Section %s ` to install ``qt@5.15.2`` in ``/lustre/f2/dev/wpo/role.epic/contrib/spack-stack/qt-5.15.2-c5``. Note that the installation must be done as a regular user due to problems with graphical applications for role accounts. + The default ``qt@5`` in ``/usr`` is incomplete and thus insufficient for building ``ecflow``. After loading/unloading the modules as shown below, refer to :numref:`Section %s ` to install ``qt@5.15.2`` in ``/lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/qt-5.15.2``. :numref:`Section %s ` describes how to export the X windows environment in order to install ``qt@5`` using the role account. .. code-block:: console @@ -557,7 +567,7 @@ qt (qt@5) module load PrgEnv-gnu/8.3.3 ecflow - ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. 
After installing `qt5` and loading the following modules, follow the instructions in :numref:`Section %s `. Because of the dependency on ``miniconda``, that module must be loaded automatically in the ``ecflow`` module (similar to ``qt@5.15.2-c5``). Ensure to follow the extra instructions in that section for Gaea C5. +   ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After installing `qt5` and loading the following modules, follow the instructions in :numref:`Section %s ` to install ``ecflow`` in ``/lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/ecflow-5.8.4``. Because of the dependency on ``miniconda``, that module must be loaded automatically in the ``ecflow`` module (similar to ``qt@5.15.2-c5``). Ensure to follow the extra instructions in that section for Gaea C5. @@ -568,11 +578,11 @@ ecflow module load PrgEnv-gnu/8.3.3 module load python/3.9.12 -   module use /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/modulefiles-c5 +   module use /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/modulefiles module load qt/5.15.2 mysql -   ``mysql`` must be installed separately from ``spack`` using a binary tarball provided by the MySQL community. Follow the instructions in :numref:`Section %s ` to install ``mysql`` in ``/lustre/f2/dev/wpo/role.epic/contrib/spack-stack/mysql-8.0.31-c5``. +   ``mysql`` must be installed separately from ``spack`` using a binary tarball provided by the MySQL community. Follow the instructions in :numref:`Section %s ` to install ``mysql`` in ``/lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/mysql-8.0.31``. ..
_MaintainersSection_Hera: diff --git a/doc/source/PreConfiguredSites.rst b/doc/source/PreConfiguredSites.rst index 37adbb85c..91094c985 100644 --- a/doc/source/PreConfiguredSites.rst +++ b/doc/source/PreConfiguredSites.rst @@ -8,23 +8,19 @@ Directory ``configs/sites`` contains site configurations for several HPC systems Pre-configured sites are split into two categories: Tier 1 with officially supported spack-stack installations (see :numref:`Section %s `), and Tier 2 (sites with configuration files that were tested or contributed by others in the past, but that are not officially supported by the spack-stack team; see :numref:`Section %s `). ============================================================= -Officially supported spack-stack 1.4.0 installations (tier 1) +Officially supported spack-stack 1.4.1 installations (tier 1) ============================================================= -Ready-to-use spack-stack 1.4.0 installations are available on the following, fully supported platforms. This version supports the JEDI Skylab release 5 of June 2023, and can be used for testing spack-stack with UFS applications (e.g. the UFS Weather Model, the UFS Short Range Weather Application, and the EMC Global Workflow). Amazon Web Services AMI are available in the US East 1 or 2 regions. - -.. note:: - - ``spack-stack-1.4.0`` was originally created with ``hdf5@1.14.1-2``. It turned out that there was a problem with the Fortran compiled module files when using Intel compilers to build this version of ``hdf5`` in spack-stack (see https://github.com/spack/spack/issues/37955). We therefore rebuilt ``spack-stack-1.4.0`` with ``hdf5@1.14.0`` on platforms using Intel or Intel+GNU. If those environments already had an installation with ``hdf5@1.14.1-2``, we named the environment with ``hdf5@1.14.0`` either ``unified-env-v2`` or ``unified-env-hdf5-1.14.0``. +Ready-to-use spack-stack 1.4.1 installations are available on the following, fully supported platforms. 
This version supports the JEDI Skylab release 5 of June 2023, and the UFS Weather Model of July 2023. It can also be used for testing spack-stack with other UFS applications (e.g. the UFS Short Range Weather Application, and the EMC Global Workflow). Note that some platforms have not received the 1.4.1 installations, for these the previous 1.4.0 installations are listed below. Amazon Web Services AMI are available in the US East 1 or 2 regions for the previous 1.4.0 release (1.4.1 is not provided on AWS AMIs). +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ | System | Maintainers | Location | +============================================================+===============================+==============================================================================================================+ -| MSU Orion Intel/GNU | Cam Book / Dom Heinzeller | ``/work/noaa/epic-ps/role-epic-ps/spack-stack/spack-stack-1.4.0/envs/unified-env-v2`` | +| MSU Orion Intel/GNU | Cam Book / Dom Heinzeller | ``/work/noaa/epic/role-epic/spack-stack/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| MSU Hercules Intel/GNU^* | Cam Book / Dom Heinzeller | ``/work/noaa/epic-ps/role-epic-ps/spack-stack/spack-stack-1.4.0-hercules/envs/unified-env-v2`` | +| MSU Hercules Intel/GNU^* | Cam Book / Dom Heinzeller | ``/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| NASA Discover Intel/GNU | Dom Heinzeller / ??? 
| ``/gpfsm/dswdev/jcsda/spack-stack/spack-stack-1.4.0/envs/unified-env-v2`` | +| NASA Discover Intel/GNU | Dom Heinzeller / ??? | ``/gpfsm/dswdev/jcsda/spack-stack/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ | NAVY HPCMP Narwhal Intel^** | Dom Heinzeller / ??? | ``/p/app/projects/NEPTUNE/spack-stack/spack-stack-1.4.0/envs/unified-env-intel-2021.4.0-hdf5-1.14.0`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ @@ -34,31 +30,31 @@ Ready-to-use spack-stack 1.4.0 installations are available on the following, ful +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ | NAVY HPCMP Nautilus AMD clang/flang | Dom Heinzeller / ??? | **currently not supported** | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| NCAR-Wyoming Casper Intel | Dom Heinzeller / ??? | ``/glade/work/epicufsrt/contrib/spack-stack/spack-stack-1.4.0-casper/envs/unified-env-v2`` | +| NCAR-Wyoming Casper Intel | Dom Heinzeller / ??? 
| ``/glade/work/epicufsrt/contrib/spack-stack/casper/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| NCAR-Wyoming Cheyenne Intel/GNU | Cam Book / Dom Heinzeller | ``/glade/work/epicufsrt/contrib/spack-stack/spack-stack-1.4.0/envs/unified-env-v2`` | +| NCAR-Wyoming Cheyenne Intel/GNU | Cam Book / Dom Heinzeller | ``/glade/work/epicufsrt/contrib/spack-stack/cheyenne/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| NOAA Parallel Works (AWS, Azure, Gcloud) Intel | Mark Potts / Cam Book | **will be added later (on develop)** | +| NOAA Parallel Works (AWS, Azure, Gcloud) Intel | Mark Potts / Cam Book | ``/contrib/EPIC/spack-stack/spack-stack-1.4.1/envs/unified-dev`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| NOAA Acorn Intel | Hang Lei / Alex Richert | ``/lfs/h1/emc/nceplibs/noscrub/spack-stack/spack-stack-1.4.0/envs/unified-env`` | +| NOAA Acorn Intel | Hang Lei / Alex Richert | ``/lfs/h1/emc/nceplibs/noscrub/spack-stack/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| NOAA RDHPCS Gaea C4 Intel | Dom Heinzeller / ??? | ``/lustre/f2/dev/wpo/role.epic/contrib/spack-stack/spack-stack-1.4.0-c4/envs/unified-env-v2`` | +| NOAA RDHPCS Gaea C4 Intel | Dom Heinzeller / ??? 
| ``/lustre/f2/dev/wpo/role.epic/contrib/spack-stack/spack-stack-1.4.1-c4/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| NOAA RDHPCS Gaea C5 Intel | Dom Heinzeller / ??? | ``/lustre/f2/dev/wpo/role.epic/contrib/spack-stack/spack-stack-1.4.0-c5/envs/unified-env-v2`` | +| NOAA RDHPCS Gaea C5 Intel | Dom Heinzeller / ??? | ``/lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| NOAA RDHPCS Hera Intel/GNU | Mark Potts / Dom Heinzeller | ``/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.4.0/envs/unified-env-v2`` | +| NOAA RDHPCS Hera Intel/GNU | Mark Potts / Dom Heinzeller | ``/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| NOAA RDHPCS Jet Intel/GNU | Cam Book / Dom Heinzeller | ``/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.4.0/envs/unified-env-v2`` | +| NOAA RDHPCS Jet Intel/GNU | Cam Book / Dom Heinzeller | ``/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| UW (Univ. of Wisc.) S4 Intel | Dom Heinzeller / Mark Potts | ``/data/prod/jedi/spack-stack/spack-stack-1.4.0/envs/unified-env-v2`` | +| UW (Univ. of Wisc.) 
S4 Intel | Dom Heinzeller / Mark Potts | ``/data/prod/jedi/spack-stack/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ | UW (Univ. of Wisc.) S4 GNU^** | Dom Heinzeller / Mark Potts | **currently not supported** | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Amazon Web Services Parallelcluster Ubuntu 20.04 Intel/GNU | Dom Heinzeller / ??? | ``/mnt/experiments-efs/skylab-v5/spack-stack-1.4.0/envs/unified-env-v2`` | +| Amazon Web Services Parallelcluster Ubuntu 20.04 Intel/GNU | Dom Heinzeller / ??? | ``/mnt/experiments-efs/skylab-v5/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ | Amazon Web Services AMI Red Hat 8 GNU | Dom Heinzeller / ??? | ``/home/ec2-user/spack-stack/spack-stack-1.4.0/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Amazon Web Services AMI Ubuntu 20 GNU | Dom Heinzeller / ??? | ``/home/ubuntu/spack-stack/spack-stack-1.4.0/envs/unified-env`` | +| Amazon Web Services AMI Ubuntu 20 GNU | Dom Heinzeller / ??? 
| ``/mnt/experiments-efs/skylab-v5/spack-stack-1.4.1/envs/unified-env`` | +------------------------------------------------------------+-------------------------------+--------------------------------------------------------------------------------------------------------------+ ^* This system uses a different wgrib2 version 3.1.1 than the default 2.0.8. @@ -87,23 +83,21 @@ The following is required for building new spack environments and for using spac module load ecflow/5.8.4 module load mysql/8.0.31 -For ``spack-stack-1.4.0`` with Intel, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /work/noaa/epic-ps/role-epic-ps/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /work/noaa/epic/role-epic/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/2022.0.2 module load stack-intel-oneapi-mpi/2021.5.1 module load stack-python/3.9.7 module available -For ``spack-stack-1.4.0`` with GNU, load the following modules after loading miniconda and ecflow: - -TODO TEST +For ``spack-stack-1.4.1`` with GNU, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /work/noaa/epic-ps/role-epic-ps/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /work/noaa/epic/role-epic/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-gcc/10.2.0 module load stack-openmpi/4.0.4 module load stack-python/3.9.7 @@ -118,27 +112,27 @@ The following is required for building new spack environments and for using spac .. 
code-block:: console module purge - module use /work/noaa/epic-ps/role-epic-ps/spack-stack/modulefiles - module load ecflow/5.8.4-hercules - module load mysql/8.0.31-hercules + module use /work/noaa/epic/role-epic/spack-stack/hercules/modulefiles + module load ecflow/5.8.4 + module load mysql/8.0.31 -For ``spack-stack-1.4.0`` with Intel, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /work/noaa/epic-ps/role-epic-ps/spack-stack/spack-stack-1.4.0-hercules/envs/unified-env-v2/install/modulefiles/Core + module use /work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/2021.7.1 module load stack-intel-oneapi-mpi/2021.7.1 module load stack-python/3.9.14 module available -For ``spack-stack-1.4.0`` with GNU, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with GNU, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /work/noaa/epic-ps/role-epic-ps/spack-stack/spack-stack-1.4.0-hercules/envs/unified-env-v2/install/modulefiles/Core + module use /work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-gcc/11.3.1 - module load stack-openmpi/4.1.4 + module load stack-openmpi/4.1.5 module load stack-python/3.9.14 module available @@ -158,21 +152,21 @@ The following is required for building new spack environments and for using spac module load ecflow/5.8.4 module load mysql/8.0.31 -For ``spack-stack-1.4.0`` with Intel, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda and ecflow: .. 
code-block:: console - module use /gpfsm/dswdev/jcsda/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /gpfsm/dswdev/jcsda/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/2022.0.1 module load stack-intel-oneapi-mpi/2021.5.0 module load stack-python/3.9.7 module available -For ``spack-stack-1.4.0`` with GNU, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with GNU, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /gpfsm/dswdev/jcsda/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /gpfsm/dswdev/jcsda/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-gcc/10.1.0 module load stack-openmpi/4.1.3 module load stack-python/3.9.7 @@ -317,11 +311,11 @@ The following is required for building new spack environments and for using spac module load ecflow/5.8.4 module load mysql/8.0.31 -For ``spack-stack-1.4.0`` with Intel, load the following modules after loading miniconda and ecflow. +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda and ecflow. .. code-block:: console - module use /glade/work/epicufsrt/contrib/spack-stack/spack-stack-1.4.0-casper/envs/unified-env-v2/install/modulefiles/Core + module use /glade/work/epicufsrt/contrib/spack-stack/casper/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/19.1.1.217 module load stack-intel-mpi/2019.7.217 module load stack-python/3.9.12 @@ -344,21 +338,21 @@ The following is required for building new spack environments and for using spac module load ecflow/5.8.4 module load mysql/8.0.31 -For ``spack-stack-1.4.0`` with Intel, load the following modules after loading miniconda and ecflow. +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda and ecflow. .. 
code-block:: console -   module use /glade/work/epicufsrt/contrib/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core +   module use /glade/work/epicufsrt/contrib/spack-stack/cheyenne/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/19.1.1.217 module load stack-intel-mpi/2019.7.217 module load stack-python/3.9.12 module available -For ``spack-stack-1.4.0`` with GNU, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with GNU, load the following modules after loading miniconda and ecflow: .. code-block:: console -   module use /glade/work/epicufsrt/contrib/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core +   module use /glade/work/epicufsrt/contrib/spack-stack/cheyenne/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-gcc/10.1.0 module load stack-openmpi/4.1.1 module load stack-python/3.9.12 @@ -370,18 +364,15 @@ For ``spack-stack-1.4.0`` with GNU, load the following min NOAA Acorn (WCOSS2 test system) ------------------------------- -.. note:: -   Support for spack-stack-1.4.0 will be added later on develop. The instructions below are for an older release. - -On WCOSS2 OpenSUSE sets `CONFIG_SITE` which causes libraries to be installed in `lib64`, breaking the `lib` assumption made by some packages. +For spack-stack-1.4.1, the meta modules are in ``/lfs/h1/emc/nceplibs/noscrub/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core``. -`CONFIG_SITE` should be set to empty in `compilers.yaml`. Don't use ``module purge`` on Acorn! +On WCOSS2 OpenSUSE sets ``CONFIG_SITE`` which causes libraries to be installed in ``lib64``, breaking the ``lib`` assumption made by some packages. Therefore, ``CONFIG_SITE`` should be set to empty in ``compilers.yaml``. Also, don't use ``module purge`` on Acorn! -When installing an official `spack-stack` on Acorn, be mindful of umask and group ownership, as these can be finicky.
The umask value should be 002, otherwise various files can be assigned to the wrong group. In any case, running something to the effect of ``chgrp nceplibs -R`` and ``chmod o+rX -R`` after the whole installation is done is a good idea. +When installing an official ``spack-stack`` on Acorn, be mindful of umask and group ownership, as these can be finicky. The umask value should be 002, otherwise various files can be assigned to the wrong group. In any case, running something to the effect of ``chgrp nceplibs -R`` and ``chmod o+rX -R`` after the whole installation is done is a good idea. -Due to a combined quirk of Cray and Spack, the ``PrgEnv-gnu`` and ``gcc`` modules must be loaded when `ESMF` is being installed with `GCC`. +Due to a combined quirk of Cray and Spack, the ``PrgEnv-gnu`` and ``gcc`` modules must be loaded when `ESMF` is being installed with ``gcc``. -As of spring 2023, there is an inconsistency in `libstdc++` versions on Acorn between the login and compute nodes. It is advisable to compile on the compute nodes, which requires running ``spack fetch`` prior to installing through a batch job. +As of spring 2023, there is an inconsistency in ``libstdc++`` versions on Acorn between the login and compute nodes. It is advisable to compile on the compute nodes, which requires running ``spack fetch`` prior to installing through a batch job. Note that certain packages, such as recent versions of `py-scipy`, cannot be compiled on compute nodes because their build systems require internet access. @@ -404,31 +395,16 @@ The following is required for building new spack environments and for using spac module use /contrib/spack-stack/modulefiles/core module load miniconda/3.9.12 module load mysql/8.0.31 - # So far only on NOAA-AWS for spack-stack develop versions newer than 1.3.1 module load ecflow/5.8.4 -.. note:: - Support for spack-stack-1.4.0 will be added later on develop. The instructions below are for an older release. 
- -For ``spack-stack-1.3.0`` with Intel, load the following modules after loading miniconda and ecflow: - - module use /contrib/EPIC/spack-stack/spack-stack-1.3.0/envs/unified-env/install/modulefiles/Core - module load stack-intel/2021.3.0 - module load stack-intel-oneapi-mpi/2021.3.0 - module load stack-python/3.9.12 - module available +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda, mysql and ecflow: -For ``spack-stack-1.3.1`` with Intel, load the following modules after loading miniconda and ecflow: - - module use /contrib/EPIC/spack-stack/spack-stack-1.3.1/envs/unified-env/install/modulefiles/Core + module use /contrib/EPIC/spack-stack/spack-stack-1.4.1/envs/unified-dev/install/modulefiles/Core module load stack-intel/2021.3.0 module load stack-intel-oneapi-mpi/2021.3.0 module load stack-python/3.9.12 module available -.. note:: - ``spack-stack-1.3.1`` is not yet available on Azure. - .. _Preconfigured_Sites_Gaea: ------------------------------ @@ -448,11 +424,11 @@ The following is required for building new spack environments and for using spac module load ecflow/5.8.4 module load mysql/8.0.31 -For ``spack-stack-1.4.0`` with Intel, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda and ecflow: .. 
code-block:: console - module use /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/spack-stack-1.4.0-c4/envs/unified-env-v2/install/modulefiles/Core + module use /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/spack-stack-1.4.1-c4/envs/unified-env/install/modulefiles/Core module load stack-intel/2022.0.2 module load stack-cray-mpich/7.7.20 module load stack-python/3.9.12 @@ -480,7 +456,7 @@ The following is required for building new spack environments and for using spac module load cray-mpich/8.1.25 module load python/3.9.12 - module use /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/modulefiles-c5 + module use /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/modulefiles module load ecflow/5.8.4 module load mysql/8.0.31 @@ -488,14 +464,17 @@ For ``spack-stack-1.4.0`` with Intel, load the following modules after loading m .. code-block:: console - module use /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/spack-stack-1.4.0-c5/envs/unified-env-v2/install/modulefiles/Core + module use /lustre/f2/dev/wpo/role.epic/contrib/spack-stack/c5/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/2022.2.1 module load stack-cray-mpich/8.1.25 module load stack-python/3.9.12 - module available + module -t available .. note:: - On Gaea, a current limitation is that any executable that is linked against the MPI library (``cray-mpich``) must be run through ``srun`` on a compute node, even if it is run serially (one process). This is in particular a problem when using ``ctest`` for unit testing created by the ``ecbuild add_test`` macro. A workaround is to use the `cmake` cross-compiling emulator for this: + On Gaea C5, running ``module available`` without the option ``-t`` leads to an error: ``/usr/bin/lua5.3: /opt/cray/pe/lmod/lmod/libexec/Spider.lua:568: stack overflow`` + +.. 
note:: + On Gaea C5, a current limitation is that any executable that is linked against the MPI library (``cray-mpich``) must be run through ``srun`` on a compute node, even if it is run serially (one process). This is in particular a problem when using ``ctest`` for unit testing created by the ``ecbuild add_test`` macro. A workaround is to use the `cmake` cross-compiling emulator for this: .. code-block:: console @@ -517,26 +496,26 @@ The following is required for building new spack environments and for using spac module load ecflow/5.5.3 module load mysql/8.0.31 -For ``spack-stack-1.4.0`` with Intel, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/2021.5.0 module load stack-intel-oneapi-mpi/2021.5.1 module load stack-python/3.9.12 module available -For ``spack-stack-1.4.0`` with GNU, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with GNU, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-gcc/9.2.0 module load stack-openmpi/4.1.5 module load stack-python/3.9.12 module available - + Note that on Hera, a dedicated node exists for ``ecflow`` server jobs (``hecflow01``). Users starting ``ecflow_server`` on the regular login nodes will see their servers being killed every few minutes, and may be barred from accessing the system. .. 
_Preconfigured_Sites_Jet: @@ -556,21 +535,21 @@ The following is required for building new spack environments and for using spac module use /lfs4/HFIP/hfv3gfs/role.epic/modulefiles module load mysql/8.0.31 -For ``spack-stack-1.4.0`` with Intel, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/2021.5.0 module load stack-intel-oneapi-mpi/2021.5.1 module load stack-python/3.9.12 module available -For ``spack-stack-1.4.0`` with GNU, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with GNU, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-gcc/9.2.0 module load stack-openmpi/3.1.4 module load stack-python/3.9.12 @@ -590,11 +569,11 @@ The following is required for building new spack environments and for using spac module load ecflow/5.8.4 module load mysql/8.0.31 -For ``spack-stack-1.4.0`` with Intel, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.4.1`` with Intel, load the following modules after loading miniconda and ecflow: .. 
code-block:: console - module use /data/prod/jedi/spack-stack/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /data/prod/jedi/spack-stack/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/2021.5.0 module load stack-intel-oneapi-mpi/2021.5.0 module load stack-python/3.9.12 @@ -614,24 +593,24 @@ Amazon Web Services Parallelcluster Ubuntu 20.04 Access to the JCSDA-managed AWS Parallel Cluster is not available to the public. The following instructions are for JCSDA core staff and in-kind contributors. -For ``spack-stack-1.4.0`` with Intel, run the following commands/load the following modules: +For ``spack-stack-1.4.1`` with Intel, run the following commands/load the following modules: .. code-block:: console module purge ulimit -s unlimited source /opt/intel/oneapi/compiler/2022.1.0/env/vars.sh - module use /mnt/experiments-efs/skylab-v5/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /mnt/experiments-efs/skylab-v5/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-intel/2022.1.0 module load stack-intel-oneapi-mpi/2021.6.0 module load stack-python/3.10.8 module available -For ``spack-stack-1.4.0`` with GNU, run the following commands/load the following modules: +For ``spack-stack-1.4.1`` with GNU, run the following commands/load the following modules: module purge ulimit -s unlimited - module use /mnt/experiments-efs/skylab-v5/spack-stack-1.4.0/envs/unified-env-v2/install/modulefiles/Core + module use /mnt/experiments-efs/skylab-v5/spack-stack-1.4.1/envs/unified-env/install/modulefiles/Core module load stack-gcc/9.4.0 module load stack-openmpi/4.1.4 module load stack-python/3.10.8 diff --git a/doc/source/conf.py b/doc/source/conf.py index cf07fa12a..80ede6bdf 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -24,9 +24,9 @@ author = 'Dominikus Heinzeller, Alexander Richert, Cameron Book' # The short X.Y version -version = '' +version = '1.4' # 
The full version, including alpha/beta/rc tags -release = '' +release = '1.4.1' numfig = True @@ -142,7 +142,7 @@ # Latex figure (float) alignment # # 'figure_align': 'htbp', - 'maketitle': r'\newcommand\sphinxbackoftitlepage{For referencing this document please use: \newline \break Heinzeller, D., A. Richert, C. Book, 2023. spack-stack documentation release v1.3.1. Available at https://spack-stack.readthedocs.io/\textunderscore/downloads/en/v1.3.1/pdf/.}\sphinxmaketitle' + 'maketitle': r'\newcommand\sphinxbackoftitlepage{For referencing this document please use: \newline \break Heinzeller, D., A. Richert, C. Book, 2023. spack-stack documentation release v1.4.1. Available at https://spack-stack.readthedocs.io/\textunderscore/downloads/en/v1.4.1/pdf/.}\sphinxmaketitle' } # Grouping the document tree into LaTeX files. List of tuples diff --git a/spack b/spack index 204f82600..01d6aa523 160000 --- a/spack +++ b/spack @@ -1 +1 @@ -Subproject commit 204f82600bda589358e1d2957ca3a23a65453c23 +Subproject commit 01d6aa5239a8f46862d0f35a9f3533afdb9151a2