diff --git a/.gitignore b/.gitignore index f94c17ba35..047313a32f 100644 --- a/.gitignore +++ b/.gitignore @@ -31,6 +31,7 @@ fix/cice fix/cpl fix/datm fix/gldas +fix/gdas fix/gsi fix/lut fix/mom6 @@ -40,7 +41,6 @@ fix/sfc_climo fix/ugwd fix/verif fix/wave -fix/wafs # Ignore parm file symlinks #-------------------------- @@ -97,8 +97,42 @@ parm/wafs #-------------------------------------------- sorc/*log sorc/logs -sorc/*.cd -sorc/*.fd +sorc/calc_analysis.fd +sorc/calc_increment_ens.fd +sorc/calc_increment_ens_ncio.fd +sorc/emcsfc_ice_blend.fd +sorc/emcsfc_snow2mdl.fd +sorc/enkf.fd +sorc/enkf_chgres_recenter_nc.fd +sorc/fbwndgfs.fd +sorc/gaussian_sfcanl.fd +sorc/getsfcensmeanp.fd +sorc/getsigensmeanp_smooth.fd +sorc/getsigensstatp.fd +sorc/gfs_bufr.fd +sorc/global_cycle.fd +sorc/gsi.fd +sorc/interp_inc.fd +sorc/mkgfsawps.fd +sorc/overgridid.fd +sorc/oznmon_horiz.fd +sorc/oznmon_time.fd +sorc/radmon_angle.fd +sorc/radmon_bcoef.fd +sorc/radmon_bcor.fd +sorc/radmon_time.fd +sorc/rdbfmsua.fd +sorc/recentersigp.fd +sorc/reg2grb2.fd +sorc/supvit.fd +sorc/syndat_getjtbul.fd +sorc/syndat_maksynrc.fd +sorc/syndat_qctropcy.fd +sorc/tave.fd +sorc/tocsbufr.fd +sorc/upp.fd +sorc/vint.fd +sorc/webtitle.fd # Ignore scripts from externals #------------------------------ @@ -121,6 +155,7 @@ scripts/exgfs_atmos_wafs_grib2_0p25.sh ush/chgres_cube.sh ush/emcsfc_ice_blend.sh ush/emcsfc_snow.sh +ush/exglobal_prep_ocean_obs.py ush/fix_precip.sh ush/fv3gfs_driver_grid.sh ush/fv3gfs_filter_topo.sh @@ -150,7 +185,6 @@ versions/build.ver versions/run.ver # wxflow checkout and symlinks -sorc/wxflow ush/python/wxflow workflow/wxflow ci/scripts/wxflow diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..3eb26fb0fe --- /dev/null +++ b/.gitmodules @@ -0,0 +1,28 @@ +[submodule "sorc/ufs_model.fd"] + path = sorc/ufs_model.fd + url = https://github.com/ufs-community/ufs-weather-model + ignore = dirty +[submodule "sorc/wxflow"] + path = sorc/wxflow + url = https://github.com/NOAA-EMC/wxflow +[submodule "sorc/gfs_utils.fd"] + path = sorc/gfs_utils.fd + url = https://github.com/NOAA-EMC/gfs-utils +[submodule "sorc/ufs_utils.fd"] + path = sorc/ufs_utils.fd + url = https://github.com/ufs-community/UFS_UTILS.git +[submodule "sorc/verif-global.fd"] + path = sorc/verif-global.fd + url = https://github.com/NOAA-EMC/EMC_verif-global.git +[submodule "sorc/gsi_enkf.fd"] + path = sorc/gsi_enkf.fd + url = https://github.com/NOAA-EMC/GSI.git +[submodule "sorc/gdas.cd"] + path = sorc/gdas.cd + url = https://github.com/NOAA-EMC/GDASApp.git +[submodule "sorc/gsi_utils.fd"] + path = sorc/gsi_utils.fd + url = https://github.com/NOAA-EMC/GSI-Utils.git +[submodule "sorc/gsi_monitor.fd"] + path = sorc/gsi_monitor.fd + url = https://github.com/NOAA-EMC/GSI-Monitor.git diff --git a/Externals.cfg b/Externals.cfg deleted file mode 100644 index eba775275a..0000000000 --- a/Externals.cfg +++ /dev/null @@ -1,67 +0,0 @@ -# External sub-modules of global-workflow - -[wxflow] -tag = 528f5ab -local_path = sorc/wxflow -repo_url = https://github.com/NOAA-EMC/wxflow.git -protocol = git -required = True - -[UFS] -tag = 3ba8dff -local_path = sorc/ufs_model.fd -repo_url = https://github.com/ufs-community/ufs-weather-model.git -protocol = git -required = True - -[gfs-utils] -hash = a283262 -local_path = sorc/gfs_utils.fd -repo_url = https://github.com/NOAA-EMC/gfs-utils -protocol = git -required = True - -[UFS-Utils] -hash = 72a0471 -local_path = sorc/ufs_utils.fd -repo_url = https://github.com/ufs-community/UFS_UTILS.git -protocol = git 
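With Externals.cfg retired in favor of the .gitmodules file above, an existing clone that was previously populated by checkout.sh can be moved onto the submodule layout with standard git commands. A minimal sketch, run from the top of the clone once this change is checked out:

    # fetch and check out every submodule at the commit pinned by the superproject
    git submodule update --init --recursive

Note that `ignore = dirty` on sorc/ufs_model.fd only suppresses "modified content" noise for that submodule in `git status` of the superproject; it does not affect which commit is checked out.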
-required = True - -[EMC_verif-global] -tag = c267780 -local_path = sorc/verif-global.fd -repo_url = https://github.com/NOAA-EMC/EMC_verif-global.git -protocol = git -required = True - -[GSI-EnKF] -hash = ca19008 -local_path = sorc/gsi_enkf.fd -repo_url = https://github.com/NOAA-EMC/GSI.git -protocol = git -required = False - -[GSI-Utils] -hash = 322cc7b -local_path = sorc/gsi_utils.fd -repo_url = https://github.com/NOAA-EMC/GSI-utils.git -protocol = git -required = False - -[GSI-Monitor] -hash = 45783e3 -local_path = sorc/gsi_monitor.fd -repo_url = https://github.com/NOAA-EMC/GSI-monitor.git -protocol = git -required = False - -[GDASApp] -hash = f44a6d5 -local_path = sorc/gdas.cd -repo_url = https://github.com/NOAA-EMC/GDASApp.git -protocol = git -required = False - -[externals_description] -schema_version = 1.0.0 diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index 4af44507e9..798c98bf50 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -48,7 +48,7 @@ git clone "${REPO_URL}" cd global-workflow || exit 1 # checkout pull request -"${GH}" pr checkout "${PR}" --repo "${REPO_URL}" +"${GH}" pr checkout "${PR}" --repo "${REPO_URL}" --recurse-submodules HOMEgfs="${PWD}" source "${HOMEgfs}/ush/detect_machine.sh" @@ -67,30 +67,14 @@ source "${HOMEgfs}/ush/detect_machine.sh" commit=$(git log --pretty=format:'%h' -n 1) echo "${commit}" > "../commit" -# run checkout script +# build full cycle cd sorc || exit 1 set +e -./checkout.sh -c -g -u >> log.checkout 2>&1 -checkout_status=$? -DATE=$(date +'%D %r') -if [[ ${checkout_status} != 0 ]]; then - { - echo "Checkout: *** FAILED ***" - echo "Checkout: Failed at ${DATE}" - echo "Checkout: see output at ${PWD}/log.checkout" - } >> "${outfile}" - exit "${checkout_status}" -else - { - echo "Checkout: Completed at ${DATE}" - } >> "${outfile}" -fi -# build full cycle source "${HOMEgfs}/ush/module-setup.sh" export BUILD_JOBS=8 rm -rf log.build -./build_all.sh >> log.build 2>&1 +./build_all.sh -gu >> log.build 2>&1 build_status=$? 
DATE=$(date +'%D %r') diff --git a/ci/scripts/driver_weekly.sh b/ci/scripts/driver_weekly.sh index a52e5b1caa..88b027d100 100755 --- a/ci/scripts/driver_weekly.sh +++ b/ci/scripts/driver_weekly.sh @@ -66,7 +66,7 @@ echo "Creating new branch ${branch} from develop on ${MACHINE_ID} in ${develop_d rm -Rf "${develop_dir}" mkdir -p "${develop_dir}" cd "${develop_dir}" || exit 1 -git clone "${REPO_URL}" +git clone --recursive "${REPO_URL}" cd global-workflow || exit 1 git checkout -b "${branch}" diff --git a/docs/source/clone.rst b/docs/source/clone.rst index c098a34f7e..bad3f0e9f6 100644 --- a/docs/source/clone.rst +++ b/docs/source/clone.rst @@ -15,35 +15,34 @@ Clone the `global-workflow` and `cd` into the `sorc` directory: :: - git clone https://github.com/NOAA-EMC/global-workflow + git clone --recursive https://github.com/NOAA-EMC/global-workflow cd global-workflow/sorc -For forecast-only (coupled or uncoupled) checkout the components: +For forecast-only (coupled or uncoupled) build of the components: :: - ./checkout.sh + ./build_all.sh -For cycled (w/ data assimilation) use the `-g` option during checkout: +For cycled (w/ data assimilation) use the `-g` option during build: :: - ./checkout.sh -g + ./build_all.sh -g -For coupled cycling (include new UFSDA) use the `-gu` options during checkout: +For coupled cycling (include new UFSDA) use the `-gu` options during build: [Currently only available on Hera, Orion, and Hercules] :: - ./checkout.sh -gu + ./build_all.sh -gu Build workflow components and link workflow artifacts such as executables, etc. :: - ./build_all.sh ./link_workflow.sh @@ -73,7 +72,7 @@ You now have a cloned copy of the global-workflow git repository. To checkout a :: - git checkout BRANCH_NAME + git checkout --recurse-submodules BRANCH_NAME .. note:: Branch must already exist. If it does not you need to make a new branch using the ``-b`` flag: @@ -86,62 +85,55 @@ The ``checkout`` command will checkout BRANCH_NAME and switch your clone to that :: - git checkout my_branch + git checkout --recurse-submodules my_branch git branch * my_branch develop -********** -Components -********** +Using ``--recurse-submodules`` is important to ensure you are updating the component versions to match the branch. -Once you have cloned the workflow repository it's time to checkout/clone its components. The components will be checked out under the ``/sorc`` folder via a script called checkout.sh. Run the script with no arguments for forecast-only: +^^^^^^^^^^^^^^^^ +Build components +^^^^^^^^^^^^^^^^ -:: +Under the ``/sorc`` folder is a script to build all components called ``build_all.sh``. 
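Because `--recurse-submodules` is needed on every checkout to keep the component versions in line with the branch (see the clone.rst guidance above), it can be convenient to make that behavior the default. An optional sketch, assuming a reasonably recent git:

    # make 'git checkout' and 'git pull' recurse into submodules automatically
    git config submodule.recurse true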
After checking out the branch you wish to use, run this script to build all component codes: - cd sorc - ./checkout.sh +:: -Or with the ``-g`` switch to include data assimilation (GSI) for cycling: + ./build_all.sh [-a UFS_app][-g][-h][-j n][-u][-v] + -a UFS_app: + Build a specific UFS app instead of the default + -g: + Build GSI + -h: + Print this help message and exit + -j: + Specify maximum number of build jobs (n) + -u: + Build UFS-DA + -v: + Execute all build scripts with -v option to turn on verbose where supported + +For forecast-only (coupled or uncoupled) build of the components: :: - cd sorc - ./checkout.sh -g + ./build_all.sh -Or also with the ``-u`` swtich to include coupled DA (via UFSDA): -[Currently only available on Hera, Orion, and Hercules] +For cycled (w/ data assimilation) use the `-g` option during build: :: - cd sorc - ./checkout.sh -gu - -Each component cloned via checkout.sh will have a log (``/sorc/logs/checkout-COMPONENT.log``). Check the screen output and logs for clone errors. + ./build_all.sh -g -^^^^^^^^^^^^^^^^ -Build components -^^^^^^^^^^^^^^^^ +For coupled cycling (include new UFSDA) use the `-gu` options during build: -Under the ``/sorc`` folder is a script to build all components called ``build_all.sh``. After running checkout.sh run this script to build all components codes: +[Currently only available on Hera, Orion, and Hercules] :: - ./build_all.sh [-a UFS_app][-c build_config][-h][-v] - -a UFS_app: - Build a specific UFS app instead of the default - -c build_config: - Selectively build based on the provided config instead of the default config - -h: - Print usage message and exit - -v: - Run all scripts in verbose mode - -A partial build option is also available via two methods: - - a) modify gfs_build.cfg config file to disable/enable particular builds and then rerun build_all.sh + ./build_all.sh -gu - - b) run individual build scripts also available in ``/sorc`` folder for each component or group of codes ^^^^^^^^^^^^^^^ Link components ^^^^^^^^^^^^^^^ diff --git a/docs/source/components.rst b/docs/source/components.rst index 4d2619e44e..98e76b467b 100644 --- a/docs/source/components.rst +++ b/docs/source/components.rst @@ -13,13 +13,13 @@ The major components of the system are: * Post-processing * Verification -The Global Workflow repository contains the workflow and script layers. After running the checkout script, the code and additional offline scripts for the analysis, forecast, and post-processing components will be present. Any non-workflow component is known as a sub-module. All of the sub-modules of the system reside in their respective repositories on GitHub. The global-workflow sub-modules are obtained by running the checkout script found under the /sorc folder. +The Global Workflow repository contains the workflow and script layers. External components will be checked out as git submodules. All of the submodules of the system reside in their respective repositories on GitHub. ====================== Component repositories ====================== -Components checked out via sorc/checkout.sh: +Components included as submodules: * **GFS UTILS** (https://github.com/NOAA-EMC/gfs-utils): Utility codes needed by Global Workflow to run the GFS configuration * **UFS-Weather-Model** (https://github.com/ufs-community/ufs-weather-model): This is the core model used by the Global-Workflow to provide forecasts.
The UFS-weather-model repository is an umbrella repository consisting of coupled Earth system components that are all checked out when we check out the code at the top level of the repository @@ -28,10 +28,11 @@ Components checked out via sorc/checkout.sh: * **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values * **GDAS** (https://github.com/NOAA-EMC/GDASApp): JEDI-based Data Assimilation system. This system is currently being developed for marine Data Assimilation and in time will replace GSI for atmospheric data assimilation as well * **UFS UTILS** (https://github.com/ufs-community/UFS_UTILS): Utility codes needed for UFS-weather-model +* **wxflow** (https://github.com/NOAA-EMC/wxflow): Collection of Python utilities for weather workflows * **Verif global** (https://github.com/NOAA-EMC/EMC_verif-global): Verification package to evaluate GFS parallels. It uses MET and METplus. At this moment the verification package is limited to providing atmospheric metrics only .. note:: - When running the system in forecast-only mode the Data Assimilation components are not needed and are hence not checked out. + When running the system in forecast-only mode the Data Assimilation components are not needed and are hence not built. ===================== External dependencies ===================== ^^^^^^^^^ Libraries ^^^^^^^^^ -All the libraries that are needed to run the end to end Global Workflow are built using a package manager. Currently these are served via HPC-STACK but will soon be available via SPACK-STACK. These libraries are already available on supported NOAA HPC platforms +All the libraries that are needed to run the end to end Global Workflow are built using a package manager. These are served via spack-stack. These libraries are already available on supported NOAA HPC platforms. -Find information on official installations of HPC-STACK here: +Find information on official installations of spack-stack here: -https://github.com/NOAA-EMC/hpc-stack/wiki/Official-Installations +https://github.com/JCSDA/spack-stack/wiki/Porting-spack-stack-to-a-new-system ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Observation data (OBSPROC/prep) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/source/development.rst b/docs/source/development.rst index e95516bcca..4739d2b602 100644 --- a/docs/source/development.rst +++ b/docs/source/development.rst @@ -196,3 +196,4 @@ Moving forward you'll want to perform the "remote update" command regularly to u :: git remote update + diff --git a/docs/source/init.rst b/docs/source/init.rst index f9562a3a7d..65e400c68e 100644 --- a/docs/source/init.rst +++ b/docs/source/init.rst @@ -318,12 +318,12 @@ Manual Generation The following information is for users needing to generate cold-start initial conditions for a cycled experiment that will run at a different resolution or layer amount than the operational GFS (C768C384L127). -The ``chgres_cube`` code is available from the `UFS_UTILS repository `_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users may clone the develop/HEAD branch or the same version used by global-workflow develop (found in ``sorc/checkout.sh``). The ``chgres_cube`` code/scripts currently support the following GFS inputs: +The ``chgres_cube`` code is available from the `UFS_UTILS repository `_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers.
Users may clone the develop/HEAD branch or the same version used by global-workflow develop. The ``chgres_cube`` code/scripts currently support the following GFS inputs: -* pre-GFSv14 -* GFSv14 -* GFSv15 -* GFSv16 +* pre-GFSv14 +* GFSv14 +* GFSv15 +* GFSv16 Users can use the copy of UFS_UTILS that is already cloned and built within their global-workflow clone or clone/build it separately: diff --git a/docs/source/run.rst b/docs/source/run.rst index 0d38b8d6a4..817ed3ccfa 100644 --- a/docs/source/run.rst +++ b/docs/source/run.rst @@ -2,7 +2,7 @@ Run Global Workflow ################### -Here we will show how you can run an experiment using the Global Workflow. The Global workflow is regularly evolving and the underlying UFS-weather-model that it drives can run many different configurations. So this part of the document will be regularly updated. The workflow as it is configured today can be run as forecast only or cycled (forecast+Data Assimilation). Since cycled mode requires a number of Data Assimilation supporting repositories to be checked out, the instructions for the two modes from initial checkout stage will be slightly different. +Here we will show how you can run an experiment using the Global Workflow. The Global workflow is regularly evolving and the underlying UFS-weather-model that it drives can run many different configurations. So this part of the document will be regularly updated. The workflow as it is configured today can be run as forecast only or cycled (forecast+Data Assimilation). Since cycled mode requires a number of Data Assimilation supporting repositories to be checked out, the instructions for the two modes from initial build stage will be slightly different. .. toctree:: diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 695ad5fcc5..1fc0c606c1 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -730,7 +730,7 @@ elif [[ ${step} = "verfozn" ]]; then elif [[ ${step} = "verfrad" ]]; then - export wtime_verfrad="00:20:00" + export wtime_verfrad="00:40:00" export npe_verfrad=1 export nth_verfrad=1 export npe_node_verfrad=1 diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 4ba0b92888..e65c50e8de 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -19,12 +19,14 @@ Builds all of the global-workflow components by calling the individual build Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v] -a UFS_app: Build a specific UFS app instead of the default - -c build_config: - Selectively build based on the provided config instead of the default config + -g: + Build GSI -h: - print this help message and exit + Print this help message and exit -j: Specify maximum number of build jobs (n) + -u: + Build UFS-DA -v: Execute all build scripts with -v option to turn on verbose where supported EOF @@ -35,17 +37,19 @@ script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) cd "${script_dir}" || exit 1 _build_ufs_opt="" +_build_ufsda="NO" +_build_gsi="NO" _verbose_opt="" -_partial_opt="" _build_job_max=20 # Reset option counter in case this script is sourced OPTIND=1 -while getopts ":a:c:j:hv" option; do +while getopts ":a:ghj:uv" option; do case "${option}" in a) _build_ufs_opt+="-a ${OPTARG} ";; - c) _partial_opt+="-c ${OPTARG} ";; + g) _build_gsi="YES" ;; h) _usage;; j) _build_job_max="${OPTARG} ";; + u) _build_ufsda="YES" ;; v) _verbose_opt="-v";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -89,15 +93,6 @@ fi # TODO: Commented out until components 
aligned for build #source ../versions/build.ver -#------------------------------------ -# INCLUDE PARTIAL BUILD -#------------------------------------ -# Turn off some shellcheck warnings because we want to have -# variables with multiple arguments. -# shellcheck disable=SC2086,SC2248 -source ./partial_build.sh ${_verbose_opt} ${_partial_opt} -# shellcheck disable= - #------------------------------------ # Exception Handling Init #------------------------------------ @@ -116,45 +111,36 @@ declare -A build_opts # Mandatory builds, unless otherwise specified, for the UFS big_jobs=0 -if [[ ${Build_ufs_model} == 'true' ]]; then - build_jobs["ufs"]=8 - big_jobs=$((big_jobs+1)) - build_opts["ufs"]="${_verbose_opt} ${_build_ufs_opt}" -fi -# The UPP is hardcoded to use 6 cores -if [[ ${Build_upp} == 'true' ]]; then - build_jobs["upp"]=6 - build_opts["upp"]="" -fi -if [[ ${Build_ufs_utils} == 'true' ]]; then - build_jobs["ufs_utils"]=3 - build_opts["ufs_utils"]="${_verbose_opt}" -fi -if [[ ${Build_gfs_utils} == 'true' ]]; then - build_jobs["gfs_utils"]=1 - build_opts["gfs_utils"]="${_verbose_opt}" -fi -if [[ ${Build_ww3prepost} == "true" ]]; then - build_jobs["ww3prepost"]=3 - build_opts["ww3prepost"]="${_verbose_opt} ${_build_ufs_opt}" -fi +build_jobs["ufs"]=8 +big_jobs=$((big_jobs+1)) +build_opts["ufs"]="${_verbose_opt} ${_build_ufs_opt}" + +build_jobs["upp"]=6 # The UPP is hardcoded to use 6 cores +build_opts["upp"]="" + +build_jobs["ufs_utils"]=3 +build_opts["ufs_utils"]="${_verbose_opt}" + +build_jobs["gfs_utils"]=1 +build_opts["gfs_utils"]="${_verbose_opt}" + +build_jobs["ww3prepost"]=3 +build_opts["ww3prepost"]="${_verbose_opt} ${_build_ufs_opt}" # Optional DA builds -if [[ -d gdas.cd ]]; then +if [[ "${_build_ufsda}" == "YES" ]]; then build_jobs["gdas"]=8 big_jobs=$((big_jobs+1)) build_opts["gdas"]="${_verbose_opt}" fi -if [[ -d gsi_enkf.fd ]]; then +if [[ "${_build_gsi}" == "YES" ]]; then build_jobs["gsi_enkf"]=8 big_jobs=$((big_jobs+1)) build_opts["gsi_enkf"]="${_verbose_opt}" fi -if [[ -d gsi_utils.fd ]]; then +if [[ "${_build_gsi}" == "YES" || "${_build_ufsda}" == "YES" ]] ; then build_jobs["gsi_utils"]=2 build_opts["gsi_utils"]="${_verbose_opt}" -fi -if [[ -d gsi_monitor.fd ]]; then build_jobs["gsi_monitor"]=1 build_opts["gsi_monitor"]="${_verbose_opt}" fi diff --git a/sorc/checkout.sh b/sorc/checkout.sh deleted file mode 100755 index 25680df2c0..0000000000 --- a/sorc/checkout.sh +++ /dev/null @@ -1,184 +0,0 @@ -#! /usr/bin/env bash - -set +x -set -u - -function usage() { - cat << EOF -Clones and checks out external components necessary for - global workflow. If the directory already exists, skip - cloning and just check out the requested version (unless - -c option is used). 
- -Usage: ${BASH_SOURCE[0]} [-c][-h][-m ufs_hash] - -c: - Create a fresh clone (delete existing directories) - -h: - Print this help message and exit - -m ufs_hash: - Check out this UFS hash instead of the default - -g: - Check out GSI for GSI-based DA - -u: - Check out GDASApp for UFS-based DA -EOF - exit 1 -} - -function checkout() { - # - # Clone or fetch repo, then checkout specific hash and update submodules - # - # Environment variables: - # topdir [default: $(pwd)]: parent directory to your checkout - # logdir [default: $(pwd)]: where you want logfiles written - # CLEAN [default: NO]: whether to delete existing directories and create a fresh clone - # - # Usage: checkout - # - # Arguments - # dir: Directory for the clone - # remote: URL of the remote repository - # version: Commit to check out; should always be a speciifc commit (hash or tag), not a branch - # - # Returns - # Exit code of last failed command, or 0 if successful - # - - dir="$1" - remote="$2" - version="$3" - cpus="${4:-1}" # Default 1 thread - recursive=${5:-"YES"} - - name=$(echo "${dir}" | cut -d '.' -f 1) - echo "Performing checkout of ${name}" - - logfile="${logdir:-$(pwd)}/checkout_${name}.log" - - if [[ -f "${logfile}" ]]; then - rm "${logfile}" - fi - - cd "${topdir}" || exit 1 - if [[ -d "${dir}" && ${CLEAN} == "YES" ]]; then - echo "|-- Removing existing clone in ${dir}" - rm -Rf "${dir}" - fi - if [[ ! -d "${dir}" ]]; then - echo "|-- Cloning from ${remote} into ${dir}" - git clone "${remote}" "${dir}" >> "${logfile}" 2>&1 - status=$? - if ((status > 0)); then - echo " WARNING: Error while cloning ${name}" - echo - return "${status}" - fi - cd "${dir}" || exit 1 - else - # Fetch any updates from server - cd "${dir}" || exit 1 - echo "|-- Fetching updates from ${remote}" - git fetch - fi - echo "|-- Checking out ${version}" - git checkout "${version}" >> "${logfile}" 2>&1 - status=$? - if ((status > 0)); then - echo " WARNING: Error while checking out ${version} in ${name}" - echo - return "${status}" - fi - if [[ "${recursive}" == "YES" ]]; then - echo "|-- Updating submodules (if any)" - git submodule update --init --recursive -j "${cpus}" >> "${logfile}" 2>&1 - status=$? - if ((status > 0)); then - echo " WARNING: Error while updating submodules of ${name}" - echo - return "${status}" - fi - fi - echo - return 0 -} - -# Set defaults for variables toggled by options -export CLEAN="NO" -checkout_gsi="NO" -checkout_gdas="NO" - -# Parse command line arguments -while getopts ":chgum:o" option; do - case ${option} in - c) - echo "Received -c flag, will delete any existing directories and start clean" - export CLEAN="YES" - ;; - g) - echo "Received -g flag for optional checkout of GSI-based DA" - checkout_gsi="YES" - ;; - h) usage;; - u) - echo "Received -u flag for optional checkout of UFS-based DA" - checkout_gdas="YES" - ;; - m) - echo "Received -m flag with argument, will check out ufs-weather-model hash ${OPTARG} instead of default" - ufs_model_hash=${OPTARG} - ;; - :) - echo "option -${OPTARG} needs an argument" - usage - ;; - *) - echo "invalid option -${OPTARG}, exiting..." 
- usage - ;; - esac -done -shift $((OPTIND-1)) - -topdir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) -export topdir -export logdir="${topdir}/logs" -mkdir -p "${logdir}" - -# Setup lmod environment -source "${topdir}/../workflow/gw_setup.sh" - -# The checkout version should always be a speciifc commit (hash or tag), not a branch -errs=0 -# Checkout UFS submodules in parallel -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-3ba8dff}" "8" & - -# Run all other checkouts simultaneously with just 1 core each to handle submodules. -checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" & -checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "427d467" & -checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "892b693" & -checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" & - -if [[ ${checkout_gsi} == "YES" ]]; then - checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "c94bc72" "1" "NO" & -fi - -if [[ ${checkout_gdas} == "YES" ]]; then - checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "f44a6d5" & -fi - -if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then - checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "f371890" & - checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "ae256c0" & -fi - -# Go through each PID and verify no errors were reported. -for checkout_pid in $(jobs -p); do - wait "${checkout_pid}" || errs=$((errs + $?)) -done - -if (( errs > 0 )); then - echo "WARNING: One or more errors encountered during checkout process, please check logs before building" -fi -echo -exit "${errs}" diff --git a/sorc/gdas.cd b/sorc/gdas.cd new file mode 160000 index 0000000000..f44a6d500d --- /dev/null +++ b/sorc/gdas.cd @@ -0,0 +1 @@ +Subproject commit f44a6d500dda2aba491e4fa12c0bee428ddb7b80 diff --git a/sorc/gfs_build.cfg b/sorc/gfs_build.cfg deleted file mode 100644 index 8c171072d0..0000000000 --- a/sorc/gfs_build.cfg +++ /dev/null @@ -1,15 +0,0 @@ -# -# ***** configuration of global-workflow build ***** - - Building ufs_model (ufs_model) ........................ yes - Building ww3prepost (ww3prepost) ...................... yes - Building gsi_enkf (gsi_enkf) .......................... yes - Building gsi_utils (gsi_utils) ........................ yes - Building gsi_monitor (gsi_monitor) .................... yes - Building gdas (gdas) .................................. yes - Building UPP (upp) .................................... yes - Building ufs_utils (ufs_utils) ........................ yes - Building gfs_utils (gfs_utils) ........................ 
yes - -# -- END -- - diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd new file mode 160000 index 0000000000..427d4678b8 --- /dev/null +++ b/sorc/gfs_utils.fd @@ -0,0 +1 @@ +Subproject commit 427d4678b80f88723528d5f5ff07b6c90df9a977 diff --git a/sorc/gsi_enkf.fd b/sorc/gsi_enkf.fd new file mode 160000 index 0000000000..c94bc72ff4 --- /dev/null +++ b/sorc/gsi_enkf.fd @@ -0,0 +1 @@ +Subproject commit c94bc72ff410b48c325abbfe92c9fcb601d89aed diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd new file mode 160000 index 0000000000..ae256c0d69 --- /dev/null +++ b/sorc/gsi_monitor.fd @@ -0,0 +1 @@ +Subproject commit ae256c0d69df3232ee9dd3e81b176bf2c3cda312 diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd new file mode 160000 index 0000000000..f371890b9f --- /dev/null +++ b/sorc/gsi_utils.fd @@ -0,0 +1 @@ +Subproject commit f371890b9fcb42312da5f6228d87b5a4829e7e3a diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 581c50e704..1bdb4dd492 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -199,7 +199,7 @@ fi #------------------------------ #--add GDASApp files #------------------------------ -if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then cd "${HOMEgfs}/ush" || exit 1 ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ufsda" . ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/jediinc2fv3.py" . @@ -261,7 +261,7 @@ for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle; do done # GSI -if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd/install" ]]; then for gsiexe in enkf.x gsi.x; do [[ -s "${gsiexe}" ]] && rm -f "${gsiexe}" ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_enkf.fd/install/bin/${gsiexe}" . @@ -269,7 +269,7 @@ if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd" ]]; then fi # GSI Utils -if [[ -d "${HOMEgfs}/sorc/gsi_utils.fd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gsi_utils.fd/install" ]]; then for exe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x \ interp_inc.x recentersigp.x @@ -280,7 +280,7 @@ if [[ -d "${HOMEgfs}/sorc/gsi_utils.fd" ]]; then fi # GSI Monitor -if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd/install" ]]; then for exe in oznmon_horiz.x oznmon_time.x radmon_angle.x \ radmon_bcoef.x radmon_bcor.x radmon_time.x do @@ -290,7 +290,7 @@ if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd" ]]; then fi # GDASApp -if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then declare -a JEDI_EXE=("fv3jedi_addincrement.x" \ "fv3jedi_diffstates.x" \ "fv3jedi_ensvariance.x" \ diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh deleted file mode 100755 index 34b8b557ce..0000000000 --- a/sorc/partial_build.sh +++ /dev/null @@ -1,199 +0,0 @@ -#! 
/usr/bin/env bash -# -# define the array of the name of build program -# -declare -a Build_prg=("Build_ufs_model" \ - "Build_ww3prepost" \ - "Build_gsi_enkf" \ - "Build_gsi_utils" \ - "Build_gsi_monitor" \ - "Build_gdas" \ - "Build_upp" \ - "Build_ufs_utils" \ - "Build_gfs_utils") - -# -# function parse_cfg: read config file and retrieve the values -# -parse_cfg() { - declare -i n - declare -i num_args - declare -i total_args - declare -a all_prg - total_args=$# - num_args=$1 - (( num_args == 0 )) && return 0 - config=$2 - [[ ${config,,} == "--verbose" ]] && config=$3 - all_prg=() - for (( n = num_args + 2; n <= total_args; n++ )); do - all_prg+=( "${!n}" ) - done - - if [[ ${config^^} == ALL ]]; then - # - # set all values to true - # - for var in "${Build_prg[@]}"; do - eval "${var}=true" - done - elif [[ ${config} == config=* ]]; then - # - # process config file - # - cfg_file=${config#config=} - ${verbose} && echo "INFO: settings in config file: ${cfg_file}" - while read -r cline; do - # remove leading white space - clean_line="${cline#"${cline%%[![:space:]]*}"}" - { [[ -z "${clean_line}" ]] || [[ "${clean_line:0:1}" == "#" ]]; } || { - ${verbose} && echo "${clean_line}" - first9=${clean_line:0:9} - [[ ${first9,,} == "building " ]] && { - # No shellcheck, this can't be replaced by a native bash substitute - # because it uses a regex - # shellcheck disable=SC2001 - short_prg=$(sed -e 's/.*(\(.*\)).*/\1/' <<< "${clean_line}") - # shellcheck disable= - # remove trailing white space - clean_line="${cline%"${cline##*[![:space:]]}"}" - build_action=true - last5=${clean_line: -5} - [[ ${last5,,} == ". yes" ]] && build_action=true - last4=${clean_line: -4} - [[ ${last4,,} == ". no" ]] && build_action=false - found=false - for prg in "${all_prg[@]}"; do - [[ ${prg} == "Build_${short_prg}" ]] && { - found=true - eval "${prg}=${build_action}" - break - } - done - ${found} || { - echo "*** Unrecognized line in config file \"${cfg_file}\":" 2>&1 - echo "${cline}" 2>&1 - exit 3 - } - } - } - done < "${cfg_file}" - elif [[ ${config} == select=* ]]; then - # - # set all values to (default) false - # - for var in "${Build_prg[@]}"; do - eval "${var}=false" - done - # - # read command line partial build setting - # - del="" - sel_prg=${config#select=} - for separator in " " "," ";" ":" "/" "|"; do - [[ "${sel_prg/${separator}}" == "${sel_prg}" ]] || { - del=${separator} - sel_prg=${sel_prg//${del}/ } - } - done - if [[ ${del} == "" ]]; then - { - short_prg=${sel_prg} - found=false - for prg in "${all_prg[@]}"; do - [[ ${prg} == "Build_${short_prg}" ]] && { - found=true - eval "${prg}=true" - break - } - done - ${found} || { - echo "*** Unrecognized program name \"${short_prg}\" in command line" 2>&1 - exit 4 - } - } || { - for short_prg in ${sel_prg}; do - found=false - for prg in "${all_prg[@]}"; do - [[ ${prg} == "Build_${short_prg}" ]] && { - found=true - eval "${prg}=true" - break - } - done - ${found} || { - echo "*** Unrecognized program name \"${short_prg}\" in command line" 2>&1 - exit 5 - } - done - } - fi - else - echo "*** Unrecognized command line option \"${config}\"" 2>&1 - exit 6 - fi -} - - -usage() { - cat << EOF 2>&1 -Usage: ${BASH_SOURCE[0]} [-c config_file][-h][-v] - -h: - Print this help message and exit - -v: - Turn on verbose mode - -c config_file: - Override default config file to determine whether to build each program [default: gfs_build.cfg] -EOF -} - - -# -# read command line arguments; processing config file -# -declare -a parse_argv=() -verbose=false 
-config_file="gfs_build.cfg" -# Reset option counter for when this script is sourced -OPTIND=1 -while getopts ":c:h:v" option; do - case "${option}" in - c) config_file="${OPTARG}";; - h) usage;; - v) - verbose=true - parse_argv+=( "--verbose" ) - ;; - :) - echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" - usage - ;; - *) - echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" - usage - ;; - esac -done - -shift $((OPTIND-1)) - -parse_argv+=( "config=${config_file}" ) - -# -# call arguments retriever/config parser -# -parse_cfg ${#parse_argv[@]} "${parse_argv[@]}" "${Build_prg[@]}" - -# -# print values of build array -# -${verbose} && { - echo "INFO: partial build settings:" - for var in "${Build_prg[@]}"; do - echo -n " ${var}: " - "${!var}" && echo True || echo False - done -} - -echo "=== end of partial build setting ===" > /dev/null - diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd new file mode 160000 index 0000000000..3ba8dff29a --- /dev/null +++ b/sorc/ufs_model.fd @@ -0,0 +1 @@ +Subproject commit 3ba8dff29a7395445ce5da8c9b48cfe0ff8a668a diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd new file mode 160000 index 0000000000..892b693ba4 --- /dev/null +++ b/sorc/ufs_utils.fd @@ -0,0 +1 @@ +Subproject commit 892b693ba49b37c23f08cc8e18550ba72e108762 diff --git a/sorc/verif-global.fd b/sorc/verif-global.fd new file mode 160000 index 0000000000..c267780a12 --- /dev/null +++ b/sorc/verif-global.fd @@ -0,0 +1 @@ +Subproject commit c267780a1255fa7db052c745cf9c78b7dc6a2695 diff --git a/sorc/wxflow b/sorc/wxflow new file mode 160000 index 0000000000..528f5abb49 --- /dev/null +++ b/sorc/wxflow @@ -0,0 +1 @@ +Subproject commit 528f5abb49e80751f83ebd6eb0a87bc70012bb24