From 9b9942f918d5e3ba5b8158b894e43bc44f7185e5 Mon Sep 17 00:00:00 2001
From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com>
Date: Thu, 29 Jun 2023 12:59:06 -0400
Subject: [PATCH] [develop] Bring change of production/AQM.v7 back to develop (#828)

Multiple scripts relevant to AQM are updated to meet the NCO standards and the
EE2 reviewer's comments, based on the production/AQM.v7 branch:

* The NCO variables such as NET, RUN, and envir are renamed with the suffix
  _default, meaning the default value. These variables should be defined in the
  job cards, as specified in the NCO standards (p. 4, Table 1). In the current
  Rocoto-based workflow, job cards do not exist (the workflow uses the XML file
  instead), so the variables can be set in the configuration file and the
  ush/machine/ file. For ecFlow, however, they should be replaced with the
  values from the job cards. (The ecFlow option will be added in a separate PR
  later.) This is handled in the ush/job_preamble.sh script, which is sourced
  by the J-job scripts.
* NCO wants COMIN/COMOUT, COMINmodel, and DATAROOT to be defined with the
  compath call available in prod_util.
* NCO does not want a new directory (COMINext) added to COM. The use of
  COMINext is removed.
* NCO wants the error-handling calls 'err_chk' and 'err_exit' from 'prod_util',
  which is available only on WCOSS2.
* NCO uses YES/NO flags while the SRW App uses TRUE/FALSE. To resolve this
  mismatch, boolify calls for the NCO variables are added to job_preamble.sh.
* The 'pgmerr' function is added to POST_STEP.
* The forecast hour in some output file names is changed to 3 digits.
* The working directory DATA is defined and created in job_preamble.sh for the
  NCO mode; therefore, an if-statement for the 'community' mode is added to the
  J-job scripts.
* The KEEPDATA capability does not work correctly for the shared tasks.
* The input data directories are renamed to DCOMINdata.
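For reviewers scanning the diff below, the error-handling change repeated across
the ex-scripts follows roughly the shape sketched here. This is a minimal
illustration assembled from the hunks in this patch, not a verbatim excerpt from
any one script; "some_aqm_exec" is a placeholder executable name, and
PREP_STEP/POST_STEP, RUN_CMD_SERIAL, REDIRECT_OUT_ERR, err_chk, and
print_err_msg_exit are the existing workflow/prod_util helpers referenced in the
changed scripts.

    PREP_STEP
    eval ${RUN_CMD_SERIAL} ${EXECdir}/some_aqm_exec ${REDIRECT_OUT_ERR}
    export err=$?   # capture the exit status of the executable just run
    if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
      err_chk       # prod_util error handler, available only on WCOSS2
    else
      if [ $err -ne 0 ]; then
        print_err_msg_exit "Call to executable returned with nonzero exit code."
      fi
    fi
    POST_STEP

The same split appears as err_exit "${message_txt}" versus
print_err_msg_exit "${message_txt}" where a specific message is raised (for
example, a missing input file), and the forecast-hour strings in output file
names move from two digits to three via fhr3d=$( printf "%03d" "${fhr}" ).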
--- .gitignore | 1 - Externals.cfg | 4 +- jobs/JREGIONAL_AQM_ICS | 8 +- jobs/JREGIONAL_AQM_LBCS | 9 +- jobs/JREGIONAL_BIAS_CORRECTION_O3 | 9 +- jobs/JREGIONAL_BIAS_CORRECTION_PM25 | 9 +- jobs/JREGIONAL_FIRE_EMISSION | 10 +- jobs/JREGIONAL_GET_EXTRN_MDL_FILES | 7 +- jobs/JREGIONAL_MAKE_ICS | 8 +- jobs/JREGIONAL_MAKE_LBCS | 8 +- jobs/JREGIONAL_NEXUS_EMISSION | 8 +- jobs/JREGIONAL_NEXUS_GFS_SFC | 21 +- jobs/JREGIONAL_NEXUS_POST_SPLIT | 8 +- jobs/JREGIONAL_POINT_SOURCE | 8 +- jobs/JREGIONAL_POST_STAT_O3 | 9 +- jobs/JREGIONAL_POST_STAT_PM25 | 9 +- jobs/JREGIONAL_PRE_POST_STAT | 7 +- jobs/JREGIONAL_RUN_FCST | 4 +- jobs/JREGIONAL_RUN_POST | 28 +- modulefiles/tasks/wcoss2/python_srw.lua | 2 + parm/aqm.rc | 2 +- parm/wflow/default_workflow.yaml | 10 +- scripts/exregional_aqm_ics.sh | 43 ++-- scripts/exregional_aqm_lbcs.sh | 60 +++-- scripts/exregional_bias_correction_o3.sh | 239 ++++++++++++------ scripts/exregional_bias_correction_pm25.sh | 182 +++++++------ scripts/exregional_fire_emission.sh | 136 ++++++---- scripts/exregional_get_extrn_mdl_files.sh | 14 +- scripts/exregional_make_ics.sh | 75 +++--- scripts/exregional_make_lbcs.sh | 80 +++--- scripts/exregional_nexus_emission.sh | 92 +++++-- scripts/exregional_nexus_gfs_sfc.sh | 69 +++-- scripts/exregional_nexus_post_split.sh | 42 ++- scripts/exregional_point_source.sh | 21 +- scripts/exregional_post_stat_o3.sh | 83 +++--- scripts/exregional_post_stat_pm25.sh | 76 +++--- scripts/exregional_pre_post_stat.sh | 11 - scripts/exregional_run_fcst.sh | 111 ++++++-- scripts/exregional_run_post.sh | 23 +- tests/WE2E/run_WE2E_tests.py | 4 +- ...fig.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml | 2 +- ush/config.aqm.community.yaml | 2 +- ush/config.aqm.nco.realtime.yaml | 12 +- ush/config.nco.yaml | 6 +- ush/config_defaults.yaml | 123 ++++----- ush/create_aqm_rc_file.py | 10 +- ush/job_preamble.sh | 116 +++++++-- ush/load_modules_run_task.sh | 15 +- ush/machine/hera.yaml | 13 +- ush/machine/wcoss2.yaml | 18 +- ush/setup.py | 32 +-- ush/valid_param_vals.yaml | 2 +- 52 files changed, 1193 insertions(+), 708 deletions(-) diff --git a/.gitignore b/.gitignore index a727b940c0..b6da1c53a0 100644 --- a/.gitignore +++ b/.gitignore @@ -5,7 +5,6 @@ fix/ include/ lib/ share/ -modulefiles/extrn_comp_build/ sorc/*/ tests/WE2E/WE2E_tests_*.yaml tests/WE2E/*.txt diff --git a/Externals.cfg b/Externals.cfg index 1807bc4b3c..1338e4a6af 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -49,7 +49,7 @@ protocol = git repo_url = https://github.com/noaa-oar-arl/NEXUS # Specify either a branch name or a hash but not both. #branch = develop -hash = 3842818 +hash = 6a7a994 local_path = sorc/arl_nexus required = True @@ -58,7 +58,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/AQM-utils # Specify either a branch name or a hash but not both. 
#branch = develop -hash = 0a86f73 +hash = 694a139 local_path = sorc/AQM-utils required = True diff --git a/jobs/JREGIONAL_AQM_ICS b/jobs/JREGIONAL_AQM_ICS index b03f22dc2a..ce3e539cc9 100755 --- a/jobs/JREGIONAL_AQM_ICS +++ b/jobs/JREGIONAL_AQM_ICS @@ -77,8 +77,12 @@ mkdir_vrfy -p "${INPUT_DATA}" # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -mkdir_vrfy -p "${DATA}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_ICS}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_AQM_LBCS b/jobs/JREGIONAL_AQM_LBCS index 0b675a388f..7f13d12fa1 100755 --- a/jobs/JREGIONAL_AQM_LBCS +++ b/jobs/JREGIONAL_AQM_LBCS @@ -77,8 +77,13 @@ mkdir_vrfy -p "${INPUT_DATA}" # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -mkdir_vrfy -p "${DATA}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_LBCS}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi + # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_BIAS_CORRECTION_O3 b/jobs/JREGIONAL_BIAS_CORRECTION_O3 index 32b12accd5..6586d4427c 100755 --- a/jobs/JREGIONAL_BIAS_CORRECTION_O3 +++ b/jobs/JREGIONAL_BIAS_CORRECTION_O3 @@ -60,7 +60,14 @@ This is the J-job script for the task that runs BIAS-CORRECTION-O3. # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_O3}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi + +mkdir_vrfy -p ${COMOUTwmo} export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}" diff --git a/jobs/JREGIONAL_BIAS_CORRECTION_PM25 b/jobs/JREGIONAL_BIAS_CORRECTION_PM25 index 27c6728e9c..a0c0ba1b4a 100755 --- a/jobs/JREGIONAL_BIAS_CORRECTION_PM25 +++ b/jobs/JREGIONAL_BIAS_CORRECTION_PM25 @@ -60,7 +60,14 @@ This is the J-job script for the task that runs BIAS-CORRECTION-PM25. 
# #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_PM25}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi + +mkdir_vrfy -p ${COMOUTwmo} export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}" diff --git a/jobs/JREGIONAL_FIRE_EMISSION b/jobs/JREGIONAL_FIRE_EMISSION index 95b53ca4ff..ee540cfa6e 100755 --- a/jobs/JREGIONAL_FIRE_EMISSION +++ b/jobs/JREGIONAL_FIRE_EMISSION @@ -90,8 +90,12 @@ fi # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -mkdir_vrfy -p "${DATA}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_FIRE_EMISSION}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi # #----------------------------------------------------------------------- # @@ -99,7 +103,7 @@ mkdir_vrfy -p "${DATA}" # #----------------------------------------------------------------------- # -export FIRE_EMISSION_STAGING_DIR="${COMINext}/FIRE_EMISSION" +export FIRE_EMISSION_STAGING_DIR="${FIRE_EMISSION_STAGING_DIR:-${COMIN}/FIRE_EMISSION}" mkdir_vrfy -p "${FIRE_EMISSION_STAGING_DIR}" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES index 7ef397b738..718e920d65 100755 --- a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES @@ -30,7 +30,7 @@ # . $USHdir/source_util_funcs.sh source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh +. $USHdir/job_preamble.sh "TRUE" # #----------------------------------------------------------------------- # @@ -221,8 +221,7 @@ esac #----------------------------------------------------------------------- # if [ $RUN_ENVIR = "nco" ]; then - export EXTRN_MDL_STAGING_DIR="${COMINext}" - mkdir_vrfy -p "${EXTRN_MDL_STAGING_DIR}" + export EXTRN_MDL_STAGING_DIR="${EXTRN_MDL_STAGING_DIR:-${DATA}}" else export EXTRN_MDL_STAGING_DIR="${COMIN}/${EXTRN_MDL_NAME}/for_${ICS_OR_LBCS}" mkdir_vrfy -p "${EXTRN_MDL_STAGING_DIR}" @@ -245,7 +244,7 @@ Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." 
# #----------------------------------------------------------------------- # -job_postamble +job_postamble "FALSE" # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS index ffefa18ed5..679a9392bb 100755 --- a/jobs/JREGIONAL_MAKE_ICS +++ b/jobs/JREGIONAL_MAKE_ICS @@ -72,8 +72,12 @@ mkdir_vrfy -p "${INPUT_DATA_NWGES}" # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -mkdir_vrfy -p "${DATA}" +if [ $RUN_ENVIR = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_ICS}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS index 7b5c7f2622..35aba70846 100755 --- a/jobs/JREGIONAL_MAKE_LBCS +++ b/jobs/JREGIONAL_MAKE_LBCS @@ -71,8 +71,12 @@ mkdir_vrfy -p "${INPUT_DATA_NWGES}" # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -mkdir_vrfy -p "${DATA}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_LBCS}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_NEXUS_EMISSION b/jobs/JREGIONAL_NEXUS_EMISSION index b916fabf25..0f1a00d1a6 100755 --- a/jobs/JREGIONAL_NEXUS_EMISSION +++ b/jobs/JREGIONAL_NEXUS_EMISSION @@ -75,8 +75,12 @@ mkdir_vrfy -p "${INPUT_DATA}" # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -mkdir_vrfy -p "${DATA}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_EMISSION_${nspt}}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_NEXUS_GFS_SFC b/jobs/JREGIONAL_NEXUS_GFS_SFC index 184f6b7681..5930762106 100755 --- a/jobs/JREGIONAL_NEXUS_GFS_SFC +++ b/jobs/JREGIONAL_NEXUS_GFS_SFC @@ -17,7 +17,7 @@ # . $USHdir/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh +. $USHdir/job_preamble.sh "TRUE" # #----------------------------------------------------------------------- # @@ -91,21 +91,16 @@ fi # #----------------------------------------------------------------------- # -# Create the directory where the RAVE fire emission files should be stored -# -#----------------------------------------------------------------------- -# -export GFS_SFC_STAGING_DIR="${COMINext}/GFS_SFC" -mkdir_vrfy -p "${GFS_SFC_STAGING_DIR}" -# -#----------------------------------------------------------------------- -# # Set the run directory # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -mkdir_vrfy -p "${DATA}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_GFS_SFC}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi # #----------------------------------------------------------------------- # @@ -123,7 +118,7 @@ Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." 
# #----------------------------------------------------------------------- # -job_postamble +job_postamble "FALSE" # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_NEXUS_POST_SPLIT b/jobs/JREGIONAL_NEXUS_POST_SPLIT index 83054ab10b..fc21421f8d 100755 --- a/jobs/JREGIONAL_NEXUS_POST_SPLIT +++ b/jobs/JREGIONAL_NEXUS_POST_SPLIT @@ -67,8 +67,12 @@ mkdir_vrfy -p "${INPUT_DATA}" # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -mkdir_vrfy -p "${DATA}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_POST_SPLIT}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_POINT_SOURCE b/jobs/JREGIONAL_POINT_SOURCE index 2164d822a8..d3a02b7534 100755 --- a/jobs/JREGIONAL_POINT_SOURCE +++ b/jobs/JREGIONAL_POINT_SOURCE @@ -66,8 +66,12 @@ mkdir_vrfy -p "${INPUT_DATA}" # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -mkdir_vrfy -p "${DATA}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POINT_SOURCE}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi # # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_POST_STAT_O3 b/jobs/JREGIONAL_POST_STAT_O3 index 99cdaeb789..af8652e05d 100755 --- a/jobs/JREGIONAL_POST_STAT_O3 +++ b/jobs/JREGIONAL_POST_STAT_O3 @@ -60,7 +60,14 @@ This is the J-job script for the task that runs POST-STAT-O3. # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_O3}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi + +mkdir_vrfy -p ${COMOUTwmo} export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}" # diff --git a/jobs/JREGIONAL_POST_STAT_PM25 b/jobs/JREGIONAL_POST_STAT_PM25 index b0f1b2bfab..81ec818075 100755 --- a/jobs/JREGIONAL_POST_STAT_PM25 +++ b/jobs/JREGIONAL_POST_STAT_PM25 @@ -60,7 +60,14 @@ This is the J-job script for the task that runs POST-UPP-STAT. # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" +DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_PM25}" +if [ "${RUN_ENVIR}" = "community" ]; then + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi + +mkdir_vrfy -p ${COMOUTwmo} export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}" # diff --git a/jobs/JREGIONAL_PRE_POST_STAT b/jobs/JREGIONAL_PRE_POST_STAT index 7611c6bd60..41cb25d6a0 100755 --- a/jobs/JREGIONAL_PRE_POST_STAT +++ b/jobs/JREGIONAL_PRE_POST_STAT @@ -60,7 +60,12 @@ This is the J-job script for the task that runs POST-UPP-STAT. 
# #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_PRE_POST_STAT}" + check_for_preexist_dir_file "$DATA" "delete" + mkdir_vrfy -p $DATA + cd_vrfy $DATA +fi # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_FCST b/jobs/JREGIONAL_RUN_FCST index e2501c37e4..383739e172 100755 --- a/jobs/JREGIONAL_RUN_FCST +++ b/jobs/JREGIONAL_RUN_FCST @@ -66,6 +66,7 @@ the specified cycle. if [ $RUN_ENVIR = "nco" ]; then export INPUT_DATA="${COMIN}" else + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" fi # @@ -75,7 +76,6 @@ fi # #----------------------------------------------------------------------- # -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" mkdir_vrfy -p ${DATA}/INPUT mkdir_vrfy -p ${DATA}/RESTART # @@ -96,7 +96,7 @@ Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # -job_postamble +job_postamble "FALSE" # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index ef783f34da..459431522f 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -56,14 +56,6 @@ This is the J-job script for the task that runs the post-processor (UPP) on the output files corresponding to a specified forecast hour. ========================================================================" # -#----------------------------------------------------------------------- -# -# Set the run directory. -# -#----------------------------------------------------------------------- -# -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" -# # If SUB_HOURLY_POST is not set to "TRUE", ensure that the forecast # minutes (fmn) are set to "00". This is necessary in order to pass # "fmn" into the post ex-script for the calculation of post_time. 
@@ -90,10 +82,11 @@ fi # #----------------------------------------------------------------------- # -if [ "${RUN_ENVIR}" != "nco" ]; then +if [ "${RUN_ENVIR}" = "community" ]; then + DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" export COMOUT="${DATA}/postprd" + mkdir_vrfy -p "${COMOUT}" fi -mkdir_vrfy -p "${COMOUT}" if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn" @@ -143,9 +136,11 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} )) FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} - fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) - if [ "${fhr}" = "${fcst_len_hrs}" ]; then - touch "${COMIN}/post_${PDY}${cyc}_task_complete.txt" + if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) + if [ "${fhr}" = "${fcst_len_hrs}" ]; then + touch "${COMIN}/post_${PDY}${cyc}_task_complete.txt" + fi fi fi # @@ -155,7 +150,12 @@ fi # #----------------------------------------------------------------------- # -job_postamble +fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) +if [ "${fhr}" = "${fcst_len_hrs}" ]; then + job_postamble "TRUE" +else + job_postamble +fi # #----------------------------------------------------------------------- # diff --git a/modulefiles/tasks/wcoss2/python_srw.lua b/modulefiles/tasks/wcoss2/python_srw.lua index 519f1cdf4a..0a3bc3207b 100644 --- a/modulefiles/tasks/wcoss2/python_srw.lua +++ b/modulefiles/tasks/wcoss2/python_srw.lua @@ -1,3 +1,5 @@ +load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) +load(pathJoin("craype", os.getenv("craype_ver"))) load(pathJoin("intel", os.getenv("intel_ver"))) load(pathJoin("python", os.getenv("python_ver"))) load(pathJoin("prod_util", os.getenv("prod_util_ver"))) diff --git a/parm/aqm.rc b/parm/aqm.rc index ad2c85940a..3d2ad32711 100644 --- a/parm/aqm.rc +++ b/parm/aqm.rc @@ -172,7 +172,7 @@ bio_format: netcdf bio_file: {{ aqm_rc_bio_file_fp }} bio_frequency: static bio_period: summer -bio_speciation_file: {{ aqm_bio_dir }}/gspro_biogenics_1mar2017.txt +bio_speciation_file: {{ dcominbio }}/gspro_biogenics_1mar2017.txt bio_speciation_profile: B10C6 bio_species:: AVG_NOAG_GROW 1.00000 AVG_NOAG_GROW gmN/hr diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml index 77231f8136..a2e20174ff 100644 --- a/parm/wflow/default_workflow.yaml +++ b/parm/wflow/default_workflow.yaml @@ -5,25 +5,25 @@ rocoto: entities: ACCOUNT: '{{ user.ACCOUNT }}' CCPA_OBS_DIR: '{{ platform.CCPA_OBS_DIR }}' - COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/{}.@Y@m@d/@H".format(nco.COMIN_BASEDIR,nco.RUN)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}' + COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/{}.@Y@m@d/@H".format(nco.COMIN_BASEDIR,nco.RUN_default)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}' COMINgfs: '{{ platform.get("COMINgfs") }}' - FCST_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/run_fcst_mem#mem#.{}_@Y@m@d@H".format(nco.DATAROOT,workflow.WORKFLOW_ID)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}' + FCST_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/run_fcst_mem#mem#.{}_@Y@m@d@H".format(nco.DATAROOT_default,workflow.WORKFLOW_ID)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}' GLOBAL_VAR_DEFNS_FP: '{{ workflow.GLOBAL_VAR_DEFNS_FP }}' JOBSdir: '{{ user.JOBSdir }}' LOAD_MODULES_RUN_TASK_FP: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }}' - LOGDIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/@Y@m@d".format(nco.LOGBASEDIR)}}{% else %}{{nco.LOGBASEDIR 
}}{% endif %}' + LOGDIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/@Y@m@d".format(nco.LOGBASEDIR_default)}}{% else %}{{nco.LOGBASEDIR_default }}{% endif %}' LOGEXT: '{% if user.RUN_ENVIR == "nco" %}{{".{}.log".format(workflow.WORKFLOW_ID)}}{% else %}{{".log"}}{% endif %}' MRMS_OBS_DIR: '{{ platform.MRMS_OBS_DIR }}' NCORES_PER_NODE: '{{ platform.NCORES_PER_NODE }}' NDAS_OBS_DIR: '{{ platform.NDAS_OBS_DIR }}' - NET: '{{ nco.NET }}' + NET: '{{ nco.NET_default }}' PARTITION_DEFAULT: '{{ platform.get("PARTITION_DEFAULT") }}' PARTITION_FCST: '{{ platform.get("PARTITION_FCST") }}' PARTITION_HPSS: '{{ platform.get("PARTITION_HPSS") }}' QUEUE_DEFAULT: '{{ platform.get("QUEUE_DEFAULT") }}' QUEUE_FCST: '{{ platform.get("QUEUE_FCST") }}' QUEUE_HPSS: '{{ platform.get("QUEUE_HPSS") }}' - RUN: '{{ nco.RUN }}' + RUN: '{{ nco.RUN_default }}' SCRIPTSdir: '{{ user.SCRIPTSdir }}' SLASH_ENSMEM_SUBDIR: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% else %}{{ "/" }}{% endif %}' USHdir: '{{ user.USHdir }}' diff --git a/scripts/exregional_aqm_ics.sh b/scripts/exregional_aqm_ics.sh index 73fd27cfb0..c59bf87f81 100755 --- a/scripts/exregional_aqm_ics.sh +++ b/scripts/exregional_aqm_ics.sh @@ -72,8 +72,12 @@ if [ ! -r ${fv_tracer_file} ]; then print_info_msg " Tracer file found: \"${fv_tracer_file}\"" else - print_err_msg_exit "\ - No suitable tracer restart file found." + message_txt="No suitable tracer restart file found." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2"]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi fi fi fi @@ -81,16 +85,6 @@ fi # #----------------------------------------------------------------------- # -# Move to work directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_AQM_ICS" -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA -# -#----------------------------------------------------------------------- -# # Add air quality tracer variables from previous cycle's restart output # to atmosphere's initial condition file according to the steps below: # @@ -112,13 +106,28 @@ print_info_msg " cp_vrfy ${gfs_ic_file} ${wrk_ic_file} python3 ${HOMEdir}/sorc/AQM-utils/python_utils/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_file}" +export err=$? +if [ $err -ne 0 ]; then + message_txt="Call to python script \"add_aqm_ics.py\" failed." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi -ncatted -a checksum,,d,s, tmp1.nc || print_err_msg_exit "\ -Call to NCATTED returned with nonzero exit code." - -mv_vrfy tmp1.nc ${gfs_ic_file} +ncatted -a checksum,,d,s, tmp1.nc +export err=$? +if [ $err -ne 0 ]; then + message_txt="Call to NCATTED returned with nonzero exit code." 
+ if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi -rm_vrfy gfs.nc +cp_vrfy tmp1.nc ${gfs_ic_file} unset fv_tracer_file unset wrk_ic_file diff --git a/scripts/exregional_aqm_lbcs.sh b/scripts/exregional_aqm_lbcs.sh index ccf09f52e2..24bac24ad7 100755 --- a/scripts/exregional_aqm_lbcs.sh +++ b/scripts/exregional_aqm_lbcs.sh @@ -73,16 +73,6 @@ fi # #----------------------------------------------------------------------- # -# Move to working directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_AQM_LBCS" -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA -# -#----------------------------------------------------------------------- -# # Add chemical LBCS # #----------------------------------------------------------------------- @@ -107,20 +97,33 @@ if [ ${DO_AQM_CHEM_LBCS} = "TRUE" ]; then ext_lbcs_file=${AQM_LBCS_FILES} chem_lbcs_fn=${ext_lbcs_file///${mm}} - chem_lbcs_fp=${AQM_LBCS_DIR}/${chem_lbcs_fn} + chem_lbcs_fp=${DCOMINchem_lbcs}/${chem_lbcs_fn} if [ -f ${chem_lbcs_fp} ]; then #Copy the boundary condition file to the current location cp_vrfy ${chem_lbcs_fp} . else - print_err_msg_exit "\ -The chemical LBC files do not exist: - CHEM_BOUNDARY_CONDITION_FILE = \"${chem_lbcs_fp}\"" + message_txt="The chemical LBC files do not exist: + CHEM_BOUNDARY_CONDITION_FILE = \"${chem_lbcs_fp}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi fi for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do fhr=$( printf "%03d" "${hr}" ) if [ -r ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fhr}.nc ]; then ncks -A ${chem_lbcs_fn} ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fhr}.nc + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCKS returned with nonzero exit code." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi fi done @@ -152,7 +155,7 @@ if [ ${DO_AQM_GEFS_LBCS} = "TRUE" ]; then if [ "${DO_REAL_TIME}" = "TRUE" ]; then AQM_MOFILE_FP="${COMINgefs}/gefs.${yyyymmdd}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${AQM_MOFILE_FN}" else - AQM_MOFILE_FP="${AQM_GEFS_DIR}/${yyyymmdd}/${AQM_GEFS_FILE_CYC}/${AQM_MOFILE_FN}" + AQM_MOFILE_FP="${DCOMINgefs}/${yyyymmdd}/${AQM_GEFS_FILE_CYC}/${AQM_MOFILE_FN}" fi # Check if GEFS aerosol files exist @@ -161,8 +164,17 @@ if [ ${DO_AQM_GEFS_LBCS} = "TRUE" ]; then fhr=$( printf "%03d" "${hr_mod}" ) AQM_MOFILE_FHR_FP="${AQM_MOFILE_FP}${fhr}.nemsio" if [ ! -e "${AQM_MOFILE_FHR_FP}" ]; then - print_err_msg_exit "The GEFS file (AQM_MOFILE_FHR_FP) for LBCs does not exist: - AQM_MOFILE_FHR_FP = \"${AQM_MOFILE_FHR_FP}\"" + message_txt="The GEFS file (AQM_MOFILE_FHR_FP) for LBCs of \"${cycle}\" does not exist: + AQM_MOFILE_FHR_FP = \"${AQM_MOFILE_FHR_FP}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + message_warning="WARNING: ${message_txt}" + print_info_msg "${message_warning}" + if [ ! -z "${maillist}" ]; then + echo "${message_warning}" | mail.py $maillist + fi + else + print_err_msg_exit "${message_txt}" + fi fi done @@ -214,11 +226,17 @@ Please ensure that you've built this executable." 
#---------------------------------------------------------------------- # PREP_STEP - eval ${RUN_CMD_AQMLBC} ${exec_fp} ${REDIRECT_OUT_ERR} || \ - print_err_msg_exit "\ -Call to executable (exec_fp) to generate chemical and GEFS LBCs -file for RRFS-CMAQ failed: + eval ${RUN_CMD_AQMLBC} ${exec_fp} ${REDIRECT_OUT_ERR} + export err=$? + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk + else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable (exec_fp) to generate chemical and +GEFS LBCs file for RRFS-CMAQ failed: exec_fp = \"${exec_fp}\"" + fi + fi POST_STEP print_info_msg " diff --git a/scripts/exregional_bias_correction_o3.sh b/scripts/exregional_bias_correction_o3.sh index 82a099066f..79ed056f59 100755 --- a/scripts/exregional_bias_correction_o3.sh +++ b/scripts/exregional_bias_correction_o3.sh @@ -73,18 +73,6 @@ else All executables will be submitted with command \'${RUN_CMD_SERIAL}\'." fi -# -#----------------------------------------------------------------------- -# -# Move to the working directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_BIAS_CORRECTION_O3" -rm_vrfy -rf "$DATA" -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA - yyyy=${PDY:0:4} yyyymm=${PDY:0:6} yyyy_m1=${PDYm1:0:4} @@ -92,8 +80,8 @@ yyyymm_m1=${PDYm1:0:6} yyyy_m2=${PDYm2:0:4} yyyymm_m2=${PDYm2:0:6} yyyy_m3=${PDYm3:0:4} -yyyymm_m3=${PDYm3:0:6} - +yyyymm_m3=${PDYm3:0:6} + # #----------------------------------------------------------------------- # @@ -115,26 +103,53 @@ fi # STEP 1: Retrieve AIRNOW observation data #----------------------------------------------------------------------------- -# Link the historical airnow data mkdir_vrfy -p "${DATA}/data" -ln_vrfy -sf ${AQM_AIRNOW_HIST_DIR}/bcdata* "${DATA}/data" -if [ -d "${DATA}/data/bcdata.${yyyymm}" ]; then - rm_vrfy -rf "${DATA}/data/bcdata.${yyyymm}" - mkdir_vrfy -p "${DATA}/data/bcdata.${yyyymm}" - cp_vrfy -rL "${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/airnow" "${DATA}/data/bcdata.${yyyymm}" - cp_vrfy -rL "${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/interpolated" "${DATA}/data/bcdata.${yyyymm}" -fi -# Retrieve real-time airnow data for the last three days -if [ "${DO_REAL_TIME}" = "TRUE" ]; then - mkdir -p ${DATA}/data/bcdata.${yyyymm_m1}/airnow/${yyyy_m1}/${PDYm1}/b008 - mkdir -p ${DATA}/data/bcdata.${yyyymm_m2}/airnow/${yyyy_m2}/${PDYm2}/b008 - mkdir -p ${DATA}/data/bcdata.${yyyymm_m3}/airnow/${yyyy_m3}/${PDYm3}/b008 +# Retrieve real-time airnow data for the last three days and convert them into netcdf. +# In the following for-loop, pdym stands for previous (m) day of the present day (PDY) +# in the NCO standards, i.e. 
PDYm1: 1day ago, PDYm2: 2days ago, PDYm3: 3days ago +for i_pdym in {1..3}; do + case $i_pdym in + 1) + cvt_yyyy="${yyyy_m1}" + cvt_yyyymm="${yyyymm_m1}" + cvt_pdy="${PDYm1}" + ;; + 2) + cvt_yyyy="${yyyy_m2}" + cvt_yyyymm="${yyyymm_m2}" + cvt_pdy="${PDYm2}" + ;; + 3) + cvt_yyyy="${yyyy_m3}" + cvt_yyyymm="${yyyymm_m3}" + cvt_pdy="${PDYm3}" + ;; + esac + + cvt_input_dir="${DATA}/data/bcdata.${cvt_yyyymm}/airnow/csv" + cvt_output_dir="${DATA}/data/bcdata.${cvt_yyyymm}/airnow/netcdf" + cvt_input_fn="HourlyAQObs_YYYYMMDDHH.dat" + cvt_output_fn="HourlyAQObs.YYYYMMDD.nc" + cvt_input_fp="${cvt_input_dir}/YYYY/YYYYMMDD/${cvt_input_fn}" + cvt_output_fp="${cvt_output_dir}/YYYY/YYYYMMDD/${cvt_output_fn}" + + mkdir_vrfy -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + mkdir_vrfy -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}" + cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" - cp_vrfy ${COMINairnow}/${PDYm1}/b008/xx021 ${DATA}/data/bcdata.${yyyymm_m1}/airnow/${yyyy_m1}/${PDYm1}/b008 - cp_vrfy ${COMINairnow}/${PDYm2}/b008/xx021 ${DATA}/data/bcdata.${yyyymm_m2}/airnow/${yyyy_m2}/${PDYm2}/b008 - cp_vrfy ${COMINairnow}/${PDYm3}/b008/xx021 ${DATA}/data/bcdata.${yyyymm_m3}/airnow/${yyyy_m3}/${PDYm3}/b008 -fi + PREP_STEP + eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} + export err=$? + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk + else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code." + fi + fi + POST_STEP +done #----------------------------------------------------------------------------- # STEP 2: Extracting PM2.5, O3, and met variables from CMAQ input and outputs @@ -174,48 +189,90 @@ mkdir_vrfy -p ${DATA}/data/site-lists.interp mkdir_vrfy -p ${DATA}/out/ozone/${yyyy} mkdir_vrfy -p ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} -cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.ozone.20220724.12z.list ${DATA}/data/site-lists.interp +cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.ozone.20230331.12z.list ${DATA}/data/site-lists.interp cp_vrfy ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords cp_vrfy ${PARMaqm_utils}/bias_correction/config.interp.ozone.7-vars_${id_domain}.${cyc}z ${DATA} PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.ozone.7-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} || print_err_msg_exit "Call to executable to run AQM_BIAS_INTERPOLATE returned with nonzero exit code." +eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.ozone.7-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_BIAS_INTERPOLATE returned with nonzero exit code." 
+ fi +fi POST_STEP cp_vrfy ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} - cp_vrfy ${DATA}/out/ozone/${yyyy}/*nc ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} - - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/airnow/${yyyy}/${PDY}/b008 - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m1}/airnow/${yyyy_m1}/${PDYm1}/b008 - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m2}/airnow/${yyyy_m2}/${PDYm2}/b008 - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m3}/airnow/${yyyy_m3}/${PDYm3}/b008 - cp_vrfy ${COMINairnow}/${PDY}/b008/xx021 ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/airnow/${yyyy}/${PDY}/b008 - cp_vrfy ${COMINairnow}/${PDYm1}/b008/xx021 ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m1}/airnow/${yyyy_m1}/${PDYm1}/b008 - cp_vrfy ${COMINairnow}/${PDYm2}/b008/xx021 ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m2}/airnow/${yyyy_m2}/${PDYm2}/b008 - cp_vrfy ${COMINairnow}/${PDYm3}/b008/xx021 ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m3}/airnow/${yyyy_m3}/${PDYm3}/b008 - - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} - cp_vrfy ${COMIN}/${NET}.${cycle}.*sfc*.nc ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} + mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} + cp_vrfy ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} + + for i_pdym in {0..3}; do + case $i_pdym in + 0) + cvt_yyyy="${yyyy}" + cvt_yyyymm="${yyyymm}" + cvt_pdy="${PDY}" + ;; + 1) + cvt_yyyy="${yyyy_m1}" + cvt_yyyymm="${yyyymm_m1}" + cvt_pdy="${PDYm1}" + ;; + 2) + cvt_yyyy="${yyyy_m2}" + cvt_yyyymm="${yyyymm_m2}" + cvt_pdy="${PDYm2}" + ;; + 3) + cvt_yyyy="${yyyy_m3}" + cvt_yyyymm="${yyyymm_m3}" + cvt_pdy="${PDYm3}" + ;; + esac + # CSV and NetCDF files + mkdir_vrfy -p ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/csv/${cvt_yyyy}/${cvt_pdy} + mkdir_vrfy -p ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy} + if [ "${i_pdym}" != "0" ]; then + cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/csv/${cvt_yyyy}/${cvt_pdy} + cp_vrfy ${DATA}/data/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy}/HourlyAQObs.${cvt_pdy}.nc ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy} + fi + done + mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} + cp_vrfy ${COMIN}/${NET}.${cycle}.*sfc*.nc ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} fi #----------------------------------------------------------------------------- # STEP 4: Performing Bias Correction for Ozone #----------------------------------------------------------------------------- +rm_vrfy -rf ${DATA}/data/bcdata* + +ln_vrfy -sf ${COMINbicor}/bcdata* "${DATA}/data" + mkdir_vrfy -p ${DATA}/data/sites cp_vrfy ${PARMaqm_utils}/bias_correction/config.ozone.bias_corr_${id_domain}.${cyc}z ${DATA} PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.ozone.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} || print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code." +eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.ozone.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} +export err=$? 
+if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code." + fi +fi POST_STEP cp_vrfy ${DATA}/out/ozone.corrected* ${COMIN} if [ "${cyc}" = "12" ]; then - cp_vrfy ${DATA}/sites/sites.valid.ozone.${PDY}.${cyc}z.list ${DATA} + cp_vrfy ${DATA}/data/sites/sites.valid.ozone.${PDY}.${cyc}z.list ${DATA} fi #----------------------------------------------------------------------------- @@ -236,8 +293,15 @@ EOF1 # convert from netcdf to grib2 format PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ -Call to executable to run AQM_POST_BIAS_COR_GRIB2 returned with nonzero exit code." +eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_POST_BIAS_COR_GIRB2 returned with nonzero exit code." + fi +fi POST_STEP cp_vrfy ${DATA}/${NET}.${cycle}.awpozcon*bc*.grib2 ${COMOUT} @@ -264,10 +328,10 @@ EOF1 flag_run_bicor_max=yes # 06z needs b.nc to find current day output from 04Z to 06Z if [ "${cyc}" = "06" ]; then - if [ -s ${COMIN_PDY}/00/ozone.corrected.${PDY}.00z.nc ]; then - ln_vrfy -sf ${COMIN_PDY}/00/ozone.corrected.${PDY}.00z.nc b.nc - elif [ -s ${COMIN_PDYm1}/12/ozone.corrected.${PDYm1}.12z.nc ]; then - ln_vrfy -sf ${COMIN_PDYm1}/12/ozone.corrected.${PDYm1}.12z.nc b.nc + if [ -s ${COMIN}/../00/ozone.corrected.${PDY}.00z.nc ]; then + ln_vrfy -sf ${COMIN}/../00/ozone.corrected.${PDY}.00z.nc b.nc + elif [ -s ${COMINm1}/12/ozone.corrected.${PDYm1}.12z.nc ]; then + ln_vrfy -sf ${COMINm1}/12/ozone.corrected.${PDYm1}.12z.nc b.nc chk=0 else flag_run_bicor_max=no @@ -276,20 +340,20 @@ EOF1 if [ "${cyc}" = "12" ]; then # 12z needs b.nc to find current day output from 04Z to 06Z - if [ -s ${COMIN_PDY}/00/ozone.corrected.${PDY}.00z.nc ]; then - ln_vrfy -sf ${COMIN_PDY}/00/ozone.corrected.${PDY}.00z.nc b.nc - elif [ -s ${COMIN_PDYm1}/12/ozone.corrected.${PDYm1}.12z.nc ]; then - ln_vrfy -sf ${COMIN_PDYm1}/12/ozone.corrected.${PDYm1}.12z.nc b.nc + if [ -s ${COMIN}/../00/ozone.corrected.${PDY}.00z.nc ]; then + ln_vrfy -sf ${COMIN}/../00/ozone.corrected.${PDY}.00z.nc b.nc + elif [ -s ${COMINm1}/12/ozone.corrected.${PDYm1}.12z.nc ]; then + ln_vrfy -sf ${COMINm1}/12/ozone.corrected.${PDYm1}.12z.nc b.nc chk=0 else flag_run_bicor_max=no fi # 12z needs c.nc to find current day output from 07Z to 12z - if [ -s ${COMIN_PDY}/06/ozone.corrected.${PDY}.06z.nc ]; then - ln_vrfy -sf ${COMIN_PDY}/06/ozone.corrected.${PDY}.06z.nc c.nc - elif [ -s ${COMIN_PDYm1}/12/ozone.corrected.${PDYm1}.12z.nc ]; then - ln_vrfy -sf ${COMIN_PDYm1}/12/ozone.corrected.${PDYm1}.12z.nc c.nc + if [ -s ${COMIN}/../06/ozone.corrected.${PDY}.06z.nc ]; then + ln_vrfy -sf ${COMIN}/../06/ozone.corrected.${PDY}.06z.nc c.nc + elif [ -s ${COMINm1}/12/ozone.corrected.${PDYm1}.12z.nc ]; then + ln_vrfy -sf ${COMINm1}/12/ozone.corrected.${PDYm1}.12z.nc c.nc chk1=0 else flag_run_bicor_max=no @@ -301,8 +365,15 @@ EOF1 # write out grib2 format #------------------------------------------------- PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ - Call to executable to run AQM_POST_MAXI_BIAS_COR_GRIB2 returned with nonzero exit code." 
+ eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} + export err=$? + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk + else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_POST_MAXI_BIAS_COR_GRIB2 returned with nonzero exit code." + fi + fi POST_STEP # split into max_1h and max_8h files and copy to grib227 @@ -316,7 +387,7 @@ EOF1 cp_vrfy ${DATA}/${NET}.${cycle}.max_*hr_o3_bc.*.grib2 ${COMOUT} - if [ "$SENDDBN" = "YES" ]; then + if [ "$SENDDBN" = "TRUE" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_o3_bc.227.grib2 ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_8hr_o3_bc.227.grib2 fi @@ -340,13 +411,13 @@ EOF1 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.227 done - # Post Files to COMOUT - cp_vrfy awpaqm.${cycle}.*o3-max-bc.227.grib2 ${COMOUT} + # Post Files to COMOUTwmo + cp_vrfy awpaqm.${cycle}.*o3-max-bc.227.grib2 ${COMOUTwmo} # Distribute Data - if [ "${SENDDBN_NTC}" = "YES" ] ; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.1ho3-max-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.8ho3-max-bc.227.grib2 + if [ "${SENDDBN_NTC}" = "TRUE" ] ; then + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1ho3-max-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.8ho3-max-bc.227.grib2 fi fi fi @@ -356,17 +427,17 @@ rm_vrfy -rf tmpfile fhr=01 while [ "${fhr}" -le "${FCST_LEN_HRS}" ]; do - fhr2d=$( printf "%02d" "${fhr}" ) + fhr3d=$( printf "%03d" "${fhr}" ) - cp_vrfy ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr2d}.${id_domain}.grib2 ${COMOUT} + cp_vrfy ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 ${COMOUT} # create GRIB file to convert to grid 227 then to GRIB2 for NDFD - cat ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr2d}.${id_domain}.grib2 >> tmpfile + cat ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 >> tmpfile if [ "${fhr}" -le "07" ]; then - cat ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr2d}.${id_domain}.grib2 >> tmpfile.1hr + cat ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 >> tmpfile.1hr else - wgrib2 ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr2d}.${id_domain}.grib2 -d 1 -append -grib tmpfile.1hr - wgrib2 ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr2d}.${id_domain}.grib2 -d 2 -append -grib tmpfile.8hr + wgrib2 ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 -d 1 -append -grib tmpfile.1hr + wgrib2 ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 -d 2 -append -grib tmpfile.8hr fi (( fhr=fhr+1 )) done @@ -391,7 +462,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then cp_vrfy ${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 ${COMOUT} fi -if [ "${SENDDBN}" = "YES" ] ; then +if [ "${SENDDBN}" = "TRUE" ] ; then ${DBNROOT}/bin/dbn_alert MODEL AQM_CONC ${job} ${COMOUT}/${NET}.${cycle}.ave_1hr_o3_bc.227.grib2 if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_CONC ${job} ${COMOUT}/${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 @@ -438,14 +509,14 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT51=awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 - # Post Files to COMOUT - cp_vrfy awpaqm.${cycle}.${hr}ho3-bc.227.grib2 
${COMOUT} - cp_vrfy awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${COMOUT} + # Post Files to COMOUTwmo + cp_vrfy awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${COMOUTwmo} + cp_vrfy awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${COMOUTwmo} # Distribute Data - if [ "${SENDDBN}" = "YES" ]; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.${hr}ho3-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 + if [ "${SENDDBN}" = "TRUE" ]; then + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.${hr}ho3-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 fi done fi diff --git a/scripts/exregional_bias_correction_pm25.sh b/scripts/exregional_bias_correction_pm25.sh index 2e49e9554b..05348ecbc8 100755 --- a/scripts/exregional_bias_correction_pm25.sh +++ b/scripts/exregional_bias_correction_pm25.sh @@ -73,18 +73,6 @@ else All executables will be submitted with command \'${RUN_CMD_SERIAL}\'." fi -# -#----------------------------------------------------------------------- -# -# Move to the working directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_BIAS_CORRECTION_PM25" -rm_vrfy -rf $DATA -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA - yyyy=${PDY:0:4} yyyymm=${PDY:0:6} yyyy_m1=${PDYm1:0:4} @@ -115,26 +103,53 @@ fi # STEP 1: Retrieve AIRNOW observation data #----------------------------------------------------------------------------- -# Link the historical airnow data mkdir_vrfy -p "${DATA}/data" -ln_vrfy -sf ${AQM_AIRNOW_HIST_DIR}/bcdata* "${DATA}/data" -if [ -d "${DATA}/data/bcdata.${yyyymm}" ]; then - rm_vrfy -rf "${DATA}/data/bcdata.${yyyymm}" - mkdir_vrfy -p "${DATA}/data/bcdata.${yyyymm}" - cp_vrfy -rL "${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/airnow" "${DATA}/data/bcdata.${yyyymm}" - cp_vrfy -rL "${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/interpolated" "${DATA}/data/bcdata.${yyyymm}" -fi - -# Retrieve real-time airnow data for the last three days -if [ "${DO_REAL_TIME}" = "TRUE" ]; then - mkdir -p ${DATA}/data/bcdata.${yyyymm_m1}/airnow/${yyyy_m1}/${PDYm1}/b008 - mkdir -p ${DATA}/data/bcdata.${yyyymm_m2}/airnow/${yyyy_m2}/${PDYm2}/b008 - mkdir -p ${DATA}/data/bcdata.${yyyymm_m3}/airnow/${yyyy_m3}/${PDYm3}/b008 - cp_vrfy ${COMINairnow}/${PDYm1}/b008/xx031 ${DATA}/data/bcdata.${yyyymm_m1}/airnow/${yyyy_m1}/${PDYm1}/b008 - cp_vrfy ${COMINairnow}/${PDYm2}/b008/xx031 ${DATA}/data/bcdata.${yyyymm_m2}/airnow/${yyyy_m2}/${PDYm2}/b008 - cp_vrfy ${COMINairnow}/${PDYm3}/b008/xx031 ${DATA}/data/bcdata.${yyyymm_m3}/airnow/${yyyy_m3}/${PDYm3}/b008 -fi +# Retrieve real-time airnow data for the last three days. +# In the following for-loop, pdym stands for previous (m) day of the present day (PDY) +# in the NCO standards, i.e. 
PDYm1: 1day ago, PDYm2: 2days ago, PDYm3: 3days ago +for i_pdym in {1..3}; do + case $i_pdym in + 1) + cvt_yyyy="${yyyy_m1}" + cvt_yyyymm="${yyyymm_m1}" + cvt_pdy="${PDYm1}" + ;; + 2) + cvt_yyyy="${yyyy_m2}" + cvt_yyyymm="${yyyymm_m2}" + cvt_pdy="${PDYm2}" + ;; + 3) + cvt_yyyy="${yyyy_m3}" + cvt_yyyymm="${yyyymm_m3}" + cvt_pdy="${PDYm3}" + ;; + esac + + cvt_input_dir="${DATA}/data/bcdata.${cvt_yyyymm}/airnow/csv" + cvt_output_dir="${DATA}/data/bcdata.${cvt_yyyymm}/airnow/netcdf" + cvt_input_fn="HourlyAQObs_YYYYMMDDHH.dat" + cvt_output_fn="HourlyAQObs.YYYYMMDD.nc" + cvt_input_fp="${cvt_input_dir}/YYYY/YYYYMMDD/${cvt_input_fn}" + cvt_output_fp="${cvt_output_dir}/YYYY/YYYYMMDD/${cvt_output_fn}" + + mkdir_vrfy -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + mkdir_vrfy -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}" + cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + + PREP_STEP + eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} + export err=$? + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk + else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code." + fi + fi + POST_STEP +done #----------------------------------------------------------------------------- # STEP 2: Extracting PM2.5, O3, and met variables from CMAQ input and outputs @@ -174,34 +189,37 @@ mkdir_vrfy -p ${DATA}/data/site-lists.interp mkdir_vrfy -p ${DATA}/out/pm25/${yyyy} mkdir_vrfy -p ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} -cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.pm25.20220724.12z.list ${DATA}/data/site-lists.interp +cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.pm25.20230331.12z.list ${DATA}/data/site-lists.interp cp_vrfy ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords cp_vrfy ${PARMaqm_utils}/bias_correction/config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${DATA} PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} || print_err_msg_exit "Call to executable to run AQM_BIAS_INTERPOLATE returned with nonzero exit code." +eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code." 
+ fi +fi POST_STEP cp_vrfy ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} - cp_vrfy ${DATA}/out/pm25/${yyyy}/*nc ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} - - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/airnow/${yyyy}/${PDY}/b008 - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m1}/airnow/${yyyy_m1}/${PDYm1}/b008 - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m2}/airnow/${yyyy_m2}/${PDYm2}/b008 - mkdir_vrfy -p ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m3}/airnow/${yyyy_m3}/${PDYm3}/b008 - cp_vrfy ${COMINairnow}/${PDY}/b008/xx031 ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm}/airnow/${yyyy}/${PDY}/b008 - cp_vrfy ${COMINairnow}/${PDYm1}/b008/xx031 ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m1}/airnow/${yyyy_m1}/${PDYm1}/b008 - cp_vrfy ${COMINairnow}/${PDYm2}/b008/xx031 ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m2}/airnow/${yyyy_m2}/${PDYm2}/b008 - cp_vrfy ${COMINairnow}/${PDYm3}/b008/xx031 ${AQM_AIRNOW_HIST_DIR}/bcdata.${yyyymm_m3}/airnow/${yyyy_m3}/${PDYm3}/b008 + mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} + cp_vrfy ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} fi #----------------------------------------------------------------------- # STEP 4: Performing Bias Correction for PM2.5 #----------------------------------------------------------------------- +rm_vrfy -rf ${DATA}/data/bcdata* + +ln_vrfy -sf ${COMINbicor}/bcdata* "${DATA}/data" + mkdir_vrfy -p ${DATA}/data/sites cp_vrfy ${PARMaqm_utils}/bias_correction/config.pm2.5.bias_corr_${id_domain}.${cyc}z ${DATA} @@ -209,13 +227,21 @@ cp_vrfy ${PARMaqm_utils}/bias_correction/site_blocking.pm2.5.2021.0427.2-sites.t cp_vrfy ${PARMaqm_utils}/bias_correction/bias_thresholds.pm2.5.2015.1030.32-sites.txt ${DATA} PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.pm2.5.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} || print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code." +eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.pm2.5.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code." + fi +fi POST_STEP cp_vrfy $DATA/out/pm2.5.corrected* ${COMIN} if [ "${cyc}" = "12" ]; then - cp_vrfy ${DATA}/sites/sites.valid.pm25.${PDY}.${cyc}z.list ${DATA} + cp_vrfy ${DATA}/data/sites/sites.valid.pm25.${PDY}.${cyc}z.list ${DATA} fi #------------------------------------------------------------------------ @@ -235,12 +261,19 @@ id_gribdomain=${id_domain} EOF1 PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ -Call to executable to run AQM_POST_BIAS_COR_GRIB2 returned with nonzero exit code." +eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_bias_cor_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_POST_BIAS_COR_GRIB2 returned with nonzero exit code." 
+ fi +fi POST_STEP cp_vrfy ${DATA}/${NET}.${cycle}.pm25*bc*.grib2 ${COMOUT} -if [ "$SENDDBN" = "YES" ]; then +if [ "$SENDDBN" = "TRUE" ]; then $DBNROOT/bin/dbn_alert MODEL AQM_PM ${job} ${COMOUT} fi @@ -266,10 +299,10 @@ EOF1 flag_run_bicor_max=yes # 06z needs b.nc to find current day output from 04Z to 06Z if [ "${cyc}" = "06" ]; then - if [ -s ${COMIN_PDY}/00/pm2.5.corrected.${PDY}.00z.nc ]; then - ln_vrfy -sf ${COMIN_PDY}/00/pm2.5.corrected.${PDY}.00z.nc b.nc - elif [ -s ${COMIN_PDYm1}/12/pm2.5.corrected.${PDYm1}.12z.nc ]; then - ln_vrfy -sf ${COMIN_PDYm1}/12/pm2.5.corrected.${PDYm1}.12z.nc b.nc + if [ -s ${COMIN}/../00/pm2.5.corrected.${PDY}.00z.nc ]; then + ln_vrfy -sf ${COMIN}/../00/pm2.5.corrected.${PDY}.00z.nc b.nc + elif [ -s ${COMINm1}/12/pm2.5.corrected.${PDYm1}.12z.nc ]; then + ln_vrfy -sf ${COMINm1}/12/pm2.5.corrected.${PDYm1}.12z.nc b.nc chk=0 else flag_run_bicor_max=no @@ -278,20 +311,20 @@ EOF1 if [ "${cyc}" = "12" ]; then # 12z needs b.nc to find current day output from 04Z to 06Z - if [ -s ${COMIN_PDY}/00/pm2.5.corrected.${PDY}.00z.nc ]; then - ln_vrfy -sf ${COMIN_PDY}/00/pm2.5.corrected.${PDY}.00z.nc b.nc - elif [ -s ${COMIN_PDYm1}/12/pm2.5.corrected.${PDYm1}.12z.nc ]; then - ln_vrfy -sf ${COMIN_PDYm1}/12/pm2.5.corrected.${PDYm1}.12z.nc b.nc + if [ -s ${COMIN}/../00/pm2.5.corrected.${PDY}.00z.nc ]; then + ln_vrfy -sf ${COMIN}/../00/pm2.5.corrected.${PDY}.00z.nc b.nc + elif [ -s ${COMINm1}/12/pm2.5.corrected.${PDYm1}.12z.nc ]; then + ln_vrfy -sf ${COMINm1}/12/pm2.5.corrected.${PDYm1}.12z.nc b.nc chk=0 else flag_run_bicor_max=no fi # 12z needs c.nc to find current day output from 07Z to 12z - if [ -s ${COMIN_PDY}/06/pm2.5.corrected.${PDY}.06z.nc ]; then - ln_vrfy -sf ${COMIN_PDY}/06/pm2.5.corrected.${PDY}.06z.nc c.nc - elif [ -s ${COMIN_PDYm1}/12/pm2.5.corrected.${PDYm1}.12z.nc ]; then - ln_vrfy -sf ${COMIN_PDYm1}/12/pm2.5.corrected.${PDYm1}.12z.nc c.nc + if [ -s ${COMIN}/../06/pm2.5.corrected.${PDY}.06z.nc ]; then + ln_vrfy -sf ${COMIN}/../06/pm2.5.corrected.${PDY}.06z.nc c.nc + elif [ -s ${COMINm1}/12/pm2.5.corrected.${PDYm1}.12z.nc ]; then + ln_vrfy -sf ${COMINm1}/12/pm2.5.corrected.${PDYm1}.12z.nc c.nc chk1=0 else flag_run_bicor_max=no @@ -302,8 +335,15 @@ EOF1 # write out grib2 format #------------------------------------------------- PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ - Call to executable to run AQM_POST_MAXI_BIAS_COR_GRIB2 returned with nonzero exit code." + eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_bias_cor_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} + export err=$? + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk + else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_POST_MAXI_BIAS_COR_GRIB2 returned with nonzero exit code." 
+ fi + fi POST_STEP # split into two files: one for 24hr_ave and one for 1h_max @@ -330,7 +370,7 @@ EOF1 cp_vrfy ${NET}.${cycle}.max_1hr_pm25_bc.227.grib2 ${COMOUT} cp_vrfy ${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2 ${COMOUT} - if [ "${SENDDBN}" = "YES" ]; then + if [ "${SENDDBN}" = "TRUE" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_pm25_bc.227.grib2 ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${COMOUT}/${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2 fi @@ -338,8 +378,8 @@ fi fhr=01 while [ "${fhr}" -le "${FCST_LEN_HRS}" ]; do - fhr2d=$( printf "%02d" "${fhr}" ) - cat ${DATA}/${NET}.${cycle}.pm25_bc.f${fhr2d}.${id_domain}.grib2 >> tmpfile_pm25_bc + fhr3d=$( printf "%03d" "${fhr}" ) + cat ${DATA}/${NET}.${cycle}.pm25_bc.f${fhr3d}.${id_domain}.grib2 >> tmpfile_pm25_bc (( fhr=fhr+1 )) done @@ -348,7 +388,7 @@ wgrib2 tmpfile_pm25_bc -set_grib_type c3b -new_grid_winds earth -new_grid ${grid cp_vrfy tmpfile_pm25_bc ${COMOUT}/${NET}.${cycle}.ave_1hr_pm25_bc.${id_domain}.grib2 cp_vrfy ${NET}.${cycle}.grib2_pm25_bc.227 ${COMOUT}/${NET}.${cycle}.ave_1hr_pm25_bc.227.grib2 -if [ "${SENDDBN}" = "YES" ]; then +if [ "${SENDDBN}" = "TRUE" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${COMOUT}/${NET}.${cycle}.ave_1hr_pm25_bc.227.grib2 fi @@ -410,13 +450,13 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT51=awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 - # Post Files to COMOUT - cp_vrfy awpaqm.${cycle}.1hpm25-bc.227.grib2 ${COMOUT} - cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${COMOUT} - cp_vrfy awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${COMOUT} + # Post Files to COMOUTwmo + cp_vrfy awpaqm.${cycle}.1hpm25-bc.227.grib2 ${COMOUTwmo} + cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${COMOUTwmo} + cp_vrfy awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${COMOUTwmo} # Distribute Data - if [ "${SENDDBN_NTC}" = "YES" ] ; then + if [ "${SENDDBN_NTC}" = "TRUE" ] ; then ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.1hpm25-bc.227.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 diff --git a/scripts/exregional_fire_emission.sh b/scripts/exregional_fire_emission.sh index 4605d52486..a2e9f9b44d 100755 --- a/scripts/exregional_fire_emission.sh +++ b/scripts/exregional_fire_emission.sh @@ -49,17 +49,6 @@ data files. 
# #----------------------------------------------------------------------- # -# Move to the FIRE EMISSION working directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_FIRE_EMISSION" -rm_vrfy -rf $DATA -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA -# -#----------------------------------------------------------------------- -# # Set up variables for call to retrieve_data.py # #----------------------------------------------------------------------- @@ -67,13 +56,10 @@ cd_vrfy $DATA yyyymmdd=${FIRE_FILE_CDATE:0:8} hh=${FIRE_FILE_CDATE:8:2} -CDATE_md1=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - 24 hours" "+%Y%m%d%H" ) -CDATE_mh3=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - 3 hours" "+%Y%m%d%H" ) -yyyymmdd_mh3=${CDATE_mh3:0:8} -hh_mh3=${CDATE_mh3:8:2} -CDATE_mh2=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - 2 hours" "+%Y%m%d%H" ) CDATE_mh1=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - 1 hours" "+%Y%m%d%H" ) +yyyymmdd_mh1=${CDATE_mh1:0:8} +hh_mh1=${CDATE_mh1:8:2} # #----------------------------------------------------------------------- # @@ -84,64 +70,122 @@ CDATE_mh1=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - 1 hours" "+%Y%m%d% aqm_fire_file_fn="${AQM_FIRE_FILE_PREFIX}_${yyyymmdd}_t${hh}z${AQM_FIRE_FILE_SUFFIX}" # Check if the fire file exists in the designated directory -if [ -e "${AQM_FIRE_DIR}/${yyyymmdd}/${aqm_fire_file_fn}" ]; then - cp_vrfy "${AQM_FIRE_DIR}/${yyyymmdd}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" +if [ -e "${DCOMINfire}/${yyyymmdd}/${aqm_fire_file_fn}" ]; then + cp_vrfy "${DCOMINfire}/${yyyymmdd}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" else # Copy raw data - for ihr in {0..21}; do - download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_mh3} ${hh_mh3} UTC - $ihr hours" "+%Y%m%d%H" ) + for ihr in {0..23}; do + download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_mh1} ${hh_mh1} UTC - $ihr hours" "+%Y%m%d%H" ) FILE_13km="Hourly_Emissions_13km_${download_time}00_${download_time}00.nc" - if [ -e "${AQM_FIRE_DIR}/RAVE_raw_new/${FILE_13km}" ]; then - ln_vrfy -sf "${AQM_FIRE_DIR}/RAVE_raw_new/Hourly_Emissions_13km_${download_time}00_${download_time}00.nc" . - elif [ -d "${AQM_FIRE_DIR}/${CDATE_md1}" ]; then - echo "${FILE_13km} does not exist. Replacing with the file of previous date ..." - yyyymmdd_dn=${download_time:0:8} - hh_dn=${download_time:8:2} - missing_download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_dn} ${hh_dn} UTC - 24 hours" "+%Y%m%d%H" ) - ln_vrfy -sf "${AQM_FIRE_DIR}/${CDATE_md1}/Hourly_Emissions_13km_${missing_download_time}00_${missing_download_time}00.nc" "Hourly_Emissions_13km_${download_time}00_${download_time}00.nc" + yyyymmdd_dn=${download_time:0:8} + hh_dn=${download_time:8:2} + missing_download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_dn} ${hh_dn} UTC - 24 hours" "+%Y%m%d%H" ) + yyyymmdd_dn_md1=${missing_download_time:0:8} + FILE_13km_md1=Hourly_Emissions_13km_${missing_download_time}00_${missing_download_time}00.nc + if [ -e "${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}" ]; then + cp_vrfy "${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}" . + elif [ -e "${DCOMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}" ]; then + echo "WARNING: ${FILE_13km} does not exist. Replacing with the file of previous date ..." + cp_vrfy "${DCOMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}" "${FILE_13km}" else - print_err_msg_exit "RAVE raw data files do not exist." 
+ message_txt="Fire Emission RAW data does not exist: + FILE_13km_md1 = \"${FILE_13km_md1}\" + DCOMINfire = \"${DCOMINfire}\"" + + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + cp_vrfy "${DCOMINfire}/Hourly_Emissions_13km_dummy.nc" "${FILE_13km}" + message_warning="WARNING: ${message_txt}. Replacing with the dummy file :: AQM RUN SOFT FAILED." + print_info_msg "${message_warning}" + if [ ! -z "${maillist}" ]; then + echo "${message_warning}" | mail.py $maillist + fi + else + print_err_msg_exit "${message_txt}" + fi fi done - ncks -O -h --mk_rec_dmn time Hourly_Emissions_13km_${download_time}00_${download_time}00.nc temp.nc || print_err_msg_exit "\ -Call to NCKS returned with nonzero exit code." + ncks -O -h --mk_rec_dmn time Hourly_Emissions_13km_${download_time}00_${download_time}00.nc temp.nc + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCKS returned with nonzero exit code." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi mv_vrfy temp.nc Hourly_Emissions_13km_${download_time}00_${download_time}00.nc - # Extra times - cp_vrfy Hourly_Emissions_13km_${CDATE_mh3}00_${CDATE_mh3}00.nc Hourly_Emissions_13km_${CDATE_mh2}00_${CDATE_mh2}00.nc - cp_vrfy Hourly_Emissions_13km_${CDATE_mh3}00_${CDATE_mh3}00.nc Hourly_Emissions_13km_${CDATE_mh1}00_${CDATE_mh1}00.nc - - ncrcat -h Hourly_Emissions_13km_*.nc Hourly_Emissions_13km_${yyyymmdd}0000_${yyyymmdd}2300.t${cyc}z.nc || print_err_msg_exit "\ -Call to NCRCAT returned with nonzero exit code." + ncrcat -h Hourly_Emissions_13km_*.nc Hourly_Emissions_13km_${yyyymmdd}0000_${yyyymmdd}2300.t${cyc}z.nc + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCRCAT returned with nonzero exit code." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi input_fire="${DATA}/Hourly_Emissions_13km_${yyyymmdd}0000_${yyyymmdd}2300.t${cyc}z.nc" output_fire="${DATA}/Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_new24.t${cyc}z.nc" python3 ${HOMEdir}/sorc/AQM-utils/python_utils/RAVE_remake.allspecies.aqmna13km.g793.py --date "${yyyymmdd}" --cyc "${hh}" --input_fire "${input_fire}" --output_fire "${output_fire}" + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to python script \"RAVE_remake.allspecies.py\" returned with nonzero exit code." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi - ncks --mk_rec_dmn Time Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_new24.t${cyc}z.nc -o Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc || print_err_msg_exit "\ -Call to NCKS returned with nonzero exit code." + ncks --mk_rec_dmn Time Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_new24.t${cyc}z.nc -o Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCKS returned with nonzero exit code." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi - ncrcat Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc ${aqm_fire_file_fn} || print_err_msg_exit "\ -Call to NCRCAT returned with nonzero exit code." 
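When the RAVE raw data cannot be found at all, the new branch above degrades gracefully in operations instead of aborting. A condensed sketch of that soft-failure path follows; the dummy file name and the mail.py helper come from the hunk, while maillist is assumed to be supplied by the operational environment.

    if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
      # Operations: substitute a dummy emissions file, warn, and keep running.
      cp_vrfy "${DCOMINfire}/Hourly_Emissions_13km_dummy.nc" "${FILE_13km}"
      message_warning="WARNING: ${message_txt}. Replacing with the dummy file :: AQM RUN SOFT FAILED."
      print_info_msg "${message_warning}"
      if [ ! -z "${maillist}" ]; then
        echo "${message_warning}" | mail.py $maillist
      fi
    else
      # Community mode: a missing fire file is still a hard failure.
      print_err_msg_exit "${message_txt}"
    fi
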
+ ncrcat Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc ${aqm_fire_file_fn} + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCRCAT returned with nonzero exit code." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi # Copy the final fire emission file to STAGING_DIR cp_vrfy "${DATA}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" # Archive the final fire emission file to disk and HPSS if [ "${DO_AQM_SAVE_FIRE}" = "TRUE" ]; then - mkdir -p "${AQM_FIRE_DIR}/${yyyymmdd}" - cp_vrfy "${DATA}/${aqm_fire_file_fn}" "${AQM_FIRE_DIR}/${yyyymmdd}" + cp "${DATA}/${aqm_fire_file_fn}" ${DCOMINfire} hsi_log_fn="log.hsi_put.${yyyymmdd}_${hh}" - hsi put ${aqm_fire_file_fn} : ${AQM_FIRE_ARCHV_DIR}/${aqm_fire_file_fn} >& ${hsi_log_fn} || \ - print_err_msg_exit "\ -htar file writing operation (\"hsi put ...\") failed. Check the log + hsi put ${aqm_fire_file_fn} : ${AQM_FIRE_ARCHV_DIR}/${aqm_fire_file_fn} >& ${hsi_log_fn} + export err=$? + if [ $err -ne 0 ]; then + message_txt="htar file writing operation (\"hsi put ...\") failed. Check the log file hsi_log_fn in the DATA directory for details: DATA = \"${DATA}\" hsi_log_fn = \"${hsi_log_fn}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi fi fi # diff --git a/scripts/exregional_get_extrn_mdl_files.sh b/scripts/exregional_get_extrn_mdl_files.sh index 5b138eb298..6152a62716 100755 --- a/scripts/exregional_get_extrn_mdl_files.sh +++ b/scripts/exregional_get_extrn_mdl_files.sh @@ -192,12 +192,20 @@ python3 -u ${USHdir}/retrieve_data.py \ --summary_file ${EXTRN_DEFNS} \ $additional_flags" -$cmd || print_err_msg_exit "\ -Call to retrieve_data.py failed with a non-zero exit status. - +$cmd +export err=$? +if [ $err -ne 0 ]; then + message_txt="Call to retrieve_data.py failed with a non-zero exit status. The command was: ${cmd} " + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi + # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index a110cd1d2c..98867afc79 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -84,7 +84,7 @@ fi #----------------------------------------------------------------------- # if [ $RUN_ENVIR = "nco" ]; then - extrn_mdl_staging_dir="${COMINext}${SLASH_ENSMEM_SUBDIR}" + extrn_mdl_staging_dir="${DATAROOT}/get_extrn_ics.${share_pid}${SLASH_ENSMEM_SUBDIR}" extrn_mdl_var_defns_fp="${extrn_mdl_staging_dir}/${NET}.${cycle}.${EXTRN_MDL_NAME_ICS}.ICS.${EXTRN_MDL_VAR_DEFNS_FN}.sh" else extrn_mdl_staging_dir="${COMIN}/${EXTRN_MDL_NAME_ICS}/for_ICS${SLASH_ENSMEM_SUBDIR}" @@ -94,16 +94,6 @@ fi # #----------------------------------------------------------------------- # -# -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_ICS" -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA -# -#----------------------------------------------------------------------- -# # Set physics-suite-dependent variable mapping table needed in the FORTRAN # namelist file that the chgres_cube executable will read in. 
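With COMINext gone, the MAKE_ICS hunk above reads the external-model staging area from a shared scratch directory under DATAROOT instead. A compact sketch of that selection is shown here; share_pid is assumed to be exported by the workflow so that MAKE_ICS can locate the GET_EXTRN_ICS task's working directory, and the community-mode branch is unchanged by the patch.

    if [ "${RUN_ENVIR}" = "nco" ]; then
      # NCO: shared scratch space of the get_extrn_ics task, keyed by share_pid.
      extrn_mdl_staging_dir="${DATAROOT}/get_extrn_ics.${share_pid}${SLASH_ENSMEM_SUBDIR}"
      extrn_mdl_var_defns_fp="${extrn_mdl_staging_dir}/${NET}.${cycle}.${EXTRN_MDL_NAME_ICS}.ICS.${EXTRN_MDL_VAR_DEFNS_FN}.sh"
    else
      # Community: staged under COMIN as before.
      extrn_mdl_staging_dir="${COMIN}/${EXTRN_MDL_NAME_ICS}/for_ICS${SLASH_ENSMEM_SUBDIR}"
    fi
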
# @@ -140,10 +130,14 @@ case "${CCPP_PHYS_SUITE}" in ;; # *) - print_err_msg_exit "\ -The variable \"varmap_file\" has not yet been specified for this physics -suite (CCPP_PHYS_SUITE): + message_txt="The variable \"varmap_file\" has not yet been specified for +this physics suite (CCPP_PHYS_SUITE): CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi ;; # esac @@ -495,10 +489,14 @@ case "${EXTRN_MDL_NAME_ICS}" in ;; *) - print_err_msg_exit "\ -External-model-dependent namelist variables have not yet been specified + message_txt="External-model-dependent namelist variables have not yet been specified for this external IC model (EXTRN_MDL_NAME_ICS): EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi ;; esac @@ -522,11 +520,15 @@ hh="${EXTRN_MDL_CDATE:8:2}" exec_fn="chgres_cube" exec_fp="$EXECdir/${exec_fn}" if [ ! -f "${exec_fp}" ]; then - print_err_msg_exit "\ -The executable (exec_fp) for generating initial conditions on the FV3-LAM -native grid does not exist: + message_txt="The executable (exec_fp) for generating initial conditions +on the FV3-LAM native grid does not exist: exec_fp = \"${exec_fp}\" Please ensure that you've built this executable." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi fi # #----------------------------------------------------------------------- @@ -592,16 +594,23 @@ settings=" # Call the python script to create the namelist file. # nml_fn="fort.41" -${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn} || \ - print_err_msg_exit "\ -Call to python script set_namelist.py to set the variables in the namelist -file read in by the ${exec_fn} executable failed. Parameters passed to -this script are: +${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn} +err=$? +if [ $err -ne 0 ]; then + message_txt="Call to python script set_namelist.py to set the variables +in the namelist file read in by the ${exec_fn} executable failed. Parameters +passed to this script are: Name of output namelist file: nml_fn = \"${nml_fn}\" Namelist settings specified on command line (these have highest precedence): settings = $settings" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi # #----------------------------------------------------------------------- # @@ -660,21 +669,29 @@ if [ "${USE_FVCOM}" = "TRUE" ]; then fvcom_exec_fp="$EXECdir/${fvcom_exec_fn}" fvcom_time="${DATE_FIRST_CYCL:0:4}-${DATE_FIRST_CYCL:4:2}-${DATE_FIRST_CYCL:6:2}T${DATE_FIRST_CYCL:8:2}:00:00.000000" if [ ! -f "${fvcom_exec_fp}" ]; then - print_err_msg_exit "\ -The executable (fvcom_exec_fp) for processing FVCOM data onto FV3-LAM -native grid does not exist: + message_txt="The executable (fvcom_exec_fp) for processing FVCOM data +onto FV3-LAM native grid does not exist: fvcom_exec_fp = \"${fvcom_exec_fp}\" Please ensure that you've built this executable." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}"\ + else + print_err_msg_exit "${message_txt}" + fi fi cp_vrfy ${fvcom_exec_fp} ${INPUT_DATA}/. fvcom_data_fp="${FVCOM_DIR}/${FVCOM_FILE}" if [ ! 
-f "${fvcom_data_fp}" ]; then - print_err_msg_exit "\ -The file or path (fvcom_data_fp) does not exist: + message_txt="The file or path (fvcom_data_fp) does not exist: fvcom_data_fp = \"${fvcom_data_fp}\" Please check the following user defined variables: FVCOM_DIR = \"${FVCOM_DIR}\" FVCOM_FILE= \"${FVCOM_FILE}\" " + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi fi cp_vrfy ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 3dad9bd0be..b0311c626d 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -82,7 +82,7 @@ fi #----------------------------------------------------------------------- # if [ $RUN_ENVIR = "nco" ]; then - extrn_mdl_staging_dir="${COMINext}${SLASH_ENSMEM_SUBDIR}" + extrn_mdl_staging_dir="${DATAROOT}/get_extrn_lbcs.${share_pid}${SLASH_ENSMEM_SUBDIR}" extrn_mdl_var_defns_fp="${extrn_mdl_staging_dir}/${NET}.${cycle}.${EXTRN_MDL_NAME_LBCS}.LBCS.${EXTRN_MDL_VAR_DEFNS_FN}.sh" else extrn_mdl_staging_dir="${COMIN}/${EXTRN_MDL_NAME_LBCS}/for_LBCS${SLASH_ENSMEM_SUBDIR}" @@ -92,16 +92,6 @@ fi # #----------------------------------------------------------------------- # -# -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_LBCS" -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA -# -#----------------------------------------------------------------------- -# # Set physics-suite-dependent variable mapping table needed in the FORTRAN # namelist file that the chgres_cube executable will read in. # @@ -138,10 +128,14 @@ case "${CCPP_PHYS_SUITE}" in ;; # *) - print_err_msg_exit "\ -The variable \"varmap_file\" has not yet been specified for this physics -suite (CCPP_PHYS_SUITE): + message_txt="The variable \"varmap_file\" has not yet been specified +for this physics suite (CCPP_PHYS_SUITE): CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi ;; # esac @@ -337,10 +331,14 @@ case "${EXTRN_MDL_NAME_LBCS}" in ;; *) - print_err_msg_exit "\ -External-model-dependent namelist variables have not yet been specified -for this external LBC model (EXTRN_MDL_NAME_LBCS): + message_txt="External-model-dependent namelist variables have not yet been +specified for this external LBC model (EXTRN_MDL_NAME_LBCS): EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi ;; esac @@ -354,11 +352,15 @@ esac exec_fn="chgres_cube" exec_fp="$EXECdir/${exec_fn}" if [ ! -f "${exec_fp}" ]; then - print_err_msg_exit "\ -The executable (exec_fp) for generating initial conditions on the FV3-LAM -native grid does not exist: + message_txt="The executable (exec_fp) for generating initial conditions +on the FV3-LAM native grid does not exist: exec_fp = \"${exec_fp}\" Please ensure that you've built this executable." 
+ if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi fi # #----------------------------------------------------------------------- @@ -423,10 +425,14 @@ for (( ii=0; ii<${num_fhrs}; ii=ii+bcgrpnum10 )); do fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; *) - print_err_msg_exit "\ -The external model output file name to use in the chgres_cube FORTRAN name- -list file has not specified for this external LBC model (EXTRN_MDL_NAME_LBCS): + message_txt="The external model output file name to use in the chgres_cube +FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_NAME_LBCS): EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi ;; esac # @@ -498,16 +504,23 @@ settings=" # Call the python script to create the namelist file. # nml_fn="fort.41" - ${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn} || \ - print_err_msg_exit "\ -Call to python script set_namelist.py to set the variables in the namelist -file read in by the ${exec_fn} executable failed. Parameters passed to -this script are: + ${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn} + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to python script set_namelist.py to set the variables +in the namelist file read in by the ${exec_fn} executable failed. Parameters +passed to this script are: Name of output namelist file: nml_fn = \"${nml_fn}\" Namelist settings specified on command line (these have highest precedence): settings = $settings" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi # #----------------------------------------------------------------------- # @@ -524,8 +537,13 @@ $settings" # forecast task. # PREP_STEP - eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} || \ - print_err_msg_exit "\ + eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} + export err=$? + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk + else + if [ $err -ne 0 ]; then + print_err_msg_exit "\ Call to executable (exec_fp) to generate lateral boundary conditions (LBCs) file for the FV3-LAM for forecast hour fhr failed: exec_fp = \"${exec_fp}\" @@ -535,6 +553,8 @@ The external model from which the LBCs files are to be generated is: The external model files that are inputs to the executable (exec_fp) are located in the following directory: extrn_mdl_staging_dir = \"${extrn_mdl_staging_dir}\"" + fi + fi POST_STEP # # Move LBCs file for the current lateral boundary update time to the LBCs diff --git a/scripts/exregional_nexus_emission.sh b/scripts/exregional_nexus_emission.sh index 2c9915f304..8c0f904b94 100755 --- a/scripts/exregional_nexus_emission.sh +++ b/scripts/exregional_nexus_emission.sh @@ -79,13 +79,9 @@ fi # #----------------------------------------------------------------------- # -DATA="${DATA}/tmp_NEXUS/${nspt}" -mkdir_vrfy -p "$DATA" - DATAinput="${DATA}/input" mkdir_vrfy -p "$DATAinput" -cd_vrfy $DATA # #----------------------------------------------------------------------- # @@ -94,9 +90,15 @@ cd_vrfy $DATA #----------------------------------------------------------------------- # USE_GFS_SFC="FALSE" -if [ -d "${COMINext}/GFS_SFC" ]; then - if [ "$(ls -A ${COMINext}/GFS_SFC)" ]; then - ln_vrfy -sf "${COMINext}/GFS_SFC" . 
+if [ "${RUN_ENVIR}" = "nco" ]; then + GFS_SFC_INPUT="${DATAROOT}/nexus_gfs_sfc.${share_pid}" +else + GFS_SFC_INPUT="${COMIN}/GFS_SFC" +fi + +if [ -d "${GFS_SFC_INPUT}" ]; then + if [ "$(ls -A ${GFS_SFC_INPUT})" ]; then + ln -sf "${GFS_SFC_INPUT}" "GFS_SFC" USE_GFS_SFC="TRUE" fi fi @@ -149,10 +151,10 @@ else start_del_hr=$(( len_per_split * nspt )) start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${start_del_hr} hours " "+%Y%m%d%H" ) if [ "${nsptp}" = "${NUM_SPLIT_NEXUS}" ];then - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" ) + end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $FCST_LEN_HRS + 1) hours" "+%Y%m%d%H" ) else end_del_hr=$(( len_per_split * nsptp )) - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${end_del_hr} hours" "+%Y%m%d%H" ) + end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $end_del_hr + 1) hours" "+%Y%m%d%H" ) fi fi # @@ -186,6 +188,15 @@ NEXUS_INPUT_BASE_DIR=${NEXUS_INPUT_DIR} # modify time configuration file # python3 ${ARL_NEXUS_DIR}/utils/python/nexus_time_parser.py -f ${DATA}/HEMCO_sa_Time.rc -s $start_date -e $end_date +export err=$? +if [ $err -ne 0 ]; then + message_txt="Call to python script \"nexus_time_parser.py\" failed." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi # #--------------------------------------------------------------------- @@ -193,6 +204,15 @@ python3 ${ARL_NEXUS_DIR}/utils/python/nexus_time_parser.py -f ${DATA}/HEMCO_sa_T # set the root directory to the temporary directory # python3 ${ARL_NEXUS_DIR}/utils/python/nexus_root_parser.py -f ${DATA}/NEXUS_Config.rc -d ${DATAinput} +export err=$? +if [ $err -ne 0 ]; then + message_txt="Call to python script \"nexus_root_parser.py\" failed." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi # #---------------------------------------------------------------------- @@ -203,7 +223,26 @@ if [ "${NEI2016}" = "TRUE" ]; then #NEI2016 mkdir_vrfy -p ${DATAinput}/NEI2016v1/v2022-07 mkdir_vrfy -p ${DATAinput}/NEI2016v1/v2022-07/${mm} python3 ${ARL_NEXUS_DIR}/utils/python/nexus_nei2016_linker.py --src_dir ${NEXUS_INPUT_BASE_DIR} --date ${yyyymmdd} --work_dir ${DATAinput} -v "v2022-07" + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to python script \"nexus_nei2016_linker.py\" failed." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi + python3 ${ARL_NEXUS_DIR}/utils/python/nexus_nei2016_control_tilefix.py -f ${DATA}/NEXUS_Config.rc -t ${DATA}/HEMCO_sa_Time.rc # -d ${yyyymmdd} + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to python script \"nexus_nei2016_control_tilefix.py\" failed." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi fi if [ "${TIMEZONES}" = "TRUE" ]; then # TIME ZONES @@ -265,6 +304,15 @@ fi if [ "${USE_GFS_SFC}" = "TRUE" ]; then # GFS INPUT mkdir_vrfy -p ${DATAinput}/GFS_SFC python3 ${ARL_NEXUS_DIR}/utils/python/nexus_gfs_bio.py -i ${DATA}/GFS_SFC/gfs.t??z.sfcf???.nc -o ${DATA}/GFS_SFC_MEGAN_INPUT.nc + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to python script \"nexus_gfs_bio.py\" failed." 
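The nexus_emission hunk earlier in this file now resolves the GFS surface data from the shared nexus_gfs_sfc workspace in NCO mode rather than from COMINext. A compact sketch of that lookup as it now reads, again assuming share_pid is provided by the workflow environment:

    USE_GFS_SFC="FALSE"
    if [ "${RUN_ENVIR}" = "nco" ]; then
      GFS_SFC_INPUT="${DATAROOT}/nexus_gfs_sfc.${share_pid}"   # staged by the nexus_gfs_sfc task
    else
      GFS_SFC_INPUT="${COMIN}/GFS_SFC"
    fi
    if [ -d "${GFS_SFC_INPUT}" ] && [ "$(ls -A ${GFS_SFC_INPUT})" ]; then
      ln -sf "${GFS_SFC_INPUT}" "GFS_SFC"    # used later to build GFS_SFC_MEGAN_INPUT.nc
      USE_GFS_SFC="TRUE"
    fi
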
+ if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi fi # @@ -275,20 +323,34 @@ fi #----------------------------------------------------------------------- # PREP_STEP -eval ${RUN_CMD_NEXUS} ${EXECdir}/nexus -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc ${REDIRECT_OUT_ERR} || \ -print_err_msg_exit "\ -Call to execute nexus standalone for the FV3LAM failed." +eval ${RUN_CMD_NEXUS} ${EXECdir}/nexus -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to execute nexus standalone for the FV3LAM failed." + fi +fi POST_STEP # #----------------------------------------------------------------------- # -# Move NEXUS output to INPUT_DATA directory. +# Make NEXUS output pretty and move to INPUT_DATA directory. # #----------------------------------------------------------------------- # -mv_vrfy ${DATA}/NEXUS_Expt_split.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc - +python3 ${ARL_NEXUS_DIR}/utils/python/make_nexus_output_pretty.py --src ${DATA}/NEXUS_Expt_split.nc --grid ${DATA}/grid_spec.nc -o ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc -t ${DATA}/HEMCO_sa_Time.rc +export err=$? +if [ $err -ne 0 ]; then + message_txt="Call to python script \"make_nexus_output_pretty.py\" failed." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "wcoss2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_nexus_gfs_sfc.sh b/scripts/exregional_nexus_gfs_sfc.sh index 46ed3b15ef..01207e5666 100755 --- a/scripts/exregional_nexus_gfs_sfc.sh +++ b/scripts/exregional_nexus_gfs_sfc.sh @@ -48,12 +48,6 @@ data files from disk or HPSS. 
# #----------------------------------------------------------------------- # -DATA="${DATA}/tmp_GFS_SFC" -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA -# -#----------------------------------------------------------------------- -# # Set up variables for call to retrieve_data.py # #----------------------------------------------------------------------- @@ -68,10 +62,11 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} )) FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi +fcst_len_hrs_offset=$(( FCST_LEN_HRS + TIME_OFFSET_HRS )) # #----------------------------------------------------------------------- # -# Retrieve GFS surface files to GFS_SFC_STAGING_DIR +# Retrieve GFS surface files # #----------------------------------------------------------------------- # @@ -89,17 +84,31 @@ GFS_SFC_DATA_INTVL="3" # copy files from local directory if [ -d ${GFS_SFC_LOCAL_DIR} ]; then gfs_sfc_fn="gfs.t${hh}z.sfcanl.nc" - cp_vrfy "${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" ${GFS_SFC_STAGING_DIR} - for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${FCST_LEN_HRS}); do + relative_link_flag="FALSE" + gfs_sfc_fp="${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" + create_symlink_to_file target="${gfs_sfc_fp}" symlink="${gfs_sfc_fn}" \ + relative="${relative_link_flag}" + + for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do gfs_sfc_fn="gfs.t${hh}z.sfcf${fhr}.nc" if [ -e "${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" ]; then - cp_vrfy "${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" ${GFS_SFC_STAGING_DIR} + gfs_sfc_fp="${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" + create_symlink_to_file target="${gfs_sfc_fp}" symlink="${gfs_sfc_fn}" \ + relative="${relative_link_flag}" else - print_err_msg_exit "\ -sfc file does not exist in the directory: + message_txt="SFC file for nexus emission for \"${cycle}\" does not exist in the directory: GFS_SFC_LOCAL_DIR = \"${GFS_SFC_LOCAL_DIR}\" gfs_sfc_fn = \"${gfs_sfc_fn}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + message_warning="WARNING: ${message_txt}" + print_info_msg "${message_warning}" + if [ ! -z "${maillist}" ]; then + echo "${message_warning}" | mail.py $maillist + fi + else + print_err_msg_exit "${message_txt}" + fi fi done @@ -121,8 +130,8 @@ else gfs_sfc_tar_fp="${GFS_SFC_TAR_DIR}/${gfs_sfc_tar_fn}" gfs_sfc_fns=("gfs.t${hh}z.sfcanl.nc") gfs_sfc_fps="./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcanl.nc" - if [ "${FCST_LEN_HRS}" -lt "40" ]; then - ARCHV_LEN_HRS="${FCST_LEN_HRS}" + if [ "${fcst_len_hrs_offset}" -lt "40" ]; then + ARCHV_LEN_HRS="${fcst_len_hrs_offset}" else ARCHV_LEN_HRS="39" fi @@ -131,27 +140,43 @@ else gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcf${fhr}.nc" done - # Retrieve data from A file up to FCST_LEN_HRS=39 + # Retrieve data from A file up to fcst_len_hrs_offset=39 htar -tvf ${gfs_sfc_tar_fp} PREP_STEP - htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR} || \ - print_err_msg_exit "htar file reading operation (\"htar -xvf ...\") failed." + htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR} + export err=$? + if [ $err -ne 0 ]; then + message_txt="htar file reading operation (\"htar -xvf ...\") failed." 
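The nexus_gfs_sfc hunks above lengthen the retrieval window to FCST_LEN_HRS plus TIME_OFFSET_HRS and split the HPSS pull across the "A" and "B" tar files at the 39/42-hour boundary. The lines below are only a schematic of which archive covers which hours, with made-up example values so it runs on its own; the real file lists are assembled as in the hunks.

    # Example values; in the scripts these come from the workflow configuration.
    FCST_LEN_HRS=72; TIME_OFFSET_HRS=6; GFS_SFC_DATA_INTVL=3; hh=12
    fcst_len_hrs_offset=$(( FCST_LEN_HRS + TIME_OFFSET_HRS ))
    if [ "${fcst_len_hrs_offset}" -lt "40" ]; then
      ARCHV_LEN_HRS="${fcst_len_hrs_offset}"   # everything fits in the "A" tar file
    else
      ARCHV_LEN_HRS="39"                       # "A" file ends at f039
    fi
    for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${ARCHV_LEN_HRS}); do
      echo "gfs.t${hh}z.sfcf${fhr}.nc  <- A file"
    done
    if [ "${fcst_len_hrs_offset}" -ge "40" ]; then
      for fhr in $(seq -f "%03g" 42 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do
        echo "gfs.t${hh}z.sfcf${fhr}.nc  <- B file"
      done
    fi
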
+ if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi POST_STEP - # Retireve data from B file when FCST_LEN_HRS>=40 - if [ "${FCST_LEN_HRS}" -ge "40" ]; then + # Retireve data from B file when fcst_len_hrs_offset>=40 + if [ "${fcst_len_hrs_offset}" -ge "40" ]; then gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${yyyymmdd}_${hh}.${GFS_SFC_TAR_FN_SUFFIX_B}" gfs_sfc_tar_fp="${GFS_SFC_TAR_DIR}/${gfs_sfc_tar_fn}" gfs_sfc_fns=() gfs_sfc_fps="" - for fhr in $(seq -f "%03g" 42 ${GFS_SFC_DATA_INTVL} ${FCST_LEN_HRS}); do + for fhr in $(seq -f "%03g" 42 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do gfs_sfc_fns+="gfs.t${hh}z.sfcf${fhr}.nc" gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcf${fhr}.nc" done htar -tvf ${gfs_sfc_tar_fp} PREP_STEP - htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR} || \ - print_err_msg_exit "htar file reading operation (\"htar -xvf ...\") failed." + htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR} + export err=$? + if [ $err -ne 0 ]; then + message_txt="htar file reading operation (\"htar -xvf ...\") failed." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi POST_STEP fi # Link retrieved files to staging directory diff --git a/scripts/exregional_nexus_post_split.sh b/scripts/exregional_nexus_post_split.sh index 054b825d0c..a2e2a63b2a 100755 --- a/scripts/exregional_nexus_post_split.sh +++ b/scripts/exregional_nexus_post_split.sh @@ -52,17 +52,6 @@ This is the ex-script for the task that runs NEXUS. #----------------------------------------------------------------------- # eval ${PRE_TASK_CMDS} -# -#----------------------------------------------------------------------- -# -# Move to the NEXUS working directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_NEXUS_POST_SPLIT" -mkdir_vrfy -p "$DATA" - -cd_vrfy $DATA mm="${PDY:4:2}" dd="${PDY:6:2}" @@ -94,18 +83,16 @@ if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then cp_vrfy ${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc else python3 ${ARL_NEXUS_DIR}/utils/python/concatenate_nexus_post_split.py "${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc" + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to python script \"concatenate_nexus_post_split.py\" failed." 
+ if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi fi - -# -#----------------------------------------------------------------------- -# -# make nexus output pretty -# -#----------------------------------------------------------------------- -# -python3 ${ARL_NEXUS_DIR}/utils/python/nexus_time_parser.py -f ${DATA}/HEMCO_sa_Time.rc -s $start_date -e $end_date - -python3 ${ARL_NEXUS_DIR}/utils/python/make_nexus_output_pretty.py --src ${DATA}/NEXUS_Expt_combined.nc --grid ${DATA}/grid_spec.nc -o ${DATA}/NEXUS_Expt_pretty.nc -t ${DATA}/HEMCO_sa_Time.rc # #----------------------------------------------------------------------- @@ -114,7 +101,16 @@ python3 ${ARL_NEXUS_DIR}/utils/python/make_nexus_output_pretty.py --src ${DATA}/ # #----------------------------------------------------------------------- # -python3 ${ARL_NEXUS_DIR}/utils/combine_ant_bio.py ${DATA}/NEXUS_Expt_pretty.nc ${DATA}/NEXUS_Expt.nc +python3 ${ARL_NEXUS_DIR}/utils/combine_ant_bio.py "${DATA}/NEXUS_Expt_combined.nc" ${DATA}/NEXUS_Expt.nc +export err=$? +if [ $err -ne 0 ]; then + message_txt="Call to python script \"NEXUS_Expt_pretty.py\" failed." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi # #----------------------------------------------------------------------- diff --git a/scripts/exregional_point_source.sh b/scripts/exregional_point_source.sh index a3be0256c4..8eee83337c 100755 --- a/scripts/exregional_point_source.sh +++ b/scripts/exregional_point_source.sh @@ -61,16 +61,6 @@ fi nstep=$(( FCST_LEN_HRS+1 )) yyyymmddhh="${PDY}${cyc}" -# -#----------------------------------------------------------------------- -# -# Move to the working directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_PT_SOURCE" -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA # #----------------------------------------------------------------------- # @@ -78,7 +68,7 @@ cd_vrfy $DATA # #----------------------------------------------------------------------- # -PT_SRC_PRECOMB="${PT_SRC_BASEDIR}" +PT_SRC_PRECOMB="${DCOMINpt_src}" # #----------------------------------------------------------------------- # @@ -88,6 +78,15 @@ PT_SRC_PRECOMB="${PT_SRC_BASEDIR}" # if [ ! -s "${DATA}/pt-${yyyymmddhh}.nc" ]; then python3 ${HOMEdir}/sorc/AQM-utils/python_utils/stack-pt-merge.py -s ${yyyymmddhh} -n ${nstep} -i ${PT_SRC_PRECOMB} + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to python script \"stack-pt-merge.py\" failed." + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi fi # Move to COMIN diff --git a/scripts/exregional_post_stat_o3.sh b/scripts/exregional_post_stat_o3.sh index 555d43b82a..266cfe67f9 100755 --- a/scripts/exregional_post_stat_o3.sh +++ b/scripts/exregional_post_stat_o3.sh @@ -63,16 +63,6 @@ else All executables will be submitted with command \'${RUN_CMD_SERIAL}\'." 
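The nexus_post_split hunks above drop the separate "pretty" step here (it now runs inside nexus_emission, as seen earlier) and feed the combined file straight into combine_ant_bio.py. A condensed sketch of the recombination as it now reads, relying on the same workflow variables as the script:

    # Recombine the per-split NEXUS outputs, then merge anthropogenic and bio emissions.
    if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then
      cp_vrfy ${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc
    else
      python3 ${ARL_NEXUS_DIR}/utils/python/concatenate_nexus_post_split.py \
        "${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" \
        "${DATA}/NEXUS_Expt_combined.nc"
    fi
    python3 ${ARL_NEXUS_DIR}/utils/combine_ant_bio.py "${DATA}/NEXUS_Expt_combined.nc" "${DATA}/NEXUS_Expt.nc"
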
fi -# -#----------------------------------------------------------------------- -# -# Move to the working directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_POST_STAT_O3" -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA # #----------------------------------------------------------------------- # @@ -98,8 +88,15 @@ EOF1 # convert from netcdf to grib2 format PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ -Call to executable to run AQM_POST_GRIB2 returned with nonzero exit code." +eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_POST_GRIB2 returned with nonzero exit code." + fi +fi POST_STEP if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then @@ -110,13 +107,13 @@ fi fhr=01 while [ ${fhr} -le ${FCST_LEN_HRS} ]; do - fhr9=$( printf "%02d" "${fhr}" ) + fhr3d=$( printf "%03d" "${fhr}" ) - if [ "${fhr9}" -le "07" ]; then - cat ${DATA}/${NET}.${cycle}.awpozcon.f${fhr9}.${id_domain}.grib2 >> ${NET}.${cycle}.1ho3.${id_domain}.grib2 + if [ "${fhr3d}" -le "07" ]; then + cat ${DATA}/${NET}.${cycle}.awpozcon.f${fhr3d}.${id_domain}.grib2 >> ${NET}.${cycle}.1ho3.${id_domain}.grib2 else - wgrib2 ${DATA}/${NET}.${cycle}.awpozcon.f${fhr9}.${id_domain}.grib2 -d 1 -append -grib ${NET}.${cycle}.1ho3.${id_domain}.grib2 - wgrib2 ${DATA}/${NET}.${cycle}.awpozcon.f${fhr9}.${id_domain}.grib2 -d 2 -append -grib ${NET}.${cycle}.8ho3.${id_domain}.grib2 + wgrib2 ${DATA}/${NET}.${cycle}.awpozcon.f${fhr3d}.${id_domain}.grib2 -d 1 -append -grib ${NET}.${cycle}.1ho3.${id_domain}.grib2 + wgrib2 ${DATA}/${NET}.${cycle}.awpozcon.f${fhr3d}.${id_domain}.grib2 -d 2 -append -grib ${NET}.${cycle}.8ho3.${id_domain}.grib2 fi (( fhr=fhr+1 )) done @@ -150,9 +147,12 @@ for grid in 227 196 198;do export FORT51=awpaqm.${cycle}.${hr}ho3.${grid}.grib2 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3-awpozcon.${cycle}.${grid} done - for var in 1ho3 8ho3 awpozcon;do + for var in 1ho3 8ho3;do + cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} + cp_vrfy ${DATA}/awpaqm.${cycle}.${var}*grib2 ${COMOUTwmo} + done + for var in awpozcon;do cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} - cp_vrfy ${DATA}/awpaqm.${cycle}.${var}*grib2 ${COMOUT} done else for var in 1ho3 awpozcon;do @@ -185,10 +185,10 @@ EOF1 ## 06z needs b.nc to find current day output from 04Z to 06Z if [ "${cyc}" = "06" ]; then - if [ -s ${COMIN_PDY}/00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN_PDY}/00/${NET}.t00z.chem_sfc.nc b.nc - elif [ -s ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc b.nc + if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then + ln_vrfy -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then + ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no @@ -197,20 +197,20 @@ EOF1 if [ "${cyc}" = "12" ]; then ## 12z needs b.nc to find current day output from 04Z to 06Z - if [ -s ${COMIN_PDY}/00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN_PDY}/00/${NET}.t00z.chem_sfc.nc b.nc - elif [ -s ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc b.nc + if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then + ln_vrfy -s 
${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then + ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no fi ## 12z needs c.nc to find current day output from 07Z to 12z - if [ -s ${COMIN_PDY}/06/${NET}.t06z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN_PDY}/06/${NET}.t06z.chem_sfc.nc c.nc - elif [ -s ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc c.nc + if [ -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc ]; then + ln_vrfy -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc c.nc + elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then + ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc chk1=0 else flag_run_bicor_max=no @@ -218,8 +218,15 @@ EOF1 fi PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ - Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code." + eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} + export err=$? + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk + else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code." + fi + fi POST_STEP # split into max_1h and max_8h files and copy to grib227 @@ -237,7 +244,7 @@ EOF1 wgrib2 ${NET}.${cycle}.max_1hr_o3.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.max_1hr_o3.${grid}.grib2 cp_vrfy ${DATA}/${NET}.${cycle}.max_*hr_o3.*.grib2 ${COMOUT} - if [ "$SENDDBN" = "YES" ]; then + if [ "$SENDDBN" = "TRUE" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_o3.${grid}.grib2 ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_8hr_o3.${grid}.grib2 fi @@ -260,10 +267,10 @@ EOF1 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} done - cp_vrfy awpaqm.${cycle}.*o3-max.${grid}.grib2 ${COMOUT} - if [ "${SENDDBN_NTC}" = "YES" ]; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.1ho3-max.${grid}.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.8ho3-max.${grid}.grib2 + cp_vrfy awpaqm.${cycle}.*o3-max.${grid}.grib2 ${COMOUTwmo} + if [ "${SENDDBN_NTC}" = "TRUE" ]; then + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1ho3-max.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.8ho3-max.${grid}.grib2 fi done fi diff --git a/scripts/exregional_post_stat_pm25.sh b/scripts/exregional_post_stat_pm25.sh index cc3131b3da..47ee326fb4 100755 --- a/scripts/exregional_post_stat_pm25.sh +++ b/scripts/exregional_post_stat_pm25.sh @@ -63,16 +63,6 @@ else All executables will be submitted with command \'${RUN_CMD_SERIAL}\'." 
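The bias-correction preparation above, and the matching PM2.5 hunks below, now locate earlier cycles relative to COMIN and COMINm1 instead of the old COMIN_PDY variables. A sketch of the 06Z fallback chain for the b.nc input, using the file names from the ozone hunk:

    # 06Z run: prefer today's 00Z surface file, fall back to yesterday's 12Z file.
    if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then
      ln_vrfy -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc
    elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then
      ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc
      chk=0                       # chk/chk1 are later passed to the aqm_post_maxi* executables
    else
      flag_run_bicor_max=no       # skip the daily-maximum products for this cycle
    fi
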
fi -# -#----------------------------------------------------------------------- -# -# Move to the working directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_POST_STAT_PM25" -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA # #----------------------------------------------------------------------- # @@ -102,8 +92,15 @@ EOF1 # convert from netcdf to grib2 format PREP_STEP -eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ -Call to executable to run AQM_POST_GRIB2 returned with nonzero exit code." +eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_grib2 ${PDY} ${cyc} ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] || [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_POST_GRIB2 returned with nonzero exit code." + fi +fi POST_STEP cat ${NET}.${cycle}.pm25.*.${id_domain}.grib2 >> ${NET}.${cycle}.1hpm25.${id_domain}.grib2 @@ -139,13 +136,13 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT51=awpaqm.${cycle}.1hpm25.${grid}.grib2 tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_1hpm25.${cycle}.${grid} - # Post Files to COMOUT - cp_vrfy awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUT} + # Post Files to COMOUTwmo + cp_vrfy awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUTwmo} # Distribute Data - if [ "${SENDDBN_NTC}" = "YES" ] ; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.1hpm25.${grid}.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 + if [ "${SENDDBN_NTC}" = "TRUE" ] ; then + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 fi done fi @@ -173,10 +170,10 @@ EOF1 flag_run_bicor_max=yes # 06z needs b.nc to find current day output from 04Z to 06Z if [ "${cyc}" = "06" ]; then - if [ -s ${COMIN_PDY}/00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN_PDY}/00/${NET}.t00z.chem_sfc.nc b.nc - elif [ -s ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc b.nc + if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then + ln_vrfy -sf ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then + ln_vrfy -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no @@ -185,20 +182,20 @@ EOF1 if [ "${cyc}" = "12" ]; then # 12z needs b.nc to find current day output from 04Z to 06Z - if [ -s ${COMIN_PDY}/00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN_PDY}/00/${NET}.t00z.chem_sfc.nc b.nc - elif [ -s ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN_PDYm1}/12/${NET}.${PDYm1}.t12z.chem_sfc.nc b.nc + if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then + ln_vrfy -sf ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then + ln_vrfy -sf ${COMINm1}/12/${NET}.${PDYm1}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no fi # 12z needs c.nc to find current day output from 07Z to 12z - if [ -s ${COMIN_PDY}/06/${NET}.t06z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN_PDY}/06/${NET}.t06z.chem_sfc.nc c.nc - elif [ -s ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN_PDYm1}/12/${NET}.t12z.chem_sfc.nc c.nc + if [ -s 
${COMIN}/../06/${NET}.t06z.chem_sfc.nc ]; then + ln_vrfy -sf ${COMIN}/../06/${NET}.t06z.chem_sfc.nc c.nc + elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then + ln_vrfy -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc chk1=0 else flag_run_bicor_max=no @@ -206,8 +203,15 @@ EOF1 fi PREP_STEP - eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ - Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code." + eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} + export err=$? + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk + else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code." + fi + fi POST_STEP wgrib2 ${NET}_pm25_24h_ave.${id_domain}.grib2 |grep "PMTF" | wgrib2 -i ${NET}_pm25_24h_ave.${id_domain}.grib2 -grib ${NET}.${cycle}.ave_24hr_pm25.${id_domain}.grib2 @@ -260,12 +264,12 @@ EOF1 cp_vrfy ${DATA}/${NET}.${cycle}.ave_24hr_pm25*.grib2 ${COMOUT} cp_vrfy ${DATA}/${NET}.${cycle}.max_1hr_pm25*.grib2 ${COMOUT} - cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${COMOUT} - cp_vrfy awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${COMOUT} + cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${COMOUTwmo} + cp_vrfy awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${COMOUTwmo} - if [ "$SENDDBN" = "YES" ]; then - ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.ave_24hr_pm25.${grid}.grib2 - ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_pm25.${grid}.grib2 + if [ "$SENDDBN" = "TRUE" ]; then + ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/${NET}.${cycle}.ave_24hr_pm25.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/${NET}.${cycle}.max_1hr_pm25.${grid}.grib2 fi done fi diff --git a/scripts/exregional_pre_post_stat.sh b/scripts/exregional_pre_post_stat.sh index a78ce9e2f5..33bbd75a41 100755 --- a/scripts/exregional_pre_post_stat.sh +++ b/scripts/exregional_pre_post_stat.sh @@ -52,17 +52,6 @@ This is the ex-script for the task that runs POST-UPP-STAT. #----------------------------------------------------------------------- # eval ${PRE_TASK_CMDS} -# -#----------------------------------------------------------------------- -# -# Move to the working directory -# -#----------------------------------------------------------------------- -# -DATA="${DATA}/tmp_PRE_POST_STAT" -rm_vrfy -r $DATA -mkdir_vrfy -p "$DATA" -cd_vrfy $DATA if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 2f4efc4a44..736a2fc115 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -97,10 +97,7 @@ print_info_msg "$VERBOSE" " Creating links in the INPUT subdirectory of the current run directory to the grid and (filtered) orography files ..." - # Create links to fix files in the FIXlam directory. - - cd_vrfy ${DATA}/INPUT # @@ -514,11 +511,19 @@ if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then python3 $USHdir/update_input_nml.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --run_dir "${DATA}" \ - --restart || print_err_msg_exit "\ -Call to function to update the FV3 input.nml file for restart for the -current cycle's (cdate) run directory (DATA) failed: + --restart + export err=$? 
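Both post-stat scripts above now stage the WMO-headed awpaqm products to COMOUTwmo and raise DBN alerts from that location, with the guards compared against TRUE/FALSE rather than YES/NO. A minimal sketch using the 1-hour PM2.5 product from the hunk:

    # Post the WMO-headed product and, if enabled, alert downstream subscribers.
    cp_vrfy awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUTwmo}
    if [ "${SENDDBN_NTC}" = "TRUE" ]; then
      ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} \
        ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2
    fi
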
+ if [ $err -ne 0 ]; then + message_txt="Call to function to update the FV3 input.nml file for restart +for the current cycle's (cdate) run directory (DATA) failed: cdate = \"${CDATE}\" DATA = \"${DATA}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi # Check that restart files exist at restart_interval file_ids=( "coupler.res" "fv_core.res.nc" "fv_core.res.tile1.nc" "fv_srf_wnd.res.tile1.nc" "fv_tracer.res.tile1.nc" "phy_data.nc" "sfc_data.nc" ) @@ -579,12 +584,19 @@ if [ "${CPL_AQM}" = "TRUE" ]; then --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --cdate "$CDATE" \ --run-dir "${DATA}" \ - --init_concentrations "${init_concentrations}" \ - || print_err_msg_exit "\ -Call to function to create an aqm.rc file for the current + --init_concentrations "${init_concentrations}" + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to function to create an aqm.rc file for the current cycle's (cdate) run directory (DATA) failed: cdate = \"${CDATE}\" DATA = \"${DATA}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi + fi fi # @@ -603,11 +615,19 @@ python3 $USHdir/create_model_configure_file.py \ --run-dir "${DATA}" \ --sub-hourly-post "${SUB_HOURLY_POST}" \ --dt-subhourly-post-mnts "${DT_SUBHOURLY_POST_MNTS}" \ - --dt-atmos "${DT_ATMOS}" || print_err_msg_exit "\ -Call to function to create a model configuration file for the current -cycle's (cdate) run directory (DATA) failed: + --dt-atmos "${DT_ATMOS}" +export err=$? +if [ $err -ne 0 ]; then + message_txt="Call to function to create a model configuration file +for the current cycle's (cdate) run directory (DATA) failed: cdate = \"${CDATE}\" DATA = \"${DATA}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi # #----------------------------------------------------------------------- # @@ -618,14 +638,23 @@ cycle's (cdate) run directory (DATA) failed: # python3 $USHdir/create_diag_table_file.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --run-dir "${DATA}" || print_err_msg_exit "\ -Call to function to create a diag table file for the current cycle's -(cdate) run directory (DATA) failed: + --run-dir "${DATA}" +export err=$? +if [ $err -ne 0 ]; then + message_txt="Call to function to create a diag table file for the current +cycle's (cdate) run directory (DATA) failed: DATA = \"${DATA}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi # #----------------------------------------------------------------------- # -# Pre-generate symlinks to forecast output in DATA +# Pre-generate symlink to forecast RESTART in DATA for early start of +# the next cycle # #----------------------------------------------------------------------- # @@ -643,11 +672,18 @@ fi # python3 $USHdir/create_nems_configure_file.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --run-dir "${DATA}" \ - || print_err_msg_exit "\ -Call to function to create a NEMS configuration file for the current -cycle's (cdate) run directory (DATA) failed: + --run-dir "${DATA}" +export err=$? 
+if [ $err -ne 0 ]; then + message_txt="Call to function to create a NEMS configuration file for +the current cycle's (cdate) run directory (DATA) failed: DATA = \"${DATA}\"" + if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_exit "${message_txt}" + else + print_err_msg_exit "${message_txt}" + fi +fi # #----------------------------------------------------------------------- # @@ -660,9 +696,15 @@ cycle's (cdate) run directory (DATA) failed: #----------------------------------------------------------------------- # PREP_STEP -eval ${RUN_CMD_FCST} ${FV3_EXEC_FP} ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ -Call to executable to run FV3-LAM forecast returned with nonzero exit -code." +eval ${RUN_CMD_FCST} ${FV3_EXEC_FP} ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run FV3-LAM forecast returned with nonzero exit code." + fi +fi POST_STEP # #----------------------------------------------------------------------- @@ -677,15 +719,28 @@ POST_STEP # if [ "${CPL_AQM}" = "TRUE" ]; then if [ "${RUN_ENVIR}" = "nco" ]; then - rm_vrfy -rf "${COMIN}/RESTART" + if [ -d "${COMIN}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then + rm_vrfy -rf "${COMIN}/RESTART" + fi if [ "$(ls -A ${DATA}/RESTART)" ]; then - mv_vrfy ${DATA}/RESTART ${COMIN} - ln_vrfy -sf ${COMIN}/RESTART ${DATA}/RESTART + cp_vrfy -Rp ${DATA}/RESTART ${COMIN} fi fi - mv_vrfy ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} - + cp_vrfy -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} + + fhr_ct=0 + fhr=0 + while [ $fhr -le ${FCST_LEN_HRS} ]; do + fhr_ct=$(printf "%03d" $fhr) + source_dyn="${DATA}/dynf${fhr_ct}.nc" + source_phy="${DATA}/phyf${fhr_ct}.nc" + target_dyn="${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.nc" + target_phy="${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.nc" + [ -f ${source_dyn} ] && cp_vrfy -p ${source_dyn} ${target_dyn} + [ -f ${source_phy} ] && cp_vrfy -p ${source_phy} ${target_phy} + (( fhr=fhr+1 )) + done fi # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 3e1494822f..76a1196329 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -216,9 +216,16 @@ print_info_msg "$VERBOSE" " Starting post-processing for fhr = $fhr hr..." PREP_STEP -eval ${RUN_CMD_POST} ${EXECdir}/upp.x < itag ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ -Call to executable to run post for forecast hour $fhr returned with non- -zero exit code." +eval ${RUN_CMD_POST} ${EXECdir}/upp.x < itag ${REDIRECT_OUT_ERR} +export err=$? +if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then + err_chk +else + if [ $err -ne 0 ]; then + print_err_msg_exit "Call to executable to run post for forecast hour $fhr +returned with non-zero exit code." 
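In the run_fcst hunk above, the coupled-AQM dyn/phy history files are now copied from the forecast working directory into COMIN with three-digit forecast hours, and the run_post hunk that follows drops the old per-hour move. A condensed sketch of the copy loop, relying on the same workflow variables as the script:

    # Copy per-hour dyn/phy files to COMIN, padding the hour to three digits (f000, f001, ...).
    fhr=0
    while [ $fhr -le ${FCST_LEN_HRS} ]; do
      fhr_ct=$(printf "%03d" $fhr)
      source_dyn="${DATA}/dynf${fhr_ct}.nc"
      source_phy="${DATA}/phyf${fhr_ct}.nc"
      [ -f ${source_dyn} ] && cp_vrfy -p ${source_dyn} "${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.nc"
      [ -f ${source_phy} ] && cp_vrfy -p ${source_phy} "${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.nc"
      (( fhr=fhr+1 ))
    done
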
+ fi +fi POST_STEP # #----------------------------------------------------------------------- @@ -287,18 +294,8 @@ for fid in "${fids[@]}"; do fi done -# Move phy and dyn files to COMIN only for AQM -if [ "${CPL_AQM}" = "TRUE" ]; then - mv_vrfy ${dyn_file} ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc - mv_vrfy ${phy_file} ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc -fi - rm_vrfy -rf ${DATA_FHR} -# Delete the forecast directory -if [ $RUN_ENVIR = "nco" ] && [ $KEEPDATA = "FALSE" ]; then - rm -rf $DATAFCST -fi # #----------------------------------------------------------------------- # diff --git a/tests/WE2E/run_WE2E_tests.py b/tests/WE2E/run_WE2E_tests.py index f83073a735..4a2e6babe0 100755 --- a/tests/WE2E/run_WE2E_tests.py +++ b/tests/WE2E/run_WE2E_tests.py @@ -154,11 +154,11 @@ def run_we2e_tests(homedir, args) -> None: if run_envir == "nco": if 'nco' not in test_cfg: test_cfg['nco'] = dict() - test_cfg['nco'].update({"model_ver": "we2e"}) + test_cfg['nco'].update({"model_ver_default": "we2e"}) if args.opsroot: if 'nco' not in test_cfg: test_cfg['nco'] = dict() - test_cfg['nco'].update({"OPSROOT": args.opsroot}) + test_cfg['nco'].update({"OPSROOT_default": args.opsroot}) # if platform section was not in input config, initialize as empty dict if 'platform' not in test_cfg: test_cfg['platform'] = dict() diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml index 1f0dd8382f..789c5e9674 100644 --- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml @@ -17,7 +17,7 @@ workflow: FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 DO_REAL_TIME: false nco: - NET: aqm + NET_default: aqm rocoto: tasks: taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' diff --git a/ush/config.aqm.community.yaml b/ush/config.aqm.community.yaml index 55675bb8ce..30e391edf2 100644 --- a/ush/config.aqm.community.yaml +++ b/ush/config.aqm.community.yaml @@ -25,7 +25,7 @@ workflow: FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 DO_REAL_TIME: false nco: - NET: aqm + NET_default: aqm rocoto: tasks: taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' diff --git a/ush/config.aqm.nco.realtime.yaml b/ush/config.aqm.nco.realtime.yaml index 1a0da22677..f2299eacc9 100644 --- a/ush/config.aqm.nco.realtime.yaml +++ b/ush/config.aqm.nco.realtime.yaml @@ -29,12 +29,12 @@ workflow: COLDSTART: false WARMSTART_CYCLE_DIR: /path/to/restart/dir nco: - envir: prod - NET: aqm - model_ver: v7.0 - RUN: aqm_nco_aqmna13km - OPSROOT: /path/to/custom/opsroot - KEEPDATA: true + envir_default: prod + NET_default: aqm + model_ver_default: v7.0 + RUN_default: aqm + OPSROOT_default: /path/to/custom/opsroot + KEEPDATA_default: true rocoto: tasks: taskgroups: '{{ ["parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/aqm_post.yaml"]|include }}' diff --git a/ush/config.nco.yaml b/ush/config.nco.yaml index 0019cf989c..8339f5d9a6 100644 --- a/ush/config.nco.yaml +++ b/ush/config.nco.yaml @@ -17,9 +17,9 @@ workflow: VERBOSE: true COMPILER: intel nco: - NET: rrfs - model_ver: v1.0 - RUN: rrfs_test + NET_default: rrfs + model_ver_default: v1.0 + RUN_default: rrfs_test task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: 
grib2 diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 7ea9dd501b..5a83c090e9 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -430,23 +430,6 @@ platform: #----------------------------------------------------------------------- # EXTRN_MDL_DATA_STORES: "" - # - #----------------------------------------------------------------------- - # - # COMINgfs: - # Path to the real-time GFS data - # - # COMINgefs: - # Path to the real-time GEFS data - # - # COMINairnow: - # Path to the real-time AIRNOW observation data - # - #----------------------------------------------------------------------- - # - COMINgfs: "" - COMINgefs: "" - COMINairnow: "/path/to/real/time/airnow/data" #----------------------------- # WORKFLOW config parameters #----------------------------- @@ -986,7 +969,7 @@ workflow: #----------------------------------------------------------------------- # # COLDSTART: - # Flag turning on/off warm start + # Flag turning on/off warm start of the first cycle # # WARMSTART_CYCLE_DIR: # Path to the directory where RESTART dir is located for warm start @@ -1004,7 +987,11 @@ nco: #----------------------------------------------------------------------- # # Set variables that are only used in NCO mode (i.e. when RUN_ENVIR is - # set to "nco"). Definitions: + # set to "nco"). All variables have the suffix [_default] meaning the default value. + # This is because they are supposed to be defined in job cards for the + # production using ecFlow. + # + # Definitions: # # envir, NET, model_ver, RUN: # Standard environment variables defined in the NCEP Central Operations WCOSS @@ -1042,43 +1029,34 @@ nco: # #----------------------------------------------------------------------- # - envir: "para" - NET: "rrfs" - RUN: "rrfs" - model_ver: "v1.0.0" + envir_default: "para" + NET_default: "rrfs" + RUN_default: "rrfs" + model_ver_default: "v1.0.0" - OPSROOT: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs' - COMROOT: '{{ OPSROOT }}/com' - PACKAGEROOT: '{{ OPSROOT }}/packages' - DATAROOT: '{{ OPSROOT }}/tmp' - DCOMROOT: '{{ OPSROOT }}/dcom' - LOGBASEDIR: '{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}' - EXTROOT: '{{ OPSROOT }}/ext' - COMIN_BASEDIR: '{{ COMROOT }}/{{ NET }}/{{ model_ver }}' - COMOUT_BASEDIR: '{{ COMROOT }}/{{ NET }}/{{ model_ver }}' + OPSROOT_default: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs' + COMROOT_default: '{{ OPSROOT_default }}/com' + DATAROOT_default: '{{ OPSROOT_default }}/tmp' + DCOMROOT_default: '{{ OPSROOT_default }}/dcom' + LOGBASEDIR_default: '{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT_default, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}' + COMIN_BASEDIR: '{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}' + COMOUT_BASEDIR: '{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}' # # New additions from RRFS_dev1 # - NWGES: '{{ OPSROOT }}/nwges' + NWGES: '{{ OPSROOT_default }}/nwges' NWGES_BASEDIR: '{{ NWGES }}' - # - #----------------------------------------------------------------------- - # - # The following are also described in the NCO doc above - # - #----------------------------------------------------------------------- - # - DBNROOT: "" - SENDECF: false - SENDDBN: false - SENDDBN_NTC: false - SENDCOM: false - SENDWEB: false - KEEPDATA: true - MAILTO: "" - MAILCC: "" + DBNROOT_default: "" + SENDECF_default: false + SENDDBN_default: false + SENDDBN_NTC_default: false + SENDCOM_default: 
false + SENDWEB_default: false + KEEPDATA_default: true + MAILTO_default: "" + MAILCC_default: "" #---------------------------------- # GSI namelist parameters #---------------------------------- @@ -2529,13 +2507,13 @@ cpl_aqm_parm: # AQM_CONFIG_DIR: # Configuration directory for AQM # - # AQM_BIO_DIR: + # DCOMINbio: # Path to the directory containing AQM bio files # # AQM_BIO_FILE: # File name of AQM BIO file # - # AQM_DUST_DIR: + # DCOMINdust: # Path to the directory containing AQM dust file # # AQM_DUST_FILE_PREFIX: @@ -2544,7 +2522,7 @@ cpl_aqm_parm: # AQM_DUST_FILE_SUFFIX: # Suffix and extension of AQM dust file # - # AQM_CANOPY_DIR: + # DCOMINcanopy: # Path to the directory containing AQM canopy files # # AQM_CANOPY_FILE_PREFIX: @@ -2553,7 +2531,7 @@ cpl_aqm_parm: # AQM_CANOPY_FILE_SUFFIX: # Suffix and extension of AQM CANOPY file # - # AQM_FIRE_DIR: + # DCOMINfire: # Path to the directory containing AQM fire files # # AQM_FIRE_FILE_PREFIX: @@ -2574,13 +2552,13 @@ cpl_aqm_parm: # AQM_RC_PRODUCT_FREQUENCY: # Frequency of AQM output products # - # AQM_LBCS_DIR: + # DCOMINchem_lbcs: # Path to the directory containing chemical LBC files # # AQM_LBCS_FILES: # File name of chemical LBCs # - # AQM_GEFS_DIR: + # DCOMINgefs: # Path to the directory containing GEFS aerosol LBC files # # AQM_GEFS_FILE_PREFIX: @@ -2612,11 +2590,19 @@ cpl_aqm_parm: # NEXUS_GFS_SFC_ARCHV_DIR: # Path to archive directory for gfs surface files on HPSS # - # PT_SRC_BASEDIR: + # DCOMINpt_src: # Parent directory containing point source files # - # AQM_AIRNOW_HIST_DIR: - # Path to the directory where the historical AIRNOW data are located + # DCOMINairnow: + # Path to the directory containing AIRNOW observation data + # + # COMINbicor: + # Path to the directory containing historical training data for bias correction + # + # COMOUTbicor: + # Path to save the current cycle's model output and AirNow obs as + # training data for future use. $COMINbicor and $COMOUTbicor can be + # distinguished by the ${yyyy}${mm}${dd} subdirectory under the same location # #----------------------------------------------------------------------- # @@ -2630,19 +2616,26 @@ cpl_aqm_parm: DO_AQM_SAVE_AIRNOW_HIST: false DO_AQM_SAVE_FIRE: false + DCOMINbio_default: "" + DCOMINdust_default: "/path/to/dust/dir" + DCOMINcanopy_default: "/path/to/canopy/dir" + DCOMINfire_default: "" + DCOMINchem_lbcs_default: "" + DCOMINgefs_default: "" + DCOMINpt_src_default: "/path/to/point/source/base/directory" + DCOMINairnow_default: "/path/to/airnow/observation/data" + COMINbicor: "/path/to/historical/airnow/data/dir" + COMOUTbicor: "/path/to/historical/airnow/data/dir" + AQM_CONFIG_DIR: "" - AQM_BIO_DIR: "" AQM_BIO_FILE: "BEIS_SARC401.ncf" - AQM_DUST_DIR: "/path/to/dust/dir" AQM_DUST_FILE_PREFIX: "FENGSHA_p8_10km_inputs" AQM_DUST_FILE_SUFFIX: ".nc" - AQM_CANOPY_DIR: "/path/to/canopy/dir" AQM_CANOPY_FILE_PREFIX: "gfs.t12z.geo" AQM_CANOPY_FILE_SUFFIX: ".canopy_regrid.nc" - AQM_FIRE_DIR: "" AQM_FIRE_FILE_PREFIX: "GBBEPx_C401GRID.emissions_v003" AQM_FIRE_FILE_SUFFIX: ".nc" AQM_FIRE_FILE_OFFSET_HRS: 0 @@ -2652,10 +2645,8 @@ cpl_aqm_parm: AQM_RC_PRODUCT_FN: "aqm.prod.nc" AQM_RC_PRODUCT_FREQUENCY: "hourly" - AQM_LBCS_DIR: "" AQM_LBCS_FILES: "gfs_bndy_chen_.tile7.000.nc" - AQM_GEFS_DIR: "" AQM_GEFS_FILE_PREFIX: "geaer" AQM_GEFS_FILE_CYC: "" @@ -2667,10 +2658,6 @@ cpl_aqm_parm: NEXUS_GFS_SFC_DIR: "" NEXUS_GFS_SFC_ARCHV_DIR: "/NCEPPROD/hpssprod/runhistory" - PT_SRC_BASEDIR: "/path/to/point/source/base/directory" - - AQM_AIRNOW_HIST_DIR: "/path/to/historical/airnow/data/dir" - rocoto: attrs: "" cycledefs: "" diff --git 
a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 02d8d827e5..acf675a3b9 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -63,24 +63,24 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): # # Set parameters in the aqm.rc file. # - aqm_rc_bio_file_fp=os.path.join(AQM_BIO_DIR, AQM_BIO_FILE) + aqm_rc_bio_file_fp=os.path.join(DCOMINbio, AQM_BIO_FILE) # Fire config aqm_rc_fire_file_fp=os.path.join( - COMINext, + COMIN, "FIRE_EMISSION", f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" ) # Dust config aqm_rc_dust_file_fp=os.path.join( - AQM_DUST_DIR, + DCOMINdust, f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", ) # Canopy config aqm_rc_canopy_file_fp=os.path.join( - AQM_CANOPY_DIR, + DCOMINcanopy, PREDEF_GRID_NAME, f"{AQM_CANOPY_FILE_PREFIX}.{mm}{AQM_CANOPY_FILE_SUFFIX}", ) @@ -101,7 +101,7 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): "aqm_config_dir": AQM_CONFIG_DIR, "init_concentrations": init_concentrations, "aqm_rc_bio_file_fp": aqm_rc_bio_file_fp, - "aqm_bio_dir": AQM_BIO_DIR, + "dcominbio": DCOMINbio, "aqm_rc_fire_file_fp": aqm_rc_fire_file_fp, "aqm_rc_fire_frequency": AQM_RC_FIRE_FREQUENCY, "aqm_rc_dust_file_fp": aqm_rc_dust_file_fp, diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index 4c0ce2ed60..e243f31b37 100644 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -4,6 +4,7 @@ #----------------------------------------------------------------------- # # If requested to share data with next task, override jobid +# When an argument exists with this script, a shared job id will be created. # #----------------------------------------------------------------------- # @@ -12,6 +13,77 @@ if [ $# -ne 0 ]; then export pid=$share_pid export jobid=${job}.${pid} fi + +# +#----------------------------------------------------------------------- +# +# Set NCO standard environment variables +# +#----------------------------------------------------------------------- +# +export envir="${envir:-${envir_default}}" +export NET="${NET:-${NET_default}}" +export RUN="${RUN:-${RUN_default}}" +export model_ver="${model_ver:-${model_ver_default}}" +export COMROOT="${COMROOT:-${COMROOT_default}}" +export DATAROOT="${DATAROOT:-${DATAROOT_default}}" +export DCOMROOT="${DCOMROOT:-${DCOMROOT_default}}" +export LOGBASEDIR="${LOGBASEDIR:-${LOGBASEDIR_default}}" + +export DBNROOT="${DBNROOT:-${DBNROOT_default}}" +export SENDECF="${SENDECF:-${SENDECF_default}}" +export SENDDBN="${SENDDBN:-${SENDDBN_default}}" +export SENDDBN_NTC="${SENDDBN_NTC:-${SENDDBN_NTC_default}}" +export SENDCOM="${SENDCOM:-${SENDCOM_default}}" +export SENDWEB="${SENDWEB:-${SENDWEB_default}}" +export KEEPDATA="${KEEPDATA:-${KEEPDATA_default}}" +export MAILTO="${MAILTO:-${MAILTO_default}}" +export MAILCC="${MAILCC:-${MAILCC_default}}" + +if [ "${RUN_ENVIR}" = "nco" ]; then + if [ "${MACHINE}" = "WCOSS2" ]; then + [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" + export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" + export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" + export COMINgefs="${COMINgefs:-$(compath.py ${envir}/gefs/${gefs_ver})}" + else + export COMIN="${COMIN_BASEDIR}/${RUN}.${PDY}/${cyc}" + export COMOUT="${COMOUT_BASEDIR}/${RUN}.${PDY}/${cyc}" + export COMINm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}" 
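A minimal sketch of what the two COMIN/COMOUT branches resolve to, assuming hypothetical values NET=aqm, model_ver=v7.0, RUN=aqm, PDY=20230629 and cyc=12 (compath.py is the prod_util helper already used above; the expansion shown is indicative only):

    # WCOSS2: compath.py expands the relative COM path under COMROOT
    compath.py -o aqm/v7.0/aqm.20230629/12
    # -> something like ${COMROOT}/aqm/v7.0/aqm.20230629/12
    # Other platforms: the equivalent path is assembled from COMIN_BASEDIR
    echo "${COMIN_BASEDIR}/${RUN}.${PDY}/${cyc}"
    # -> ${COMROOT_default}/aqm/v7.0/aqm.20230629/12

Both branches therefore end up with the same ${RUN}.${PDY}/${cyc} directory layout; only the way COMROOT is resolved differs.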
+ fi +else + export COMIN="${COMIN_BASEDIR}/${PDY}${cyc}" + export COMOUT="${COMOUT_BASEDIR}/${PDY}${cyc}" + export COMINm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}" +fi +export COMOUTwmo="${COMOUTwmo:-${COMOUT}/wmo}" + +export DCOMINbio="${DCOMINbio:-${DCOMINbio_default}}" +export DCOMINdust="${DCOMINdust:-${DCOMINdust_default}}" +export DCOMINcanopy="${DCOMINcanopy:-${DCOMINcanopy_default}}" +export DCOMINfire="${DCOMINfire:-${DCOMINfire_default}}" +export DCOMINchem_lbcs="${DCOMINchem_lbcs:-${DCOMINchem_lbcs_default}}" +export DCOMINgefs="${DCOMINgefs:-${DCOMINgefs_default}}" +export DCOMINpt_src="${DCOMINpt_src:-${DCOMINpt_src_default}}" +export DCOMINairnow="${DCOMINairnow:-${DCOMINairnow_default}}" + +# +#----------------------------------------------------------------------- +# +# Change YES/NO (NCO standards; job card) to TRUE/FALSE (workflow standards) +# for NCO environment variables +# +#----------------------------------------------------------------------- +# +export KEEPDATA=$(boolify "${KEEPDATA}") +export SENDCOM=$(boolify "${SENDCOM}") +export SENDDBN=$(boolify "${SENDDBN}") +export SENDDBN_NTC=$(boolify "${SENDDBN_NTC}") +export SENDECF=$(boolify "${SENDECF}") +export SENDWEB=$(boolify "${SENDWEB}") + # #----------------------------------------------------------------------- # @@ -72,6 +144,7 @@ if [ "${RUN_ENVIR}" = "nco" ]; then export pgmerr="${DATA}/errfile" export REDIRECT_OUT_ERR=">>${pgmout} 2>${pgmerr}" export pgmout_lines=1 + export pgmerr_lines=1 function PREP_STEP() { export pgm="$(basename ${0})" @@ -94,6 +167,11 @@ if [ "${RUN_ENVIR}" = "nco" ]; then pgmout_line=$( wc -l $pgmout ) pgmout_lines=$((pgmout_lines + 1)) fi + if [ -f $pgmerr ]; then + tail -n +${pgmerr_lines} $pgmerr + pgmerr_line=$( wc -l $pgmerr ) + pgmerr_lines=$((pgmerr_lines + 1)) + fi } else export pgmout= @@ -108,24 +186,6 @@ else fi export -f PREP_STEP export -f POST_STEP -# -#----------------------------------------------------------------------- -# -# Set COMIN / COMOUT -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "nco" ]; then - export COMIN="${COMIN_BASEDIR}/${RUN}.${PDY}/${cyc}" - export COMOUT="${COMOUT_BASEDIR}/${RUN}.${PDY}/${cyc}" - export COMINext="${EXTROOT}/${RUN}.${PDY}/${cyc}" -else - export COMIN="${COMIN_BASEDIR}/${PDY}${cyc}" - export COMOUT="${COMOUT_BASEDIR}/${PDY}${cyc}" - export COMINext="${EXTROOT}/${PDY}${cyc}" -fi -export COMIN_PDY="${COMIN_BASEDIR}/${RUN}.${PDY}" -export COMIN_PDYm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}" # #----------------------------------------------------------------------- @@ -135,7 +195,7 @@ export COMIN_PDYm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}" # #----------------------------------------------------------------------- # -if [ "${RUN_ENVIR}" = "nco" ]; then +if [ "${RUN_ENVIR}" = "nco" ] && [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then __EXPTLOG=${EXPTDIR}/log mkdir_vrfy -p ${__EXPTLOG} for i in ${LOGDIR}/*.${WORKFLOW_ID}.log; do @@ -147,15 +207,27 @@ fi #----------------------------------------------------------------------- # # Add a postamble function +# When an argument exists, the working directory will not be removed +# even with KEEPDATA: false. +# Only when an argument is TRUE, the existing working directories in +# the tmp directory will be removed. 
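In other words, for the postamble defined just below: with no argument only the task's own ${DATA} is deleted; with an argument other than TRUE nothing is deleted, so later shared tasks can reuse the directory; and with the argument TRUE both ${DATA} and every shared *${WORKFLOW_ID}_${PDY}${cyc} directory under ${DATAROOT} are deleted. All of this applies only in NCO mode with KEEPDATA false. A minimal sketch of how a J-job might use it (illustrative only; $USHdir and the exact calls are assumptions, not lines taken from this change):

    . $USHdir/job_preamble.sh      # set NCO variables, COMIN/COMOUT, DATA
    # ... run the task's ex-script ...
    job_postamble                  # standalone task: remove its own $DATA
    # job_postamble "FALSE"        # shared task: keep $DATA for later tasks
    # job_postamble "TRUE"         # last shared task: also remove the shared
                                   # *${WORKFLOW_ID}_${PDY}${cyc} directories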
# #----------------------------------------------------------------------- # function job_postamble() { # Remove temp directory - if [ "${RUN_ENVIR}" = "nco" ] && [ $# -eq 0 ]; then - cd ${DATAROOT} - [[ $KEEPDATA = "FALSE" ]] && rm -rf $DATA + if [ "${RUN_ENVIR}" = "nco" ] && [ "${KEEPDATA}" = "FALSE" ]; then + cd ${DATAROOT} + # Remove current data directory + if [ $# -eq 0 ]; then + rm -rf $DATA + # Remove all current and shared data directories + elif [ "$1" = "TRUE" ]; then + rm -rf $DATA + share_pid="${WORKFLOW_ID}_${PDY}${cyc}" + rm -rf *${share_pid} + fi fi # Print exit message diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index a3b8aaca5a..fcd64d6b64 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -100,7 +100,9 @@ set -u # default_modules_dir="$HOMEdir/modulefiles" machine=$(echo_lowercase $MACHINE) -source "${HOMEdir}/etc/lmod-setup.sh" ${machine} +if [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then + source "${HOMEdir}/etc/lmod-setup.sh" ${machine} +fi module use "${default_modules_dir}" if [ "${machine}" != "wcoss2" ]; then @@ -177,8 +179,6 @@ elif [ -f ${modules_dir}/python_srw.lua ] ; then modules_dir = \"${modules_dir}\"" fi - - module list # Modules that use conda and need an environment activated will set the @@ -210,7 +210,13 @@ print_info_msg "$VERBOSE" " Launching J-job (jjob_fp) for task \"${task_name}\" ... jjob_fp = \"${jjob_fp}\" " -exec "${jjob_fp}" + +if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then + /bin/bash "${jjob_fp}" +else + exec "${jjob_fp}" +fi + # #----------------------------------------------------------------------- # @@ -221,4 +227,3 @@ exec "${jjob_fp}" # { restore_shell_opts; } > /dev/null 2>&1 - diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml index 7604301fa3..121e3a91ef 100644 --- a/ush/machine/hera.yaml +++ b/ush/machine/hera.yaml @@ -45,12 +45,13 @@ task_get_da_obs: cpl_aqm_parm: AQM_CONFIG_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/aqm/epa/data - AQM_BIO_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/aqm/bio - AQM_DUST_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/FENGSHA - AQM_CANOPY_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/canopy - AQM_FIRE_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/RAVE_fire - AQM_LBCS_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1 - AQM_GEFS_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/GEFS_DATA + DCOMINbio_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/aqm/bio + DCOMINdust_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/FENGSHA + DCOMINcanopy_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/canopy + DCOMINfire_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/RAVE_fire + DCOMINchem_lbcs_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1 + DCOMINgefs_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/GEFS_DATA + DCOMINpt_src_default: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus/NEI2016v1/v2023-01-PT NEXUS_INPUT_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/emissions/nexus NEXUS_FIX_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/nexus/fix NEXUS_GFS_SFC_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/GFS_DATA diff --git a/ush/machine/wcoss2.yaml b/ush/machine/wcoss2.yaml index 2703451bf1..6da06b7288 100644 --- a/ush/machine/wcoss2.yaml +++ b/ush/machine/wcoss2.yaml @@ -48,17 +48,19 @@ data: HRRR: compath.py ${envir}/hrrr/${hrrr_ver}/hrrr.${PDYext}/conus cpl_aqm_parm: AQM_CONFIG_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/aqm/epa/data - AQM_BIO_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/aqm/bio - AQM_DUST_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/FENGSHA - AQM_CANOPY_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/canopy - 
AQM_FIRE_DIR: /lfs/h2/emc/physics/noscrub/kai.wang/RAVE_fire/RAVE_NA_NRT - AQM_LBCS_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1 - AQM_GEFS_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GEFS_DATA - AQM_AIRNOW_HIST_DIR: /lfs/h2/emc/physics/noscrub/jianping.huang/Bias_correction/aqmv7.0 + DCOMINbio_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/aqm/bio + DCOMINdust_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/FENGSHA + DCOMINcanopy_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/canopy + DCOMINfire_default: /lfs/h1/ops/dev/dcom + DCOMINchem_lbcs_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1 + DCOMINgefs_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GEFS_DATA + DCOMINpt_src_default: /lfs/h2/emc/physics/noscrub/Youhua.Tang/nei2016v1-pt/v2023-01-PT + DCOMINairnow_default: /lfs/h1/ops/prod/dcom + COMINbicor: /lfs/h2/emc/physics/noscrub/jianping.huang/Bias_correction/aqmv7.0.81 + COMOUTbicor: /lfs/h2/emc/physics/noscrub/jianping.huang/Bias_correction/aqmv7.0.81 NEXUS_INPUT_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/nexus_emissions NEXUS_FIX_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/nexus/fix NEXUS_GFS_SFC_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GFS_DATA - PT_SRC_BASEDIR: /lfs/h2/emc/physics/noscrub/Youhua.Tang/nei2016v1-pt/v2023-01-PT rocoto: tasks: diff --git a/ush/setup.py b/ush/setup.py index 41b0e0bb92..637e3b541c 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -1160,14 +1160,12 @@ def get_location(xcs, fmt, expt_cfg): # running in community mode, we set these paths to the experiment # directory. nco_vars = [ - "opsroot", - "comroot", - "packageroot", - "dataroot", - "dcomroot", + "opsroot_default", + "comroot_default", + "dataroot_default", + "dcomroot_default", "comin_basedir", "comout_basedir", - "extroot", ] nco_config = expt_config["nco"] @@ -1177,30 +1175,28 @@ def get_location(xcs, fmt, expt_cfg): nco_config[nco_var.upper()] = exptdir # Use env variables for NCO variables and create NCO directories - if run_envir == "nco": - + workflow_manager = expt_config["platform"].get("WORKFLOW_MANAGER") + if run_envir == "nco" and workflow_manager == "rocoto": for nco_var in nco_vars: envar = os.environ.get(nco_var) if envar is not None: nco_config[nco_var.upper()] = envar - mkdir_vrfy(f' -p "{nco_config.get("OPSROOT")}"') - mkdir_vrfy(f' -p "{nco_config.get("COMROOT")}"') - mkdir_vrfy(f' -p "{nco_config.get("PACKAGEROOT")}"') - mkdir_vrfy(f' -p "{nco_config.get("DATAROOT")}"') - mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT")}"') - mkdir_vrfy(f' -p "{nco_config.get("EXTROOT")}"') + mkdir_vrfy(f' -p "{nco_config.get("OPSROOT_default")}"') + mkdir_vrfy(f' -p "{nco_config.get("COMROOT_default")}"') + mkdir_vrfy(f' -p "{nco_config.get("DATAROOT_default")}"') + mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT_default")}"') # Update the rocoto string for the fcst output location if # running an ensemble in nco mode if global_sect["DO_ENSEMBLE"]: rocoto_config["entities"]["FCST_DIR"] = \ - "{{ nco.DATAROOT }}/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" + "{{ nco.DATAROOT_default }}/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" - if nco_config["DBNROOT"]: - mkdir_vrfy(f' -p "{nco_config["DBNROOT"]}"') + if nco_config["DBNROOT_default"] and workflow_manager == "rocoto": + mkdir_vrfy(f' -p "{nco_config["DBNROOT_default"]}"') - mkdir_vrfy(f' -p "{nco_config.get("LOGBASEDIR")}"') + mkdir_vrfy(f' -p "{nco_config.get("LOGBASEDIR_default")}"') # create experiment dir mkdir_vrfy(f' -p "{exptdir}"') diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml index 
83563c0f62..0af5f3c4ac 100644 --- a/ush/valid_param_vals.yaml +++ b/ush/valid_param_vals.yaml @@ -7,7 +7,7 @@ valid_vals_DEBUG: [True, False] valid_vals_MACHINE: ["HERA", "WCOSS2", "ORION", "JET", "ODIN", "CHEYENNE", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA"] valid_vals_SCHED: ["slurm", "pbspro", "lsf", "lsfcray", "none"] valid_vals_FCST_MODEL: ["ufs-weather-model"] -valid_vals_WORKFLOW_MANAGER: ["rocoto", "none"] +valid_vals_WORKFLOW_MANAGER: ["rocoto", "ecflow", "none"] valid_vals_PREDEF_GRID_NAME: [ "RRFS_CONUS_25km", "RRFS_CONUS_13km",