diff --git a/jobs/JGFS_ATMOS_CYCLONE_GENESIS b/jobs/JGFS_ATMOS_CYCLONE_GENESIS
index de130bf9aa..a80bcc1153 100755
--- a/jobs/JGFS_ATMOS_CYCLONE_GENESIS
+++ b/jobs/JGFS_ATMOS_CYCLONE_GENESIS
@@ -3,11 +3,6 @@
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis" -c "base genesis"
 
-# Hack to temporary skip this as the tracker has not been build
-# on Hercules Rocky 9 yet
-# TODO: Remove this after tracker has been built for Rocky 9 #2639
-if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi
-
 ##############################################
 # Set variables used in the exglobal script
 ##############################################
diff --git a/jobs/JGFS_ATMOS_CYCLONE_TRACKER b/jobs/JGFS_ATMOS_CYCLONE_TRACKER
index 067de2c4aa..24fe33f8ca 100755
--- a/jobs/JGFS_ATMOS_CYCLONE_TRACKER
+++ b/jobs/JGFS_ATMOS_CYCLONE_TRACKER
@@ -3,11 +3,6 @@
 source "${HOMEgfs}/ush/preamble.sh"
 source "${HOMEgfs}/ush/jjob_header.sh" -e "tracker" -c "base tracker"
 
-# Hack to temporary skip this as the tracker has not been build
-# on Hercules Rocky 9 yet
-# TODO: Remove this after tracker has been built for Rocky 9 #2639
-if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi
-
 
 export COMPONENT="atmos"
 
diff --git a/jobs/JGLOBAL_ATMOS_VMINMON b/jobs/JGLOBAL_ATMOS_VMINMON
index 8ad9b91792..26cdc73c95 100755
--- a/jobs/JGLOBAL_ATMOS_VMINMON
+++ b/jobs/JGLOBAL_ATMOS_VMINMON
@@ -17,16 +17,18 @@ export gcyc=${GDATE:8:2}
 #############################################
 # TANKverf - WHERE OUTPUT DATA WILL RESIDE
 #############################################
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_MINMON
-YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx COM_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+    COMIN_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \
+    COMOUT_ATMOS_MINMON:COM_ATMOS_MINMON_TMPL
 
-export gsistat="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.gsistat"
-export M_TANKverf=${M_TANKverf:-${COM_ATMOS_MINMON}}
-export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_MINMON_PREV}}
+YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
+    COMIN_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL
 
-if [[ ! -d ${M_TANKverf} ]]; then mkdir -p -m 775 "${M_TANKverf}" ; fi
-if [[ ! -d ${M_TANKverfM1} ]]; then mkdir -p -m 775 "${M_TANKverfM1}" ; fi
+export gsistat="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.gsistat"
+export M_TANKverf=${M_TANKverf:-${COMOUT_ATMOS_MINMON}}
+export M_TANKverfM1=${M_TANKverfM1:-${COMIN_ATMOS_MINMON_PREV}}
+
+if [[ ! -d ${M_TANKverf} ]]; then mkdir -p "${M_TANKverf}" ; fi
 
 ########################################################
 # Execute the script.
diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base
index 4a5a7198af..2ca5be9bfb 100644
--- a/parm/config/gfs/config.base
+++ b/parm/config/gfs/config.base
@@ -493,4 +493,11 @@ if [[ "${machine}" =~ "PW" ]]; then
   export DO_WAVE="NO"
 fi
 
+# The tracker and genesis are not installed on Orion/Hercules yet; this requires spack-stack builds of the package.
+# TODO: we should place these in workflow/hosts/[orion|hercules].yaml.
+if [[ "${machine}" == "ORION" || "${machine}" == "HERCULES" ]]; then + export DO_TRACKER="NO" + export DO_GENESIS="NO" +fi + echo "END: config.base" diff --git a/parm/config/gfs/config.prepsnowobs b/parm/config/gfs/config.prepsnowobs index 60ca16ce9e..20bdd89ddf 100644 --- a/parm/config/gfs/config.prepsnowobs +++ b/parm/config/gfs/config.prepsnowobs @@ -8,11 +8,8 @@ echo "BEGIN: config.prepsnowobs" # Get task specific resources . "${EXPDIR}/config.resources" prepsnowobs -export GTS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_gts.yaml.j2" export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2" -export BUFR2IODAX="${EXECgfs}/bufr2ioda.x" - export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe" export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2" diff --git a/parm/config/gfs/config.resources.HERA b/parm/config/gfs/config.resources.HERA index e79d4c5b0a..d1b09fcc32 100644 --- a/parm/config/gfs/config.resources.HERA +++ b/parm/config/gfs/config.resources.HERA @@ -5,8 +5,9 @@ case ${step} in "anal") if [[ "${CASE}" == "C384" ]]; then - export ntasks=270 - export threads_per_task_anal=8 + export ntasks_gdas=270 + export ntasks_gfs=270 + export threads_per_task=8 export tasks_per_node=$(( max_tasks_per_node / threads_per_task )) fi ;; @@ -26,6 +27,10 @@ case ${step} in "eupd") case ${CASE} in + "C768") + export ntasks=80 + export threads_per_task=20 + ;; "C384") export ntasks=80 ;; @@ -43,6 +48,13 @@ case ${step} in export tasks_per_node=$(( max_tasks_per_node / threads_per_task )) ;; + "upp") + if (( "${CASE:1}" >= 768 )); then + # Run fewer tasks per node for memory + tasks_per_node=20 + fi + ;; + *) ;; esac diff --git a/parm/config/gfs/config.resources.S4 b/parm/config/gfs/config.resources.S4 index 1af64bf250..817494c7cd 100644 --- a/parm/config/gfs/config.resources.S4 +++ b/parm/config/gfs/config.resources.S4 @@ -32,7 +32,7 @@ case ${step} in *) ;; esac - export tasks_node=$(( max_tasks_per_node / threads_per_task )) + export tasks_per_node=$(( max_tasks_per_node / threads_per_task )) ;; "eobs") diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index a2984f190b..b1460dfa67 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -9,6 +9,7 @@ echo "BEGIN: config.snowanl" source "${EXPDIR}/config.resources" snowanl export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" +export GTS_SNOW_STAGE_YAML="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.yaml.j2" # Name of the JEDI executable and its yaml template export JEDIEXE="${EXECgfs}/gdas.x" diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index babbe1f2dd..6309c4073b 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -281,7 +281,7 @@ case "${fv3_res}" in export rf_cutoff=100.0 export fv_sg_adj=450 export WRITE_GROUP_GDAS=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=15 export WRITE_GROUP_GFS=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 fi diff --git a/scripts/exglobal_prep_snow_obs.py b/scripts/exglobal_prep_snow_obs.py index a6a9070151..aa1eb1bb7d 100755 --- a/scripts/exglobal_prep_snow_obs.py +++ b/scripts/exglobal_prep_snow_obs.py @@ -20,6 +20,5 @@ # Instantiate the snow prepare task SnowAnl = SnowAnalysis(config) - SnowAnl.prepare_GTS() if SnowAnl.task_config.cyc == 0: SnowAnl.prepare_IMS() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index faa95efb18..7c1c181359 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ 
-Subproject commit faa95efb18f0f52acab2cf09b17f78406f9b48b1
+Subproject commit 7c1c181359c2c1952bab3dc1c481bbdb361aa472
diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd
index 9382fd01c2..a6ea311e5c 160000
--- a/sorc/gsi_utils.fd
+++ b/sorc/gsi_utils.fd
@@ -1 +1 @@
-Subproject commit 9382fd01c2a626c8934c3f553d420a45de2b4dec
+Subproject commit a6ea311e5c82369d255e3afdc99c1bce0c9a3014
diff --git a/ush/check_ice_netcdf.sh b/ush/check_ice_netcdf.sh
deleted file mode 100755
index 9d2d945a8b..0000000000
--- a/ush/check_ice_netcdf.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#! /usr/bin/env bash
-
-yyyy=${1?}
-mm=${2?}
-dd=${3?}
-cyc=${4?}
-fhr=${5?}
-ROTDIR=${6?}
-member=${7?}
-FHOUT_ICE_GFS=${8?}
-
-fhri=$((10#${fhr}))
-
-#Will need to consider fhmin in the future to calculate the offset if we are to stick with this approach.
-((offset = ( cyc ) % FHOUT_ICE_GFS))
-
-if (( offset != 0 )); then
-  (( fhri = fhri - cyc ))
-  fhr3=$(printf %03i "${fhri}")
-  if (( fhri <= FHOUT_ICE_GFS )); then
-    (( interval = FHOUT_ICE_GFS - cyc ))
-    ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model/ice/history/gefs.ice.t${cyc}z.${interval}hr_avg.f${fhr3}.nc
-  else
-    ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model/ice/history/gefs.ice.t${cyc}z.${FHOUT_ICE_GFS}hr_avg.f${fhr3}.nc
-  fi
-else
-  ncfile=${ROTDIR}/gefs.${yyyy}${mm}${dd}/${cyc}/mem${member}/model/ice/history/gefs.ice.t${cyc}z.${FHOUT_ICE_GFS}hr_avg.f${fhr}.nc
-fi
-
-#Check if netcdf file exists.
-if [[ ! -f "${ncfile}" ]];then
-  rc=1
-else
-  #Check if netcdf file is older than 2 minutes.
-  ncage="$(find "${ncfile}" -mmin -2)"
-  if [[ -n "${ncage}" ]]; then
-    rc=1
-  else
-    rc=0
-  fi
-fi
-
-exit "${rc}"
diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index 79d5e7e24a..04ee78db28 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -652,15 +652,8 @@ CICE_predet(){
 
   # CICE does not have a concept of high frequency output like FV3
  # Convert output settings into an explicit list for CICE
-  if (( $(( ( cyc + FHMIN ) % FHOUT_ICE )) == 0 )); then
-    # shellcheck disable=SC2312
-    mapfile -t CICE_OUTPUT_FH < <(seq "${FHMIN}" "${FHOUT_ICE}" "${FHMAX}") || exit 10
-  else
-    CICE_OUTPUT_FH=("${FHMIN}")
-    # shellcheck disable=SC2312
-    mapfile -t -O "${#CICE_OUTPUT_FH[@]}" CICE_OUTPUT_FH < <(seq "$(( FHMIN + $(( ( cyc + FHMIN ) % FHOUT_ICE )) ))" "${FHOUT_ICE}" "${FHMAX}") || exit 10
-    CICE_OUTPUT_FH+=("${FHMAX}")
-  fi
+  # shellcheck disable=SC2312
+  mapfile -t CICE_OUTPUT_FH < <(seq "${FHMIN}" "${FHOUT_ICE}" "${FHMAX}") || exit 10
 
   # Fix files
   ${NCP} "${FIXgfs}/cice/${ICERES}/${CICE_GRID}" "${DATA}/"
diff --git a/ush/python/pygfs/task/oceanice_products.py b/ush/python/pygfs/task/oceanice_products.py
index 98b57ae801..39ec53b100 100644
--- a/ush/python/pygfs/task/oceanice_products.py
+++ b/ush/python/pygfs/task/oceanice_products.py
@@ -58,22 +58,10 @@ def __init__(self, config: Dict[str, Any]) -> None:
 
         valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H"))
 
+        forecast_hour = self.task_config.FORECAST_HOUR
         if self.task_config.COMPONENT == 'ice':
-            offset = int(self.task_config.current_cycle.strftime("%H")) % self.task_config.FHOUT_ICE_GFS
-            # For CICE cases where offset is not 0, forecast_hour needs to be adjusted based on the offset.
-            # TODO: Consider FHMIN when calculating offset.
-            if offset != 0:
-                forecast_hour = self.task_config.FORECAST_HOUR - int(self.task_config.current_cycle.strftime("%H"))
-                # For the first forecast hour, the interval may be different from the intervals of subsequent forecast hours
-                if forecast_hour <= self.task_config.FHOUT_ICE_GFS:
-                    interval = self.task_config.FHOUT_ICE_GFS - int(self.task_config.current_cycle.strftime("%H"))
-                else:
-                    interval = self.task_config.FHOUT_ICE_GFS
-            else:
-                forecast_hour = self.task_config.FORECAST_HOUR
-                interval = self.task_config.FHOUT_ICE_GFS
+            interval = self.task_config.FHOUT_ICE_GFS
         if self.task_config.COMPONENT == 'ocean':
-            forecast_hour = self.task_config.FORECAST_HOUR
             interval = self.task_config.FHOUT_OCN_GFS
 
         # TODO: This is a bit of a hack, but it works for now
diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py
index 9656b00a8e..4b991d2b34 100644
--- a/ush/python/pygfs/task/snow_analysis.py
+++ b/ush/python/pygfs/task/snow_analysis.py
@@ -54,83 +54,6 @@ def __init__(self, config):
         # Extend task_config with local_dict
         self.task_config = AttrDict(**self.task_config, **local_dict)
 
-    @logit(logger)
-    def prepare_GTS(self) -> None:
-        """Prepare the GTS data for a global snow analysis
-
-        This method will prepare GTS data for a global snow analysis using JEDI.
-        This includes:
-        - processing GTS bufr snow depth observation data to IODA format
-
-        Parameters
-        ----------
-        Analysis: parent class for GDAS task
-
-        Returns
-        ----------
-        None
-        """
-
-        # create a temporary dict of all keys needed in this method
-        localconf = AttrDict()
-        keys = ['HOMEgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
-                'OPREFIX', 'CASE', 'OCNRES', 'ntiles']
-        for key in keys:
-            localconf[key] = self.task_config[key]
-
-        # Read and render the GTS_OBS_LIST yaml
-        logger.info(f"Reading {self.task_config.GTS_OBS_LIST}")
-        prep_gts_config = parse_j2yaml(self.task_config.GTS_OBS_LIST, localconf)
-        logger.debug(f"{self.task_config.GTS_OBS_LIST}:\n{pformat(prep_gts_config)}")
-
-        # copy the GTS obs files from COM_OBS to DATA/obs
-        logger.info("Copying GTS obs for bufr2ioda.x")
-        FileHandler(prep_gts_config.gtsbufr).sync()
-
-        logger.info("Link BUFR2IODAX into DATA/")
-        exe_src = self.task_config.BUFR2IODAX
-        exe_dest = os.path.join(localconf.DATA, os.path.basename(exe_src))
-        if os.path.exists(exe_dest):
-            rm_p(exe_dest)
-        os.symlink(exe_src, exe_dest)
-
-        # Create executable instance
-        exe = Executable(self.task_config.BUFR2IODAX)
-
-        def _gtsbufr2iodax(exe, yaml_file):
-            if not os.path.isfile(yaml_file):
-                logger.exception(f"FATAL ERROR: {yaml_file} not found")
-                raise FileNotFoundError(yaml_file)
-
-            logger.info(f"Executing {exe}")
-            try:
-                exe(yaml_file)
-            except OSError:
-                raise OSError(f"Failed to execute {exe} {yaml_file}")
-            except Exception:
-                raise WorkflowException(f"An error occured during execution of {exe} {yaml_file}")
-
-        # Loop over entries in prep_gts_config.bufr2ioda keys
-        # 1. generate bufr2ioda YAML files
-        # 2. execute bufr2ioda.x
-        for name in prep_gts_config.bufr2ioda.keys():
-            gts_yaml = os.path.join(self.task_config.DATA, f"bufr_{name}_snow.yaml")
-            logger.info(f"Generate BUFR2IODA YAML file: {gts_yaml}")
-            temp_yaml = parse_j2yaml(prep_gts_config.bufr2ioda[name], localconf)
-            save_as_yaml(temp_yaml, gts_yaml)
-            logger.info(f"Wrote bufr2ioda YAML to: {gts_yaml}")
-
-            # execute BUFR2IODAX to convert {name} bufr data into IODA format
-            _gtsbufr2iodax(exe, gts_yaml)
-
-        # Ensure the IODA snow depth GTS file is produced by the IODA converter
-        # If so, copy to COM_OBS/
-        try:
-            FileHandler(prep_gts_config.gtsioda).sync()
-        except OSError as err:
-            logger.exception(f"{self.task_config.BUFR2IODAX} failed to produce GTS ioda files")
-            raise OSError(err)
-
     @logit(logger)
     def prepare_IMS(self) -> None:
         """Prepare the IMS data for a global snow analysis
@@ -248,7 +171,7 @@ def initialize(self) -> None:
 
         # create a temporary dict of all keys needed in this method
         localconf = AttrDict()
-        keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
+        keys = ['PARMgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
                 'OPREFIX', 'CASE', 'OCNRES', 'ntiles']
         for key in keys:
             localconf[key] = self.task_config[key]
@@ -268,6 +191,11 @@ def initialize(self) -> None:
         logger.info("Staging ensemble backgrounds")
         FileHandler(self.get_ens_bkg_dict(localconf)).sync()
 
+        # stage GTS bufr2ioda mapping YAML files
+        logger.info(f"Staging GTS bufr2ioda mapping YAML files from {self.task_config.GTS_SNOW_STAGE_YAML}")
+        gts_mapping_list = parse_j2yaml(self.task_config.GTS_SNOW_STAGE_YAML, localconf)
+        FileHandler(gts_mapping_list).sync()
+
         # Write out letkfoi YAML file
         save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml)
         logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}")
diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py
index 8a4f148f24..3b72677a58 100644
--- a/workflow/rocoto/gefs_tasks.py
+++ b/workflow/rocoto/gefs_tasks.py
@@ -213,21 +213,11 @@ def _atmosoceaniceprod(self, component: str):
         history_path = self._template_to_rocoto_cycstring(self._base[history_path_tmpl], {'MEMDIR': 'mem#member#'})
         deps = []
         data = f'{history_path}/{history_file_tmpl}'
-        if component in ['ocean']:
-            dep_dict = {'type': 'data', 'data': data, 'age': 120}
-            deps.append(rocoto.add_dependency(dep_dict))
-            dep_dict = {'type': 'metatask', 'name': 'fcst_mem#member#'}
-            deps.append(rocoto.add_dependency(dep_dict))
-            dependencies = rocoto.create_dependency(dep=deps, dep_condition='or')
-        elif component in ['ice']:
-            command = f"{self.HOMEgfs}/ush/check_ice_netcdf.sh @Y @m @d @H #fhr# &ROTDIR; #member# {fhout_ice_gfs}"
-            dep_dict = {'type': 'sh', 'command': command}
-            deps.append(rocoto.add_dependency(dep_dict))
-            dependencies = rocoto.create_dependency(dep=deps)
-        else:
-            dep_dict = {'type': 'data', 'data': data, 'age': 120}
-            deps.append(rocoto.add_dependency(dep_dict))
-            dependencies = rocoto.create_dependency(dep=deps)
+        dep_dict = {'type': 'data', 'data': data, 'age': 120}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dep_dict = {'type': 'metatask', 'name': 'fcst_mem#member#'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps, dep_condition='or')
 
         postenvars = self.envars.copy()
         postenvar_dict = {'ENSMEM': '#member#',