Merge branch 'NOAA-EMC:develop' into feature/stage_coldatm_warmocnice
KateFriedman-NOAA authored Sep 23, 2024
2 parents 7fa1d85 + fe57bb4 commit a4b8624
Showing 17 changed files with 51 additions and 187 deletions.
5 changes: 0 additions & 5 deletions jobs/JGFS_ATMOS_CYCLONE_GENESIS
@@ -3,11 +3,6 @@
source "${HOMEgfs}/ush/preamble.sh"
source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis" -c "base genesis"

# Hack to temporarily skip this as the tracker has not been built
# on Hercules Rocky 9 yet
# TODO: Remove this after tracker has been built for Rocky 9 #2639
if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi

##############################################
# Set variables used in the exglobal script
##############################################
5 changes: 0 additions & 5 deletions jobs/JGFS_ATMOS_CYCLONE_TRACKER
@@ -3,11 +3,6 @@
source "${HOMEgfs}/ush/preamble.sh"
source "${HOMEgfs}/ush/jjob_header.sh" -e "tracker" -c "base tracker"

# Hack to temporarily skip this as the tracker has not been built
# on Hercules Rocky 9 yet
# TODO: Remove this after tracker has been built for Rocky 9 #2639
if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi

export COMPONENT="atmos"


18 changes: 10 additions & 8 deletions jobs/JGLOBAL_ATMOS_VMINMON
@@ -17,16 +17,18 @@ export gcyc=${GDATE:8:2}
#############################################
# TANKverf - WHERE OUTPUT DATA WILL RESIDE
#############################################
YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_MINMON
YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx COM_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL
YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
COMIN_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \
COMOUT_ATMOS_MINMON:COM_ATMOS_MINMON_TMPL

export gsistat="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.gsistat"
export M_TANKverf=${M_TANKverf:-${COM_ATMOS_MINMON}}
export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_MINMON_PREV}}
YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
COMIN_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL

if [[ ! -d ${M_TANKverf} ]]; then mkdir -p -m 775 "${M_TANKverf}" ; fi
if [[ ! -d ${M_TANKverfM1} ]]; then mkdir -p -m 775 "${M_TANKverfM1}" ; fi
export gsistat="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.gsistat"
export M_TANKverf=${M_TANKverf:-${COMOUT_ATMOS_MINMON}}
export M_TANKverfM1=${M_TANKverfM1:-${COMIN_ATMOS_MINMON_PREV}}

if [[ ! -d ${M_TANKverf} ]]; then mkdir -p "${M_TANKverf}" ; fi

########################################################
# Execute the script.
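A minimal sketch of what the NAME:TEMPLATE syntax above resolves to. The shim below is an illustrative assumption, not the real declare_from_tmpl from global-workflow, and the template value and ROTDIR path are made up for the demo.

#!/usr/bin/env bash
# Toy stand-in for declare_from_tmpl (assumption; the real function is part of the
# workflow's shell utilities): expand a COM path template with the supplied YMD/HH
# and export the result under a new name such as COMIN_ATMOS_ANALYSIS.
demo_declare_from_tmpl() {
  local pair name tmpl value
  for pair in "$@"; do
    name="${pair%%:*}"     # e.g. COMIN_ATMOS_ANALYSIS
    tmpl="${pair#*:}"      # e.g. COM_ATMOS_ANALYSIS_TMPL
    value="${!tmpl}"
    value="${value//\$\{ROTDIR\}/${ROTDIR}}"
    value="${value//\$\{YMD\}/${YMD}}"
    value="${value//\$\{HH\}/${HH}}"
    export "${name}=${value}"
  done
}

# Example usage with a made-up template (the real templates are defined elsewhere in the config):
export ROTDIR="/scratch/ROTDIRS/expt"
COM_ATMOS_ANALYSIS_TMPL='${ROTDIR}/gdas.${YMD}/${HH}/analysis/atmos'
YMD=20240923 HH=00 demo_declare_from_tmpl COMIN_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL
echo "${COMIN_ATMOS_ANALYSIS}"   # -> /scratch/ROTDIRS/expt/gdas.20240923/00/analysis/atmos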
7 changes: 7 additions & 0 deletions parm/config/gfs/config.base
@@ -493,4 +493,11 @@ if [[ "${machine}" =~ "PW" ]]; then
export DO_WAVE="NO"
fi

# The tracker and genesis are not installed on Orion/Hercules yet; this requires spack-stack builds of the package.
# TODO: we should place these in workflow/hosts/[orion|hercules].yaml.
if [[ "${machine}" == "ORION" || "${machine}" == "HERCULES" ]]; then
export DO_TRACKER="NO"
export DO_GENESIS="NO"
fi

echo "END: config.base"
3 changes: 0 additions & 3 deletions parm/config/gfs/config.prepsnowobs
@@ -8,11 +8,8 @@ echo "BEGIN: config.prepsnowobs"
# Get task specific resources
. "${EXPDIR}/config.resources" prepsnowobs

export GTS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_gts.yaml.j2"
export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2"

export BUFR2IODAX="${EXECgfs}/bufr2ioda.x"

export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe"
export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2"

16 changes: 14 additions & 2 deletions parm/config/gfs/config.resources.HERA
@@ -5,8 +5,9 @@
case ${step} in
"anal")
if [[ "${CASE}" == "C384" ]]; then
export ntasks=270
export threads_per_task_anal=8
export ntasks_gdas=270
export ntasks_gfs=270
export threads_per_task=8
export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
fi
;;
@@ -26,6 +27,10 @@ case ${step} in

"eupd")
case ${CASE} in
"C768")
export ntasks=80
export threads_per_task=20
;;
"C384")
export ntasks=80
;;
@@ -43,6 +48,13 @@ case ${step} in
export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
;;

"upp")
if (( "${CASE:1}" >= 768 )); then
# Run fewer tasks per node for memory
tasks_per_node=20
fi
;;

*)
;;
esac
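A quick sketch of the "${CASE:1}" test used in the "upp" block above: CASE is a cubed-sphere resolution label such as C768, so stripping the leading character leaves a number that can be compared arithmetically. The loop values and the non-C768 default below are illustrative only.

#!/usr/bin/env bash
for CASE in C96 C384 C768 C1152; do
  if (( ${CASE:1} >= 768 )); then
    tasks_per_node=20   # fewer tasks per node at C768 and above (memory relief)
  else
    tasks_per_node=40   # placeholder default for this demo only
  fi
  echo "${CASE}: tasks_per_node=${tasks_per_node}"
done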
2 changes: 1 addition & 1 deletion parm/config/gfs/config.resources.S4
@@ -32,7 +32,7 @@ case ${step} in
*)
;;
esac
export tasks_node=$(( max_tasks_per_node / threads_per_task ))
export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
;;

"eobs")
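The change above corrects the variable name (tasks_node becomes tasks_per_node). A worked example of the same arithmetic, with illustrative numbers rather than real S4 limits:

#!/usr/bin/env bash
max_tasks_per_node=40
threads_per_task=8
export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))   # 40 / 8 -> 5
echo "${tasks_per_node}"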
1 change: 1 addition & 0 deletions parm/config/gfs/config.snowanl
@@ -9,6 +9,7 @@ echo "BEGIN: config.snowanl"
source "${EXPDIR}/config.resources" snowanl

export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2"
export GTS_SNOW_STAGE_YAML="${PARMgfs}/gdas/snow/obs/config/bufr2ioda_mapping.yaml.j2"

# Name of the JEDI executable and its yaml template
export JEDIEXE="${EXECgfs}/gdas.x"
2 changes: 1 addition & 1 deletion parm/config/gfs/config.ufs
@@ -281,7 +281,7 @@ case "${fv3_res}" in
export rf_cutoff=100.0
export fv_sg_adj=450
export WRITE_GROUP_GDAS=2
export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10
export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=15
export WRITE_GROUP_GFS=4
export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2
fi
1 change: 0 additions & 1 deletion scripts/exglobal_prep_snow_obs.py
@@ -20,6 +20,5 @@

# Instantiate the snow prepare task
SnowAnl = SnowAnalysis(config)
SnowAnl.prepare_GTS()
if SnowAnl.task_config.cyc == 0:
SnowAnl.prepare_IMS()
43 changes: 0 additions & 43 deletions ush/check_ice_netcdf.sh

This file was deleted.

11 changes: 2 additions & 9 deletions ush/forecast_predet.sh
@@ -652,15 +652,8 @@ CICE_predet(){

# CICE does not have a concept of high frequency output like FV3
# Convert output settings into an explicit list for CICE
if (( $(( ( cyc + FHMIN ) % FHOUT_ICE )) == 0 )); then
# shellcheck disable=SC2312
mapfile -t CICE_OUTPUT_FH < <(seq "${FHMIN}" "${FHOUT_ICE}" "${FHMAX}") || exit 10
else
CICE_OUTPUT_FH=("${FHMIN}")
# shellcheck disable=SC2312
mapfile -t -O "${#CICE_OUTPUT_FH[@]}" CICE_OUTPUT_FH < <(seq "$(( FHMIN + $(( ( cyc + FHMIN ) % FHOUT_ICE )) ))" "${FHOUT_ICE}" "${FHMAX}") || exit 10
CICE_OUTPUT_FH+=("${FHMAX}")
fi
# shellcheck disable=SC2312
mapfile -t CICE_OUTPUT_FH < <(seq "${FHMIN}" "${FHOUT_ICE}" "${FHMAX}") || exit 10

# Fix files
${NCP} "${FIXgfs}/cice/${ICERES}/${CICE_GRID}" "${DATA}/"
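To show what the simplified logic above produces, here is the same seq/mapfile pattern run standalone with illustrative values for FHMIN, FHOUT_ICE, and FHMAX:

#!/usr/bin/env bash
FHMIN=0 FHOUT_ICE=6 FHMAX=24
# shellcheck disable=SC2312
mapfile -t CICE_OUTPUT_FH < <(seq "${FHMIN}" "${FHOUT_ICE}" "${FHMAX}") || exit 10
echo "${CICE_OUTPUT_FH[@]}"   # -> 0 6 12 18 24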
16 changes: 2 additions & 14 deletions ush/python/pygfs/task/oceanice_products.py
@@ -58,22 +58,10 @@ def __init__(self, config: Dict[str, Any]) -> None:

valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H"))

forecast_hour = self.task_config.FORECAST_HOUR
if self.task_config.COMPONENT == 'ice':
offset = int(self.task_config.current_cycle.strftime("%H")) % self.task_config.FHOUT_ICE_GFS
# For CICE cases where offset is not 0, forecast_hour needs to be adjusted based on the offset.
# TODO: Consider FHMIN when calculating offset.
if offset != 0:
forecast_hour = self.task_config.FORECAST_HOUR - int(self.task_config.current_cycle.strftime("%H"))
# For the first forecast hour, the interval may be different from the intervals of subsequent forecast hours
if forecast_hour <= self.task_config.FHOUT_ICE_GFS:
interval = self.task_config.FHOUT_ICE_GFS - int(self.task_config.current_cycle.strftime("%H"))
else:
interval = self.task_config.FHOUT_ICE_GFS
else:
forecast_hour = self.task_config.FORECAST_HOUR
interval = self.task_config.FHOUT_ICE_GFS
interval = self.task_config.FHOUT_ICE_GFS
if self.task_config.COMPONENT == 'ocean':
forecast_hour = self.task_config.FORECAST_HOUR
interval = self.task_config.FHOUT_OCN_GFS

# TODO: This is a bit of a hack, but it works for now
84 changes: 6 additions & 78 deletions ush/python/pygfs/task/snow_analysis.py
@@ -54,83 +54,6 @@ def __init__(self, config):
# Extend task_config with local_dict
self.task_config = AttrDict(**self.task_config, **local_dict)

@logit(logger)
def prepare_GTS(self) -> None:
"""Prepare the GTS data for a global snow analysis
This method will prepare GTS data for a global snow analysis using JEDI.
This includes:
- processing GTS bufr snow depth observation data to IODA format
Parameters
----------
Analysis: parent class for GDAS task
Returns
----------
None
"""

# create a temporary dict of all keys needed in this method
localconf = AttrDict()
keys = ['HOMEgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
'OPREFIX', 'CASE', 'OCNRES', 'ntiles']
for key in keys:
localconf[key] = self.task_config[key]

# Read and render the GTS_OBS_LIST yaml
logger.info(f"Reading {self.task_config.GTS_OBS_LIST}")
prep_gts_config = parse_j2yaml(self.task_config.GTS_OBS_LIST, localconf)
logger.debug(f"{self.task_config.GTS_OBS_LIST}:\n{pformat(prep_gts_config)}")

# copy the GTS obs files from COM_OBS to DATA/obs
logger.info("Copying GTS obs for bufr2ioda.x")
FileHandler(prep_gts_config.gtsbufr).sync()

logger.info("Link BUFR2IODAX into DATA/")
exe_src = self.task_config.BUFR2IODAX
exe_dest = os.path.join(localconf.DATA, os.path.basename(exe_src))
if os.path.exists(exe_dest):
rm_p(exe_dest)
os.symlink(exe_src, exe_dest)

# Create executable instance
exe = Executable(self.task_config.BUFR2IODAX)

def _gtsbufr2iodax(exe, yaml_file):
if not os.path.isfile(yaml_file):
logger.exception(f"FATAL ERROR: {yaml_file} not found")
raise FileNotFoundError(yaml_file)

logger.info(f"Executing {exe}")
try:
exe(yaml_file)
except OSError:
raise OSError(f"Failed to execute {exe} {yaml_file}")
except Exception:
raise WorkflowException(f"An error occurred during execution of {exe} {yaml_file}")

# Loop over entries in prep_gts_config.bufr2ioda keys
# 1. generate bufr2ioda YAML files
# 2. execute bufr2ioda.x
for name in prep_gts_config.bufr2ioda.keys():
gts_yaml = os.path.join(self.task_config.DATA, f"bufr_{name}_snow.yaml")
logger.info(f"Generate BUFR2IODA YAML file: {gts_yaml}")
temp_yaml = parse_j2yaml(prep_gts_config.bufr2ioda[name], localconf)
save_as_yaml(temp_yaml, gts_yaml)
logger.info(f"Wrote bufr2ioda YAML to: {gts_yaml}")

# execute BUFR2IODAX to convert {name} bufr data into IODA format
_gtsbufr2iodax(exe, gts_yaml)

# Ensure the IODA snow depth GTS file is produced by the IODA converter
# If so, copy to COM_OBS/
try:
FileHandler(prep_gts_config.gtsioda).sync()
except OSError as err:
logger.exception(f"{self.task_config.BUFR2IODAX} failed to produce GTS ioda files")
raise OSError(err)

@logit(logger)
def prepare_IMS(self) -> None:
"""Prepare the IMS data for a global snow analysis
@@ -248,7 +171,7 @@ def initialize(self) -> None:

# create a temporary dict of all keys needed in this method
localconf = AttrDict()
keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
keys = ['PARMgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV',
'OPREFIX', 'CASE', 'OCNRES', 'ntiles']
for key in keys:
localconf[key] = self.task_config[key]
@@ -268,6 +191,11 @@ def initialize(self) -> None:
logger.info("Staging ensemble backgrounds")
FileHandler(self.get_ens_bkg_dict(localconf)).sync()

# stage GTS bufr2ioda mapping YAML files
logger.info(f"Staging GTS bufr2ioda mapping YAML files from {self.task_config.GTS_SNOW_STAGE_YAML}")
gts_mapping_list = parse_j2yaml(self.task_config.GTS_SNOW_STAGE_YAML, localconf)
FileHandler(gts_mapping_list).sync()

# Write out letkfoi YAML file
save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml)
logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}")
20 changes: 5 additions & 15 deletions workflow/rocoto/gefs_tasks.py
@@ -213,21 +213,11 @@ def _atmosoceaniceprod(self, component: str):
history_path = self._template_to_rocoto_cycstring(self._base[history_path_tmpl], {'MEMDIR': 'mem#member#'})
deps = []
data = f'{history_path}/{history_file_tmpl}'
if component in ['ocean']:
dep_dict = {'type': 'data', 'data': data, 'age': 120}
deps.append(rocoto.add_dependency(dep_dict))
dep_dict = {'type': 'metatask', 'name': 'fcst_mem#member#'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps, dep_condition='or')
elif component in ['ice']:
command = f"{self.HOMEgfs}/ush/check_ice_netcdf.sh @Y @m @d @H #fhr# &ROTDIR; #member# {fhout_ice_gfs}"
dep_dict = {'type': 'sh', 'command': command}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
else:
dep_dict = {'type': 'data', 'data': data, 'age': 120}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
dep_dict = {'type': 'data', 'data': data, 'age': 120}
deps.append(rocoto.add_dependency(dep_dict))
dep_dict = {'type': 'metatask', 'name': 'fcst_mem#member#'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps, dep_condition='or')

postenvars = self.envars.copy()
postenvar_dict = {'ENSMEM': '#member#',
