From b405b7d3d11d384ce9fe3b9cd2180f315f7b38f2 Mon Sep 17 00:00:00 2001
From: Dan Holdaway <27729500+danholdaway@users.noreply.github.com>
Date: Wed, 8 May 2024 20:52:48 -0400
Subject: [PATCH 1/6] Use JCB for assembling JEDI YAML files for atmospheric
 GDAS (#2477)

Change the JEDI YAML assembly for the atmospheric GDAS to use the JEDI
Configuration Builder (JCB) tool, so that YAMLs can be made more portable
and can invoke the observation chronicle mechanism.

Resolves #2476

Co-authored-by: danholdaway
Co-authored-by: Walter Kolczynski - NOAA
---
 .gitignore                               |  7 +++++
 .gitmodules                              |  4 +++
 parm/config/gfs/config.atmanl            | 10 ++++---
 parm/config/gfs/config.atmanlfv3inc      |  2 +-
 parm/config/gfs/config.atmensanl         |  5 ++--
 sorc/gdas.cd                             |  2 +-
 sorc/jcb                                 |  1 +
 sorc/link_workflow.sh                    | 17 +++++++----
 ush/python/pygfs/task/analysis.py        | 38 ++++++++++++++++++++----
 ush/python/pygfs/task/atm_analysis.py    |  8 +++--
 ush/python/pygfs/task/atmens_analysis.py |  3 ++
 11 files changed, 76 insertions(+), 21 deletions(-)
 create mode 160000 sorc/jcb

diff --git a/.gitignore b/.gitignore
index 943ad64e1a..04193fca0a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -48,6 +48,8 @@ parm/gdas/io
 parm/gdas/ioda
 parm/gdas/snow
 parm/gdas/soca
+parm/gdas/jcb-gdas
+parm/gdas/jcb-algorithms
 parm/monitor
 parm/post/AEROSOL_LUTS.dat
 parm/post/nam_micro_lookup.dat
@@ -195,3 +197,8 @@ versions/run.ver
 ush/python/wxflow
 workflow/wxflow
 ci/scripts/wxflow
+
+# jcb checkout and symlinks
+ush/python/jcb
+workflow/jcb
+ci/scripts/jcb

diff --git a/.gitmodules b/.gitmodules
index 4851e232ee..ea1b5c06af 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -30,3 +30,7 @@
 	path = sorc/upp.fd
 	url = https://github.com/NOAA-EMC/UPP.git
 	ignore = dirty
+[submodule "sorc/jcb"]
+	path = sorc/jcb
+	url = https://github.com/noaa-emc/jcb
+	fetchRecurseSubmodules = false

diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl
index 5eb692b473..dd8ca80b11 100644
--- a/parm/config/gfs/config.atmanl
+++ b/parm/config/gfs/config.atmanl
@@ -5,18 +5,20 @@
 
 echo "BEGIN: config.atmanl"
 
-export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/gdas_prototype_3d.yaml.j2"
-export JEDIYAML="${PARMgfs}/gdas/atm/variational/3dvar_drpcg.yaml.j2"
+export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2"
+export JCB_ALGO_YAML="${PARMgfs}/gdas/atm/jcb-prototype_3dvar.yaml.j2"
+
 export STATICB_TYPE="gsibec"
+export LOCALIZATION_TYPE="bump"
 export INTERP_METHOD='barycentric'
 
 if [[ ${DOHYBVAR} = "YES" ]]; then
   # shellcheck disable=SC2153
   export CASE_ANL=${CASE_ENS}
-  export BERROR_YAML="${PARMgfs}/gdas/atm/berror/hybvar_${STATICB_TYPE}.yaml.j2"
+  export BERROR_YAML="background_error_hybrid_${STATICB_TYPE}_${LOCALIZATION_TYPE}"
 else
   export CASE_ANL=${CASE}
-  export BERROR_YAML="${PARMgfs}/gdas/atm/berror/staticb_${STATICB_TYPE}.yaml.j2"
+  export BERROR_YAML="background_error_static_${STATICB_TYPE}"
 fi
 
 export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"

diff --git a/parm/config/gfs/config.atmanlfv3inc b/parm/config/gfs/config.atmanlfv3inc
index 14c11d3dd3..ab7efa3a60 100644
--- a/parm/config/gfs/config.atmanlfv3inc
+++ b/parm/config/gfs/config.atmanlfv3inc
@@ -8,7 +8,7 @@ echo "BEGIN: config.atmanlfv3inc"
 # Get task specific resources
 . "${EXPDIR}/config.resources" atmanlfv3inc
 
-export JEDIYAML=${PARMgfs}/gdas/atm/utils/fv3jedi_fv3inc_variational.yaml.j2
+export JCB_ALGO=fv3jedi_fv3inc_variational
 export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x
 
 echo "END: config.atmanlfv3inc"

diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl
index 23eab7f7b9..3484cb670d 100644
--- a/parm/config/gfs/config.atmensanl
+++ b/parm/config/gfs/config.atmensanl
@@ -5,8 +5,9 @@
 
 echo "BEGIN: config.atmensanl"
 
-export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/lgetkf_prototype.yaml.j2"
-export JEDIYAML="${PARMgfs}/gdas/atm/lgetkf/lgetkf.yaml.j2"
+export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2"
+export JCB_ALGO_YAML="${PARMgfs}/gdas/atm/jcb-prototype_lgetkf.yaml.j2"
+
 export INTERP_METHOD='barycentric'
 
 export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 70f1319139..2b2d417a96 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 70f13191391d0909e92da47dc7d17ddf1dc4c6c6
+Subproject commit 2b2d417a96528527d7d3e7eedaccf150dc075d92

diff --git a/sorc/jcb b/sorc/jcb
new file mode 160000
index 0000000000..de75655d81
--- /dev/null
+++ b/sorc/jcb
@@ -0,0 +1 @@
+Subproject commit de75655d81ec2ee668d8d47bf4a43625c81dde7c

diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index 0041ce083b..c5d7243e8f 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -88,11 +88,17 @@ if [[ "${LINK_NEST:-OFF}" == "ON" ]] ; then
   source "${HOMEgfs}/versions/fix.nest.ver"
 fi
 
-# Link wxflow in ush/python, workflow and ci/scripts
+# Link python packages in ush/python
+# TODO: This will be unnecessary when these are part of the virtualenv
+packages=("wxflow" "jcb")
+for package in "${packages[@]}"; do
+  cd "${HOMEgfs}/ush/python" || exit 1
+  [[ -s "${package}" ]] && rm -f "${package}"
+  ${LINK} "${HOMEgfs}/sorc/${package}/src/${package}" .
+done
+
+# Link wxflow in workflow and ci/scripts
 # TODO: This will be unnecessary when wxflow is part of the virtualenv
-cd "${HOMEgfs}/ush/python" || exit 1
-[[ -s "wxflow" ]] && rm -f wxflow
-${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .
 cd "${HOMEgfs}/workflow" || exit 1
 [[ -s "wxflow" ]] && rm -f wxflow
 ${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .
@@ -100,7 +106,6 @@ cd "${HOMEgfs}/ci/scripts" || exit 1
 [[ -s "wxflow" ]] && rm -f wxflow
 ${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .
 
-
 # Link fix directories
 if [[ -n "${FIX_DIR}" ]]; then
   if [[ ! -d "${HOMEgfs}/fix" ]]; then mkdir "${HOMEgfs}/fix" || exit 1; fi
@@ -228,7 +233,7 @@ fi
 #------------------------------
 if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then
   cd "${HOMEgfs}/parm/gdas" || exit 1
-  declare -a gdasapp_comps=("aero" "atm" "io" "ioda" "snow" "soca")
+  declare -a gdasapp_comps=("aero" "atm" "io" "ioda" "snow" "soca" "jcb-gdas" "jcb-algorithms")
  for comp in "${gdasapp_comps[@]}"; do
    [[ -d "${comp}" ]] && rm -rf "${comp}"
    ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/parm/${comp}" .
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py
index 5a516a02c8..5464c25370 100644
--- a/ush/python/pygfs/task/analysis.py
+++ b/ush/python/pygfs/task/analysis.py
@@ -6,8 +6,9 @@ from logging import getLogger
 from pprint import pformat
 from netCDF4 import Dataset
-from typing import List, Dict, Any, Union
+from typing import List, Dict, Any, Union, Optional
 
+from jcb import render
 from wxflow import (parse_j2yaml, FileHandler, rm_p, logit,
                     Task, Executable, WorkflowException, to_fv3time, to_YMD,
                     Template, TemplateConstants)
@@ -46,11 +47,14 @@ def initialize(self) -> None:
         self.link_jediexe()
 
     @logit(logger)
-    def get_jedi_config(self) -> Dict[str, Any]:
+    def get_jedi_config(self, algorithm: Optional[str] = None) -> Dict[str, Any]:
         """Compile a dictionary of JEDI configuration from JEDIYAML template file
 
         Parameters
         ----------
+        algorithm (optional) : str
+            Name of the algorithm to use in the JEDI configuration. Will override the algorithm
+            set in the self.config.JCB_<>_YAML file
 
         Returns
         ----------
@@ -60,7 +64,31 @@ def get_jedi_config(self) -> Dict[str, Any]:
 
         # generate JEDI YAML file
         logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}")
-        jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir)
+
+        if 'JCB_BASE_YAML' in self.task_config.keys():
+            # Step 1: fill templates of the jcb base YAML file
+            jcb_config = parse_j2yaml(self.task_config.JCB_BASE_YAML, self.task_config)
+
+            # Step 2: (optional) fill templates of algorithm override YAML and merge
+            if 'JCB_ALGO_YAML' in self.task_config.keys():
+                jcb_algo_config = parse_j2yaml(self.task_config.JCB_ALGO_YAML, self.task_config)
+                jcb_config = {**jcb_config, **jcb_algo_config}
+
+            # If algorithm is present, override the algorithm in the JEDI config
+            if algorithm:
+                jcb_config['algorithm'] = algorithm
+
+            # Step 3: generate the JEDI YAML using the JCB driving YAML
+            jedi_config = render(jcb_config)
+        elif 'JEDIYAML' in self.task_config.keys():
+            # Generate JEDI YAML file (without using JCB)
+            logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}")
+            jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config,
+                                       searchpath=self.gdasapp_j2tmpl_dir)
+            logger.debug(f"JEDI config:\n{pformat(jedi_config)}")
+        else:
+            raise KeyError("Task config must contain JCB_BASE_YAML or JEDIYAML")
+
         logger.debug(f"JEDI config:\n{pformat(jedi_config)}")
 
         return jedi_config
@@ -82,7 +110,7 @@ def get_obs_dict(self) -> Dict[str, Any]:
             a dictionary containing the list of observation files to copy for FileHandler
         """
 
-        logger.info(f"Extracting a list of observation files from {self.task_config.JEDIYAML}")
+        logger.info("Extracting a list of observation files from the JEDI config")
         observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations')
         logger.debug(f"observations:\n{pformat(observations)}")
 
@@ -116,7 +144,7 @@ def get_bias_dict(self) -> Dict[str, Any]:
             a dictionary containing the list of observation bias files to copy for FileHandler
         """
 
-        logger.info(f"Extracting a list of bias correction files from {self.task_config.JEDIYAML}")
+        logger.info("Extracting a list of bias correction files from the JEDI config")
         observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations')
         logger.debug(f"observations:\n{pformat(observations)}")

diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 47d291268e..95545c57a4 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -49,6 +49,9 @@ def __init__(self, config):
                 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.",  # TODO: CDUMP is being replaced by RUN
                 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.",
                 'jedi_yaml': _jedi_yaml,
+                'atm_obsdatain_path': f"{self.runtime_config.DATA}/obs/",
+                'atm_obsdataout_path': f"{self.runtime_config.DATA}/diags/",
+                'BKG_TSTEP': "PT1H"  # Placeholder for 4D applications
             }
         )
 
@@ -137,8 +140,9 @@ def variational(self: Analysis) -> None:
     @logit(logger)
     def init_fv3_increment(self: Analysis) -> None:
         # Setup JEDI YAML file
-        self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, os.path.basename(self.task_config.JEDIYAML))
-        save_as_yaml(self.get_jedi_config(), self.task_config.jedi_yaml)
+        self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA,
+                                                  f"{self.task_config.JCB_ALGO}.yaml")
+        save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml)
 
         # Link JEDI executable to run directory
         self.task_config.jedi_exe = self.link_jediexe()

diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index a1aecfe07c..5aaacc42e8 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -46,6 +46,9 @@ def __init__(self, config):
                 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.",  # TODO: CDUMP is being replaced by RUN
                 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.",
                 'jedi_yaml': _jedi_yaml,
+                'atm_obsdatain_path': "./obs/",
+                'atm_obsdataout_path': "./diags/",
+                'BKG_TSTEP': "PT1H"  # Placeholder for 4D applications
             }
         )

From c7b3973014480a20dd8e24edaeb83a9e9e68159f Mon Sep 17 00:00:00 2001
From: Jessica Meixner
Date: Thu, 9 May 2024 11:36:58 -0400
Subject: [PATCH 2/6] Updates for cold start half cycle, then continuing with
 IAU for WCDA (#2560)

This PR allows us to run C384 S2S with IAU, but starting with the first
half-cycle as a cold start. This will be necessary for cycled testing as we
build towards the full system for GFSv17.

This updates the copying of the restarts for RUN=gdas for both ocean and
ice, mirroring what the atm model does. It also reduces the number of
restart files from 4 to 3.

Other updates:

* Adds DOJEDI ocean triggers for archiving certain files (update from
  @CatherineThomas-NOAA)
* Adds COPY_FINAL_RESTARTS option to turn on/off copying the last restart
  file to COM. Defaults to off.
* Defines model_start_date_current_cycle & model_start_date_next_cycle to
  help with knowing which IC to grab, as sketched below.

Refs #2546

Co-authored-by: Rahul Mahajan
---
 jobs/JGLOBAL_FORECAST        |   2 +-
 parm/config/gefs/config.fcst |   1 +
 parm/config/gfs/config.fcst  |   1 +
 scripts/exglobal_archive.sh  |   5 +-
 ush/forecast_det.sh          |  13 +---
 ush/forecast_postdet.sh      | 142 ++++++++++++++---------------------
 ush/forecast_predet.sh       |   9 ++-
 ush/hpssarch_gen.sh          |  15 ++--
 8 files changed, 85 insertions(+), 103 deletions(-)
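The IC-selection rule described above reduces to one line of date
arithmetic, mirrored in the forecast_predet.sh change below; a sketch in
Python, where the 3-hour half window (corresponding to IAU_OFFSET=6) is an
assumption used only for illustration:

```python
from datetime import datetime, timedelta

def model_start_date(cycle: datetime, doiau: bool, half_window_hours: int = 3) -> datetime:
    """With IAU the forecast starts half a window before the cycle time;
    cold starts (and non-IAU runs) begin at the cycle time itself."""
    return cycle - timedelta(hours=half_window_hours) if doiau else cycle

cycle = datetime(2024, 5, 9, 0)
print(model_start_date(cycle, doiau=True))   # 2024-05-08 21:00 -> "begin" restart
print(model_start_date(cycle, doiau=False))  # 2024-05-09 00:00 -> cold start
```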
diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST
index 6c4200dd6e..8d91be8a57 100755
--- a/jobs/JGLOBAL_FORECAST
+++ b/jobs/JGLOBAL_FORECAST
@@ -99,6 +99,6 @@ fi
 # Remove the Temporary working directory
 ##########################################
 cd "${DATAROOT}" || true
-[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA} ${DATArestart}"  # do not remove DATAjob. It contains DATAoutput
+[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA}" "${DATArestart}"  # do not remove DATAjob. It contains DATAoutput
 
 exit 0

diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst
index 5c592556c8..9e5904d689 100644
--- a/parm/config/gefs/config.fcst
+++ b/parm/config/gefs/config.fcst
@@ -6,6 +6,7 @@ echo "BEGIN: config.fcst"
 
 export USE_ESMF_THREADING="YES"  # Toggle to use ESMF-managed threading or traditional threading in UFSWM
+export COPY_FINAL_RESTARTS="NO"  # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled separately)
 
 # Turn off waves if not used for this CDUMP
 case ${WAVE_CDUMP} in

diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst
index 63273e0fe4..81fda1942a 100644
--- a/parm/config/gfs/config.fcst
+++ b/parm/config/gfs/config.fcst
@@ -6,6 +6,7 @@ echo "BEGIN: config.fcst"
 
 export USE_ESMF_THREADING="YES"  # Toggle to use ESMF-managed threading or traditional threading in UFSWM
+export COPY_FINAL_RESTARTS="NO"  # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled separately)
 
 # Turn off waves if not used for this CDUMP
 case ${WAVE_CDUMP} in

diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh
index 5842c76b57..acb926d0e6 100755
--- a/scripts/exglobal_archive.sh
+++ b/scripts/exglobal_archive.sh
@@ -237,7 +237,10 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then
 
     #gdasocean
     if [ "${DO_OCN}" = "YES" ]; then
-        targrp_list="${targrp_list} gdasocean gdasocean_analysis"
+        targrp_list="${targrp_list} gdasocean"
+        if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then
+            targrp_list="${targrp_list} gdasocean_analysis"
+        fi
     fi
 
     #gdasice

diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh
index de2a47c921..e4b9ded3d3 100755
--- a/ush/forecast_det.sh
+++ b/ush/forecast_det.sh
@@ -6,15 +6,9 @@ UFS_det(){
   echo "SUB ${FUNCNAME[0]}: Run type determination for UFS"
 
   # Determine if the current cycle is a warm start (based on the availability of restarts)
-  if [[ "${DOIAU:-}" == "YES" ]]; then
-    if [[ -f "${COM_ATMOS_RESTART_PREV}/${current_cycle_begin:0:8}.${current_cycle_begin:8:2}0000.coupler.res" ]]; then
-      warm_start=".true."
-    fi
-  else
-    if [[ -f "${COM_ATMOS_RESTART_PREV}/${current_cycle:0:8}.${current_cycle:8:2}0000.coupler.res" ]]; then
-      warm_start=".true."
-    fi
-  fi
+  if [[ -f "${COM_ATMOS_RESTART_PREV}/${model_start_date_current_cycle:0:8}.${model_start_date_current_cycle:8:2}0000.coupler.res" ]]; then
+    warm_start=".true."
+  fi
 
   # If restarts were not available, this is likely a cold start
   if [[ "${warm_start}" == ".false." ]]; then
@@ -30,6 +24,7 @@ UFS_det(){
     # Since warm start is false, we cannot do IAU
     DOIAU="NO"
     IAU_OFFSET=0
+    model_start_date_current_cycle=${current_cycle}
 
     # It is still possible that a restart is available from a previous forecast attempt
     # So we have to continue checking for restarts

diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index 8ea556055d..9c8858ec3d 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -48,11 +48,7 @@ FV3_postdet() {
     restart_date="${RERUN_DATE}"
     restart_dir="${DATArestart}/FV3_RESTART"
   else  # "${RERUN}" == "NO"
-    if [[ "${DOIAU}" == "YES" ]]; then
-      restart_date="${current_cycle_begin}"
-    else
-      restart_date="${current_cycle}"
-    fi
+    restart_date="${model_start_date_current_cycle}"
     restart_dir="${COM_ATMOS_RESTART_PREV}"
   fi
 
@@ -92,11 +88,10 @@ FV3_postdet() {
   # Need a coupler.res that is consistent with the model start time
   if [[ "${DOIAU}" == "YES" ]]; then
     local model_start_time="${previous_cycle}"
-    local model_current_time="${current_cycle_begin}"
   else
     local model_start_time="${current_cycle}"
-    local model_current_time="${current_cycle}"
   fi
+  local model_current_time="${model_start_date_current_cycle}"
   rm -f "${DATA}/INPUT/coupler.res"
   cat >> "${DATA}/INPUT/coupler.res" << EOF
   3        (Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)
@@ -258,13 +253,15 @@ FV3_out() {
   # Copy the final restart files at the end of the forecast segment
   # The final restart written at the end of the forecast does not include the valid date
   # TODO: verify the above statement since RM found that it did!
-  echo "Copying FV3 restarts for 'RUN=${RUN}' at the end of the forecast segment: ${forecast_end_cycle}"
-  for fv3_restart_file in "${fv3_restart_files[@]}"; do
-    restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${fv3_restart_file}"
-    ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \
-      "${COM_ATMOS_RESTART}/${restart_file}"
-  done
-
+  # TODO: For other components this copy is done only for gfs/gefs; check whether FV3 should do the same
+  if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then
+    echo "Copying FV3 restarts for 'RUN=${RUN}' at the end of the forecast segment: ${forecast_end_cycle}"
+    for fv3_restart_file in "${fv3_restart_files[@]}"; do
+      restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${fv3_restart_file}"
+      ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \
+        "${COM_ATMOS_RESTART}/${restart_file}"
+    done
+  fi
   echo "SUB ${FUNCNAME[0]}: Output data for FV3 copied"
 }
@@ -281,11 +278,7 @@ WW3_postdet() {
     restart_date="${RERUN_DATE}"
     restart_dir="${DATArestart}/WW3_RESTART"
   else
-    if [[ "${DOIAU}" == "YES" ]]; then
-      restart_date="${current_cycle_begin}"
-    else
-      restart_date="${current_cycle}"
-    fi
+    restart_date="${model_start_date_current_cycle}"
     restart_dir="${COM_WAVE_RESTART_PREV}"
   fi
   echo "Copying WW3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'"
@@ -384,11 +377,7 @@ MOM6_postdet() {
     restart_date="${RERUN_DATE}"
   else  # "${RERUN}" == "NO"
     restart_dir="${COM_OCEAN_RESTART_PREV}"
-    if [[ "${DOIAU}" == "YES" ]]; then
-      restart_date="${current_cycle_begin}"
-    else
-      restart_date="${current_cycle}"
-    fi
+    restart_date="${model_start_date_current_cycle}"
   fi
 
   # Copy MOM6 ICs
@@ -489,11 +478,11 @@ MOM6_out() {
   # Coarser than 1/2 degree has a single MOM restart
   local mom6_restart_files mom6_restart_file restart_file
   mom6_restart_files=(MOM.res.nc)
-  # 1/4 degree resolution has 4 additional restarts
+  # 1/4 degree resolution has 3 additional restarts
   case "${OCNRES}" in
"${OCNRES}" in "025") local nn - for (( nn = 1; nn <= 4; nn++ )); do + for (( nn = 1; nn <= 3; nn++ )); do mom6_restart_files+=("MOM.res_${nn}.nc") done ;; @@ -501,24 +490,22 @@ MOM6_out() { esac # Copy MOM6 restarts at the end of the forecast segment to COM for RUN=gfs|gefs - local restart_file - if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then - echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" - for mom6_restart_file in "${mom6_restart_files[@]}"; do - restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${mom6_restart_file}" - ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \ - "${COM_OCEAN_RESTART}/${restart_file}" - done + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + local restart_file + if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then + echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + for mom6_restart_file in "${mom6_restart_files[@]}"; do + restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${mom6_restart_file}" + ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \ + "${COM_OCEAN_RESTART}/${restart_file}" + done + fi fi - # Copy restarts at the beginning/middle of the next assimilation cycle to COM for RUN=gdas|enkfgdas|enkfgfs + # Copy restarts for the next cycle for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date - if [[ "${DOIAU}" == "YES" ]]; then # Copy restarts at the beginning of the next cycle from DATA to COM - restart_date="${next_cycle_begin}" - else # Copy restarts at the middle of the next cycle from DATA to COM - restart_date="${next_cycle}" - fi + restart_date="${model_start_date_next_cycle}" echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${restart_date}" for mom6_restart_file in "${mom6_restart_files[@]}"; do restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${mom6_restart_file}" @@ -526,7 +513,6 @@ MOM6_out() { "${COM_OCEAN_RESTART}/${restart_file}" done fi - } CICE_postdet() { @@ -539,11 +525,7 @@ CICE_postdet() { seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds cice_restart_file="${DATArestart}/CICE_RESTART/cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" else # "${RERUN}" == "NO" - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" cice_restart_file="${COM_ICE_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model.res.nc" fi @@ -554,8 +536,8 @@ CICE_postdet() { # Link iceh_ic file to COM. This is the initial condition file from CICE (f000) # TODO: Is this file needed in COM? Is this going to be used for generating any products? 
local vdate seconds vdatestr fhr fhr3 interval last_fhr - seconds=$(to_seconds "${current_cycle:8:2}0000") # convert HHMMSS to seconds - vdatestr="${current_cycle:0:4}-${current_cycle:4:2}-${current_cycle:6:2}-${seconds}" + seconds=$(to_seconds "${model_start_date_current_cycle:8:2}0000") # convert HHMMSS to seconds + vdatestr="${model_start_date_current_cycle:0:4}-${model_start_date_current_cycle:4:2}-${model_start_date_current_cycle:6:2}-${seconds}" ${NLN} "${COM_ICE_HISTORY}/${RUN}.ice.t${cyc}z.ic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc" # Link CICE forecast output files from DATA/CICE_OUTPUT to COM @@ -601,24 +583,22 @@ CICE_out() { ${NCP} "${DATA}/ice_in" "${COM_CONF}/ufs.ice_in" # Copy CICE restarts at the end of the forecast segment to COM for RUN=gfs|gefs - local seconds source_file target_file - if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then - echo "Copying CICE restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" - seconds=$(to_seconds "${forecast_end_cycle:8:2}0000") # convert HHMMSS to seconds - source_file="cice_model.res.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" - target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.cice_model.res.nc" - ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \ - "${COM_ICE_RESTART}/${target_file}" + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + local seconds source_file target_file + if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then + echo "Copying CICE restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + seconds=$(to_seconds "${forecast_end_cycle:8:2}0000") # convert HHMMSS to seconds + source_file="cice_model.res.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" + target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.cice_model.res.nc" + ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \ + "${COM_ICE_RESTART}/${target_file}" + fi fi - # Copy restarts at the beginning/middle of the next assimilation cycle to COM for RUN=gdas|enkfgdas|enkfgfs + # Copy restarts for next cycle for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date - if [[ "${DOIAU}" == "YES" ]]; then # Copy restarts at the beginning of the next cycle from DATA to COM - restart_date="${next_cycle_begin}" - else # Copy restarts at the middle of the next cycle from DATA to COM - restart_date="${next_cycle}" - fi + restart_date="${model_start_date_next_cycle}" echo "Copying CICE restarts for 'RUN=${RUN}' at ${restart_date}" seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds source_file="cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" @@ -706,11 +686,7 @@ CMEPS_postdet() { seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds cmeps_restart_file="${DATArestart}/CMEPS_RESTART/ufs.cpld.cpl.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" else # "${RERUN}" == "NO" - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" cmeps_restart_file="${COM_MED_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.ufs.cpld.cpl.r.nc" fi @@ -740,26 +716,24 @@ CMEPS_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for CMEPS mediator" # Copy mediator restarts at the end of the forecast segment to COM for RUN=gfs|gefs - echo "Copying mediator restarts for 
'RUN=${RUN}' at ${forecast_end_cycle}" - local seconds source_file target_file - seconds=$(to_seconds "${forecast_end_cycle:8:2}"0000) - source_file="ufs.cpld.cpl.r.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" - target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.ufs.cpld.cpl.r.nc" - if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then - ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \ - "${COM_MED_RESTART}/${target_file}" - else - echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found." - fi + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + echo "Copying mediator restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + local seconds source_file target_file + seconds=$(to_seconds "${forecast_end_cycle:8:2}"0000) + source_file="ufs.cpld.cpl.r.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" + target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.ufs.cpld.cpl.r.nc" + if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then + ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \ + "${COM_MED_RESTART}/${target_file}" + else + echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found." + fi + fi - # Copy restarts at the beginning/middle of the next assimilation cycle to COM for RUN=gdas|enkfgdas|enkfgfs + # Copy restarts for the next cycle to COM for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date - if [[ "${DOIAU}" == "YES" ]]; then # Copy restarts at the beginning of the next cycle from DATA to COM - restart_date="${next_cycle_begin}" - else # Copy restarts at the middle of the next cycle from DATA to COM - restart_date="${next_cycle}" - fi + restart_date="${model_start_date_next_cycle}" echo "Copying mediator restarts for 'RUN=${RUN}' at ${restart_date}" seconds=$(to_seconds "${restart_date:8:2}"0000) source_file="ufs.cpld.cpl.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index c300067ce9..de414437b1 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -54,7 +54,14 @@ common_predet(){ current_cycle_begin=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${half_window} hours" +%Y%m%d%H) current_cycle_end=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${half_window} hours" +%Y%m%d%H) next_cycle_begin=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} - ${half_window} hours" +%Y%m%d%H) - next_cycle_end=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} + ${half_window} hours" +%Y%m%d%H) + #Define model start date for current_cycle and next_cycle as the time the forecast will start + if [[ "${DOIAU:-}" == "YES" ]]; then + model_start_date_current_cycle="${current_cycle_begin}" + model_start_date_next_cycle="${next_cycle_begin}" + else + model_start_date_current_cycle=${current_cycle} + model_start_date_next_cycle=${next_cycle} + fi forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) FHMIN=${FHMIN:-0} diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 1b4329c58f..101745da8e 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -560,13 +560,14 @@ if [[ ${type} == "gdas" ]]; then echo "${COM_MED_RESTART/${ROTDIR}\//}/*" } >> "${DATA}/gdasocean_restart.txt" - { - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" - echo 
"${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" - } >> "${DATA}/gdasocean_analysis.txt" - + if [[ ${DO_JEDIOCNVAR} = "YES" ]]; then + { + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" + } >> "${DATA}/gdasocean_analysis.txt" + fi fi if [[ ${DO_ICE} = "YES" ]]; then From 2346c6161f75ae02369cbf30f30c6150d3e12b66 Mon Sep 17 00:00:00 2001 From: Innocent Souopgui <162634017+InnocentSouopgui-NOAA@users.noreply.github.com> Date: Thu, 9 May 2024 21:17:06 -0500 Subject: [PATCH 3/6] Migration to Rocky8 spack-stack installations on Jet (#2458) # Description Migrates Global Workflow to Rocky8 spack-stack installations on Jet. Jet has moved from CentOS7 to Rocky8. Resolves #2377 Refs NOAA-EMC/UPP#919 Refs NOAA-EMC/gfs-utils#60 Refs NOAA-EMC/GSI#732 Refs NOAA-EMC/GSI-Monitor#130 Refs NOAA-EMC/GSI-utils#33 --- modulefiles/module_base.jet.lua | 3 +++ modulefiles/module_gwsetup.jet.lua | 2 +- parm/config/gfs/config.resources | 5 +++-- sorc/gsi_enkf.fd | 2 +- sorc/gsi_monitor.fd | 2 +- sorc/upp.fd | 2 +- versions/run.jet.ver | 5 ++++- versions/run.spack.ver | 2 +- 8 files changed, 15 insertions(+), 8 deletions(-) diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua index afd2701503..31f8aa676d 100644 --- a/modulefiles/module_base.jet.lua +++ b/modulefiles/module_base.jet.lua @@ -39,6 +39,9 @@ load(pathJoin("met", (os.getenv("met_ver") or "None"))) load(pathJoin("metplus", (os.getenv("metplus_ver") or "None"))) load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) +-- Adding perl as a module; With Rocky8, perl packages will not be from the OS +load(pathJoin("perl", (os.getenv("perl_ver") or "None"))) + setenv("WGRIB2","wgrib2") setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) diff --git a/modulefiles/module_gwsetup.jet.lua b/modulefiles/module_gwsetup.jet.lua index 72c40469e4..bc14b19a79 100644 --- a/modulefiles/module_gwsetup.jet.lua +++ b/modulefiles/module_gwsetup.jet.lua @@ -4,7 +4,7 @@ Load environment to run GFS workflow setup scripts on Jet load(pathJoin("rocoto")) -prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev/install/modulefiles/Core") +prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev-rocky8/install/modulefiles/Core") local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" local python_ver=os.getenv("python_ver") or "3.11.6" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 89953c7b84..3c6ccfff6f 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -699,7 +699,7 @@ case ${step} in case "${CASE}" in "C48" | "C96" | "C192") - declare -x "wtime_${step}"="00:15:00" + declare -x "wtime_${step}"="00:20:00" declare -x "wtime_${step}_gfs"="03:00:00" ;; "C384") @@ -747,6 +747,7 @@ case ${step} in exit 4 ;; esac + if [[ ${machine} == "JET" ]]; then unset memory_upp ; fi export npe_node_upp=${npe_upp} export nth_upp=1 @@ -1032,7 +1033,7 @@ case ${step} in export npe_node_esfc=$(( npe_node_max / nth_esfc )) export nth_cycle=${nth_esfc} export npe_node_cycle=$(( npe_node_max / nth_cycle )) - export memory_esfc="80GB" + if [[ ${machine} != "JET" ]]; then export memory_esfc="80G" ; fi ;; 
"epos") diff --git a/sorc/gsi_enkf.fd b/sorc/gsi_enkf.fd index 457510c72e..8e279f9c73 160000 --- a/sorc/gsi_enkf.fd +++ b/sorc/gsi_enkf.fd @@ -1 +1 @@ -Subproject commit 457510c72e486b7b01db09e5b1a6f407778dc772 +Subproject commit 8e279f9c734097f673b07e80f385b2623d13ba4a diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd index 8efe38eade..f9d6f5f744 160000 --- a/sorc/gsi_monitor.fd +++ b/sorc/gsi_monitor.fd @@ -1 +1 @@ -Subproject commit 8efe38eadebbd5d50284aee44f6d8b6799a7f6e6 +Subproject commit f9d6f5f744462a449e70abed8c5860b1c4564ad8 diff --git a/sorc/upp.fd b/sorc/upp.fd index 4770a2f509..83e83a938b 160000 --- a/sorc/upp.fd +++ b/sorc/upp.fd @@ -1 +1 @@ -Subproject commit 4770a2f509b7122e76c4f004210031a58ae9502c +Subproject commit 83e83a938b5794a628d30e66a54902dabe58737d diff --git a/versions/run.jet.ver b/versions/run.jet.ver index d5b98bf514..3aa586ee42 100644 --- a/versions/run.jet.ver +++ b/versions/run.jet.ver @@ -1,11 +1,14 @@ export stack_intel_ver=2021.5.0 export stack_impi_ver=2021.5.1 -export spack_env=gsi-addon-dev +export spack_env=gsi-addon-dev-rocky8 export hpss_ver= export ncl_ver=6.6.2 export R_ver=4.0.2 export gempak_ver=7.4.2 +# Adding perl as a module; With Rocky8, perl packages will not be from the OS +export perl_ver=5.38.0 + source "${HOMEgfs:-}/versions/run.spack.ver" export spack_mod_path="/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.spack.ver b/versions/run.spack.ver index 6685d748cb..5640f7f5f8 100644 --- a/versions/run.spack.ver +++ b/versions/run.spack.ver @@ -32,4 +32,4 @@ export obsproc_run_ver=1.1.2 export prepobs_run_ver=1.0.1 export ens_tracker_ver=feature-GFSv17_com_reorg -export fit2obs_ver=1.0.0 +export fit2obs_ver=1.1.1 From 6a9c1372ecce9e50e4f6e10e56f6e504cde1afe6 Mon Sep 17 00:00:00 2001 From: TerrenceMcGuinness-NOAA Date: Fri, 10 May 2024 14:17:13 -0400 Subject: [PATCH 4/6] Do not use BUILT_semphore to force rebuilds when re-run (#2593) Remove the placement of the `BUILT_semaphore` file after the build in the Jenkins Pipeline and force it to rebuild any changes after a PR is re-ran. --- ci/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 5f64ba4d8d..8c70e31cc4 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -149,7 +149,7 @@ pipeline { } } sh(script: './link_workflow.sh') - sh(script: "echo ${HOMEgfs} > BUILT_semaphor") + //sh(script: "echo ${HOMEgfs} > BUILT_semaphor") } } if (env.CHANGE_ID && system == 'gfs') { From 4fb7c12c325702a47f27c802a5067efd33d0327c Mon Sep 17 00:00:00 2001 From: Fanglin Yang Date: Mon, 13 May 2024 16:37:51 -0400 Subject: [PATCH 5/6] Update damping and time-step (#2575) Updates the model to use explicit Rayleigh damping for u/v and implicit damping to w. This improves model stability and allows for longer timesteps. Also unifies the GDAS and GFS to use the same damping. 
Results from a test at the C1152 resolution (coupled model) can be found at
https://www.emc.ncep.noaa.gov/gmb/wx24fy/C1152/newdamp/

Resolves #2574

Co-authored-by: Walter Kolczynski - NOAA
Co-authored-by: Lisa Bengtsson
Co-authored-by: Rahul Mahajan
---
 parm/config/gefs/config.fcst |  8 ------
 parm/config/gefs/config.ufs  | 45 ++++++++++++++++++++++++++++++----
 parm/config/gfs/config.fcst  |  8 ------
 parm/config/gfs/config.ufs   | 45 ++++++++++++++++++++++++++++++----
 sorc/ufs_model.fd            |  2 +-
 ush/parsing_namelists_FV3.sh |  1 +
 6 files changed, 84 insertions(+), 25 deletions(-)

diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst
index 9e5904d689..f91316c7d2 100644
--- a/parm/config/gefs/config.fcst
+++ b/parm/config/gefs/config.fcst
@@ -109,18 +109,10 @@ if (( gwd_opt == 2 )); then
 fi
 
 # Sponge layer settings
-export tau=0.
-export rf_cutoff=10.
 export d2_bg_k1=0.20
 export d2_bg_k2=0.04
 export dz_min=6
 export n_sponge=42
-if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then
-  export tau=5.0
-  export rf_cutoff=1.0e3
-  export d2_bg_k1=0.20
-  export d2_bg_k2=0.0
-fi
 
 # PBL/turbulance schemes
 export hybedmf=".false."

diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs
index b8695b6dbb..9b42e4aa82 100644
--- a/parm/config/gefs/config.ufs
+++ b/parm/config/gefs/config.ufs
@@ -80,8 +80,14 @@ case "${fv3_res}" in
     export nthreads_fv3_gfs=1
     export nthreads_ufs=1
    export nthreads_ufs_gfs=1
-    export cdmbgwd="0.071,2.1,1.0,1.0"      # mountain blocking, ogwd, cgwd, cgwd src scaling
+    export xr_cnvcld=.false.                # Do not pass conv. clouds to Xu-Randall cloud fraction
+    export cdmbgwd="0.071,2.1,1.0,1.0"      # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="40.0,1.77,1.0,1.0"  # settings for GSL drag suite
+    export k_split=1
+    export n_split=4
+    export tau=10.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=3600
     export knob_ugwp_tauamp=6.0e-3          # setting for UGWPv1 non-stationary GWD
     export WRITE_GROUP=1
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
@@ -98,16 +104,22 @@ case "${fv3_res}" in
     export nthreads_fv3_gfs=1
     export nthreads_ufs=1
     export nthreads_ufs_gfs=1
-    export cdmbgwd="0.14,1.8,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
+    export xr_cnvcld=".false."              # Do not pass conv. clouds to Xu-Randall cloud fraction
+    export cdmbgwd="0.14,1.8,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="20.0,2.5,1.0,1.0"   # settings for GSL drag suite
     export knob_ugwp_tauamp=3.0e-3          # setting for UGWPv1 non-stationary GWD
+    export k_split=1
+    export n_split=4
+    export tau=8.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=1800
     export WRITE_GROUP=1
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
     export WRITE_GROUP_GFS=1
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1
     ;;
   "C192")
-    export DELTIM=450
+    export DELTIM=600
     export layout_x=4
     export layout_y=6
     export layout_x_gfs=4
@@ -119,6 +131,11 @@ case "${fv3_res}" in
     export cdmbgwd="0.23,1.5,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="10.0,3.5,1.0,1.0"   # settings for GSL drag suite
     export knob_ugwp_tauamp=1.5e-3          # setting for UGWPv1 non-stationary GWD
+    export k_split=2
+    export n_split=4
+    export tau=6.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=1800
     export WRITE_GROUP=1
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
     export WRITE_GROUP_GFS=2
@@ -137,6 +154,11 @@ case "${fv3_res}" in
     export cdmbgwd="1.1,0.72,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="5.0,5.0,1.0,1.0"    # settings for GSL drag suite
     export knob_ugwp_tauamp=0.8e-3          # setting for UGWPv1 non-stationary GWD
+    export k_split=2
+    export n_split=4
+    export tau=4.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=900
     export WRITE_GROUP=2
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8
     export WRITE_GROUP_GFS=2
@@ -155,13 +177,18 @@ case "${fv3_res}" in
     export cdmbgwd="4.0,0.15,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="2.5,7.5,1.0,1.0"    # settings for GSL drag suite
     export knob_ugwp_tauamp=0.5e-3          # setting for UGWPv1 non-stationary GWD
+    export k_split=2
+    export n_split=4
+    export tau=3.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=450
     export WRITE_GROUP=2
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
     export WRITE_GROUP_GFS=4
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2
     ;;
   "C1152")
-    export DELTIM=120
+    export DELTIM=150
     export layout_x=8
     export layout_y=16
     export layout_x_gfs=8
@@ -173,6 +200,11 @@ case "${fv3_res}" in
     export cdmbgwd="4.0,0.10,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="1.67,8.8,1.0,1.0"   # settings for GSL drag suite
     export knob_ugwp_tauamp=0.35e-3         # setting for UGWPv1 non-stationary GWD
+    export k_split=2
+    export n_split=6
+    export tau=2.5
+    export rf_cutoff=100.0
+    export fv_sg_adj=450
     export WRITE_GROUP=4
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
     export WRITE_GROUP_GFS=4
@@ -191,6 +223,11 @@ case "${fv3_res}" in
     export cdmbgwd="4.0,0.05,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite
     export knob_ugwp_tauamp=0.13e-3         # setting for UGWPv1 non-stationary GWD
+    export k_split=4
+    export n_split=5
+    export tau=0.5
+    export rf_cutoff=100.0
+    export fv_sg_adj=300
     export WRITE_GROUP=4
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
     export WRITE_GROUP_GFS=4

diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst
index 81fda1942a..db7306d2e8 100644
--- a/parm/config/gfs/config.fcst
+++ b/parm/config/gfs/config.fcst
@@ -121,18 +121,10 @@ if (( gwd_opt == 2 )); then
 fi
 
 # Sponge layer settings
-export tau=0.
-export rf_cutoff=10.
 export d2_bg_k1=0.20
 export d2_bg_k2=0.04
 export dz_min=6
 export n_sponge=42
-if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then
-  export tau=5.0
-  export rf_cutoff=1.0e3
-  export d2_bg_k1=0.20
-  export d2_bg_k2=0.0
-fi
 
 # PBL/turbulance schemes
 export hybedmf=".false."

diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs
index e37e46cf70..a7dabd3d0e 100644
--- a/parm/config/gfs/config.ufs
+++ b/parm/config/gfs/config.ufs
@@ -96,9 +96,15 @@ case "${fv3_res}" in
     export nthreads_fv3_gfs=1
     export nthreads_ufs=1
     export nthreads_ufs_gfs=1
-    export cdmbgwd="0.071,2.1,1.0,1.0"      # mountain blocking, ogwd, cgwd, cgwd src scaling
+    export xr_cnvcld=".false."              # Do not pass conv. clouds to Xu-Randall cloud fraction
+    export cdmbgwd="0.071,2.1,1.0,1.0"      # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="40.0,1.77,1.0,1.0"  # settings for GSL drag suite
     export knob_ugwp_tauamp=6.0e-3          # setting for UGWPv1 non-stationary GWD
+    export k_split=1
+    export n_split=4
+    export tau=10.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=3600
     export WRITE_GROUP=1
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
     export WRITE_GROUP_GFS=1
@@ -132,9 +138,15 @@ case "${fv3_res}" in
     export nthreads_fv3_gfs=1
     export nthreads_ufs=1
     export nthreads_ufs_gfs=1
-    export cdmbgwd="0.14,1.8,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
+    export xr_cnvcld=.false.                # Do not pass conv. clouds to Xu-Randall cloud fraction
+    export cdmbgwd="0.14,1.8,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="20.0,2.5,1.0,1.0"   # settings for GSL drag suite
     export knob_ugwp_tauamp=3.0e-3          # setting for UGWPv1 non-stationary GWD
+    export k_split=1
+    export n_split=4
+    export tau=8.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=1800
     export WRITE_GROUP=1
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1
     export WRITE_GROUP_GFS=1
@@ -160,7 +172,7 @@ case "${fv3_res}" in
       export WRITE_GROUP_GFS=2
       export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=15
     else
-      export DELTIM=450
+      export DELTIM=600
       export layout_x=4
       export layout_y=6
       export layout_x_gfs=4
@@ -172,6 +184,11 @@ case "${fv3_res}" in
     export cdmbgwd="0.23,1.5,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="10.0,3.5,1.0,1.0"   # settings for GSL drag suite
     export knob_ugwp_tauamp=1.5e-3          # setting for UGWPv1 non-stationary GWD
+    export k_split=2
+    export n_split=4
+    export tau=6.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=1800
     export WRITE_GROUP=1
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
     export WRITE_GROUP_GFS=2
@@ -211,6 +228,11 @@ case "${fv3_res}" in
     export cdmbgwd="1.1,0.72,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="5.0,5.0,1.0,1.0"    # settings for GSL drag suite
     export knob_ugwp_tauamp=0.8e-3          # setting for UGWPv1 non-stationary GWD
+    export k_split=2
+    export n_split=4
+    export tau=4.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=900
     export WRITE_GROUP=4
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
     export WRITE_GROUP_GFS=4
@@ -252,6 +274,11 @@ case "${fv3_res}" in
     export cdmbgwd="4.0,0.15,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="2.5,7.5,1.0,1.0"    # settings for GSL drag suite
     export knob_ugwp_tauamp=0.5e-3          # setting for UGWPv1 non-stationary GWD
+    export k_split=2
+    export n_split=4
+    export tau=3.0
+    export rf_cutoff=100.0
+    export fv_sg_adj=450
     export WRITE_GROUP=2
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
     export WRITE_GROUP_GFS=4
     fi
     ;;
   "C1152")
-    export DELTIM=120
+    export DELTIM=150
     export layout_x=8
     export layout_y=16
     export layout_x_gfs=8
@@ -271,6 +298,11 @@ case "${fv3_res}" in
     export cdmbgwd="4.0,0.10,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="1.67,8.8,1.0,1.0"   # settings for GSL drag suite
     export knob_ugwp_tauamp=0.35e-3         # setting for UGWPv1 non-stationary GWD
+    export k_split=2
+    export n_split=6
+    export tau=2.5
+    export rf_cutoff=100.0
+    export fv_sg_adj=450
     export WRITE_GROUP=4
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
     export WRITE_GROUP_GFS=4
@@ -289,6 +321,11 @@ case "${fv3_res}" in
     export cdmbgwd="4.0,0.05,1.0,1.0"       # mountain blocking, ogwd, cgwd, cgwd src scaling
     export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite
     export knob_ugwp_tauamp=0.13e-3         # setting for UGWPv1 non-stationary GWD
+    export k_split=4
+    export n_split=5
+    export tau=0.5
+    export rf_cutoff=100.0
+    export fv_sg_adj=300
     export WRITE_GROUP=4
     export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10  # TODO: refine these numbers when a case is available
     export WRITE_GROUP_GFS=4

diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd
index 7fdb58cad0..c54e98637e 160000
--- a/sorc/ufs_model.fd
+++ b/sorc/ufs_model.fd
@@ -1 +1 @@
-Subproject commit 7fdb58cad0dad2f62ce7813c6719554d1c5a17af
+Subproject commit c54e98637ead81b1fc1e336bd0443c8bfb6faf01

diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh
index 9e6e67f9cc..b88849d7e7 100755
--- a/ush/parsing_namelists_FV3.sh
+++ b/ush/parsing_namelists_FV3.sh
@@ -360,6 +360,7 @@ cat >> input.nml <

Date: Mon, 13 May 2024 22:57:38 +0000
Subject: [PATCH 6/6] Limit gfswavepostpnt to 40 PEs/node (#2588)

This fixes the slow runtime of the gfswavepostpnt job on Hercules. The job
is very I/O intensive and does not scale well to large nodes, so limit the
number of tasks/node to 40.

Resolves #2587
---
 parm/config/gefs/config.resources | 20 +++++++++++++++++---
 parm/config/gfs/config.resources  | 20 +++++++++++++++++---
 2 files changed, 34 insertions(+), 6 deletions(-)

diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources
index d98e437359..04d55ae082 100644
--- a/parm/config/gefs/config.resources
+++ b/parm/config/gefs/config.resources
@@ -252,13 +252,19 @@ case ${step} in
     export memory_wavepostsbs="10GB"
     ;;
 
+  # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes.
+  # Limit the number of tasks/node to 40.
   "wavepostbndpnt")
     export wtime_wavepostbndpnt="01:00:00"
     export npe_wavepostbndpnt=240
     export nth_wavepostbndpnt=1
     export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt ))
-    export NTASKS=${npe_wavepostbndpnt}
     export is_exclusive=True
+    if [[ ${npe_node_wavepostbndpnt} -gt 40 ]]; then
+      export npe_node_wavepostbndpnt=40
+      export is_exclusive=False
+    fi
+    export NTASKS=${npe_wavepostbndpnt}
     ;;
 
   "wavepostbndpntbll")
@@ -266,8 +272,12 @@ case ${step} in
     export wtime_wavepostbndpntbll="01:00:00"
     export npe_wavepostbndpntbll=448
     export nth_wavepostbndpntbll=1
     export npe_node_wavepostbndpntbll=$(( npe_node_max / nth_wavepostbndpntbll ))
-    export NTASKS=${npe_wavepostbndpntbll}
     export is_exclusive=True
+    if [[ ${npe_node_wavepostbndpntbll} -gt 40 ]]; then
+      export npe_node_wavepostbndpntbll=40
+      export is_exclusive=False
+    fi
+    export NTASKS=${npe_wavepostbndpntbll}
     ;;
 
   "wavepostpnt")
@@ -275,8 +285,12 @@ case ${step} in
    export wtime_wavepostpnt="04:00:00"
     export npe_wavepostpnt=200
     export nth_wavepostpnt=1
     export npe_node_wavepostpnt=$(( npe_node_max / nth_wavepostpnt ))
-    export NTASKS=${npe_wavepostpnt}
     export is_exclusive=True
+    if [[ ${npe_node_wavepostpnt} -gt 40 ]]; then
+      export npe_node_wavepostpnt=40
+      export is_exclusive=False
+    fi
+    export NTASKS=${npe_wavepostpnt}
     ;;
 
   *)

diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index 3c6ccfff6f..e2893d6337 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -140,13 +140,19 @@ case ${step} in
     export memory_wavepostsbs_gfs="10GB"
     ;;
 
+  # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes.
+  # Limit the number of tasks/node to 40.
   "wavepostbndpnt")
     export wtime_wavepostbndpnt="01:00:00"
     export npe_wavepostbndpnt=240
     export nth_wavepostbndpnt=1
     export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt ))
-    export NTASKS=${npe_wavepostbndpnt}
     export is_exclusive=True
+    if [[ ${npe_node_wavepostbndpnt} -gt 40 ]]; then
+      export npe_node_wavepostbndpnt=40
+      export is_exclusive=False
+    fi
+    export NTASKS=${npe_wavepostbndpnt}
     ;;
 
   "wavepostbndpntbll")
@@ -154,8 +160,12 @@ case ${step} in
     export wtime_wavepostbndpntbll="01:00:00"
     export npe_wavepostbndpntbll=448
     export nth_wavepostbndpntbll=1
     export npe_node_wavepostbndpntbll=$(( npe_node_max / nth_wavepostbndpntbll ))
-    export NTASKS=${npe_wavepostbndpntbll}
     export is_exclusive=True
+    if [[ ${npe_node_wavepostbndpntbll} -gt 40 ]]; then
+      export npe_node_wavepostbndpntbll=40
+      export is_exclusive=False
+    fi
+    export NTASKS=${npe_wavepostbndpntbll}
     ;;
 
   "wavepostpnt")
@@ -163,8 +173,12 @@ case ${step} in
     export wtime_wavepostpnt="04:00:00"
     export npe_wavepostpnt=200
     export nth_wavepostpnt=1
     export npe_node_wavepostpnt=$(( npe_node_max / nth_wavepostpnt ))
-    export NTASKS=${npe_wavepostpnt}
     export is_exclusive=True
+    if [[ ${npe_node_wavepostpnt} -gt 40 ]]; then
+      export npe_node_wavepostpnt=40
+      export is_exclusive=False
+    fi
+    export NTASKS=${npe_wavepostpnt}
     ;;
 
   "wavegempak")